From 676f5e6d7f184101b8763e4249b18b237bbe0ec7 Mon Sep 17 00:00:00 2001
From: QiuMM
Date: Mon, 29 Oct 2018 20:02:43 +0800
Subject: [PATCH] Prohibit some guava collection APIs and use JDK collection APIs directly (#6511)

* Prohibit some guava collection APIs and use JDK APIs directly

* reset files that changed by accident

* sort codestyle/druid-forbidden-apis.txt alphabetically
---
 ...ressedVSizeColumnarMultiIntsBenchmark.java | 4 +-
 .../FilteredAggregatorBenchmark.java | 4 +-
 .../GroupByTypeInterfaceBenchmark.java | 4 +-
 .../benchmark/MergeSequenceBenchmark.java | 3 +-
 .../benchmark/TopNTypeInterfaceBenchmark.java | 4 +-
 .../benchmark/query/GroupByBenchmark.java | 13 ++-
 .../benchmark/query/SearchBenchmark.java | 22 ++---
 .../benchmark/query/SelectBenchmark.java | 8 +-
 .../druid/benchmark/query/SqlBenchmark.java | 4 +-
 .../benchmark/query/TimeseriesBenchmark.java | 9 +-
 .../druid/benchmark/query/TopNBenchmark.java | 9 +-
 .../timecompare/TimeCompareBenchmark.java | 5 +-
 codestyle/druid-forbidden-apis.txt | 37 ++++++---
 .../org/apache/druid/data/input/Rows.java | 4 +-
 .../druid/data/input/impl/DimensionsSpec.java | 8 +-
 .../apache/druid/guice/JsonConfigurator.java | 7 +-
 .../org/apache/druid/guice/ListProvider.java | 3 +-
 .../druid/indexer/TaskMetricsUtils.java | 5 +-
 .../druid/java/util/common/FileUtils.java | 4 +-
 .../druid/java/util/common/JodaUtils.java | 13 ++-
 .../util/common/granularity/Granularity.java | 4 +-
 .../java/util/common/guava/MergeIterable.java | 5 +-
 .../util/common/io/smoosh/FileSmoosher.java | 13 ++-
 .../common/io/smoosh/SmooshedFileMapper.java | 7 +-
 .../common/parsers/JSONToLowerParser.java | 3 +-
 .../java/util/common/parsers/ParserUtils.java | 6 +-
 .../java/util/common/parsers/RegexParser.java | 3 +-
 .../java/util/emitter/core/Emitters.java | 9 +-
 .../emitter/service/ServiceMetricEvent.java | 3 +-
 .../druid/java/util/http/client/Request.java | 8 +-
 .../org/apache/druid/math/expr/Parser.java | 8 +-
 .../metadata/MetadataStorageTablesConfig.java | 8 +-
 .../timeline/VersionedIntervalTimeline.java | 15 ++--
 .../timeline/partition/PartitionHolder.java | 7 +-
 .../druid/collections/BlockingPoolTest.java | 6 +-
 .../collections/CombiningIterableTest.java | 3 +-
 .../collections/OrderedMergeIteratorTest.java | 12 +--
 .../collections/OrderedMergeSequenceTest.java | 2 +-
 .../data/input/impl/CSVParseSpecTest.java | 10 +--
 .../input/impl/DelimitedParseSpecTest.java | 14 ++--
 .../input/impl/FileIteratingFirehoseTest.java | 3 +-
 .../impl/JSONLowercaseParseSpecTest.java | 6 +-
 .../druid/data/input/impl/ParseSpecTest.java | 11 +--
 .../data/input/impl/SqlFirehoseTest.java | 3 +-
 ...fetchableTextFilesFirehoseFactoryTest.java | 5 +-
 .../common/guava/FilteredSequenceTest.java | 3 +-
 .../util/common/guava/MappedSequenceTest.java | 3 +-
 .../util/common/lifecycle/LifecycleTest.java | 8 +-
 .../druid/timeline/DataSegmentTest.java | 4 +-
 .../wikipedia/WikipediaIrcDecoder.java | 8 +-
 .../ImmutableConciseSetIntersectionTest.java | 37 ++++-----
 .../intset/ImmutableConciseSetTest.java | 61 +++++++-------
 ...asedDruidToTimelineEventConverterTest.java | 4 +-
 .../azure/AzureDataSegmentPusherTest.java | 10 +--
 .../CloudFilesDataSegmentPusherTest.java | 10 +--
 .../google/GoogleDataSegmentFinder.java | 4 +-
 .../google/GoogleDataSegmentPusherTest.java | 10 +--
 .../graphite/WhiteListBasedConverterTest.java | 4 +-
 ...fkaEightSimpleConsumerFirehoseFactory.java | 8 +-
 .../MaterializedViewSupervisor.java | 32 +++----
 .../MaterializedViewSupervisorSpec.java | 16 ++--
 .../MaterializedViewSupervisorTest.java | 4 +-
 .../materializedview/DataSourceOptimizer.java | 14 ++--
 .../DerivativeDataSourceManager.java | 9 +-
 .../MaterializedViewUtils.java | 4 +-
 .../input/orc/OrcHadoopInputRowParser.java | 4 +-
 .../input/orc/OrcIndexGeneratorJobTest.java | 7 +-
 .../parquet/ParquetHadoopInputRowParser.java | 4 +-
 .../parquet/avro/DruidParquetReadSupport.java | 10 +--
 .../input/parquet/DruidParquetInputTest.java | 4 +-
 .../TimestampGroupByAggregationTest.java | 3 +-
 .../hll/HllSketchUnionPostAggregator.java | 8 +-
 .../theta/SketchEstimatePostAggregator.java | 4 +-
 .../theta/SketchSetPostAggregator.java | 4 +-
 .../hll/HllSketchAggregatorTest.java | 26 +++---
 .../DoublesSketchAggregatorTest.java | 4 +-
 .../theta/SketchAggregationTest.java | 3 +-
 .../SketchAggregationWithSimpleDataTest.java | 4 +-
 .../oldapi/OldApiSketchAggregationTest.java | 3 +-
 .../ArrayOfDoublesSketchAggregationTest.java | 4 +-
 .../druid/security/basic/BasicAuthUtils.java | 8 +-
 ...atorBasicAuthenticatorResourceHandler.java | 3 +-
 ...aultBasicAuthenticatorResourceHandler.java | 3 +-
 ...dinatorBasicAuthorizerResourceHandler.java | 3 +-
 ...DefaultBasicAuthorizerResourceHandler.java | 3 +-
 ...cAuthorizerMetadataStorageUpdaterTest.java | 18 ++--
 .../hdfs/HdfsDataSegmentPusherTest.java | 22 ++---
 .../histogram/ApproximateHistogram.java | 9 +-
 .../ApproximateHistogramAggregationTest.java | 3 +-
 .../ApproximateHistogramGroupByQueryTest.java | 4 +-
 ...ementalPublishingKafkaIndexTaskRunner.java | 17 ++--
 .../kafka/KafkaDataSourceMetadata.java | 6 +-
 .../kafka/LegacyKafkaIndexTaskRunner.java | 20 ++---
 .../kafka/supervisor/KafkaSupervisor.java | 33 ++++----
 .../kafka/KafkaIndexTaskClientTest.java | 37 +++++----
 .../indexing/kafka/KafkaIndexTaskTest.java | 12 +--
 .../druid/indexing/kafka/test/TestBroker.java | 6 +-
 .../storage/s3/S3DataSegmentFinderTest.java | 3 +-
 .../storage/s3/S3DataSegmentMoverTest.java | 7 +-
 .../storage/s3/S3DataSegmentPusherTest.java | 10 +--
 .../PvaluefromZscorePostAggregator.java | 4 +-
 .../teststats/ZtestPostAggregator.java | 4 +-
 .../variance/VarianceTestHelper.java | 7 +-
 .../indexer/DetermineHashedPartitionsJob.java | 10 ++-
 .../druid/indexer/DeterminePartitionsJob.java | 12 +--
 .../HadoopDruidDetermineConfigurationJob.java | 4 +-
 .../indexer/HadoopDruidIndexerConfig.java | 8 +-
 .../druid/indexer/HadoopDruidIndexerJob.java | 4 +-
 .../druid/indexer/HadoopIngestionSpec.java | 3 +-
 .../druid/indexer/IndexGeneratorJob.java | 13 +--
 .../apache/druid/indexer/InputRowSerde.java | 8 +-
 .../java/org/apache/druid/indexer/Utils.java | 4 +-
 .../hadoop/DatasourceRecordReader.java | 5 +-
 .../indexer/path/DatasourcePathSpec.java | 3 +-
 .../path/GranularUnprocessedPathSpec.java | 4 +-
 .../indexer/path/GranularityPathSpec.java | 6 +-
 .../druid/indexer/path/StaticPathSpec.java | 4 +-
 .../indexer/BatchDeltaIngestionTest.java | 4 +-
 .../indexer/HadoopDruidIndexerConfigTest.java | 4 +-
 .../druid/indexer/IndexGeneratorJobTest.java | 9 +-
 .../druid/indexer/InputRowSerdeTest.java | 4 +-
 .../hadoop/DatasourceInputFormatTest.java | 4 +-
 .../hadoop/DatasourceRecordReaderTest.java | 4 +-
 .../ActionBasedUsedSegmentChecker.java | 10 +--
 .../druid/indexing/common/TaskReport.java | 4 +-
 .../common/index/YeOldePlumberSchool.java | 8 +-
 .../stats/DropwizardRowIngestionMeters.java | 10 +--
 .../indexing/common/task/AppendTask.java | 4 +-
 .../AppenderatorDriverRealtimeIndexTask.java | 12 +--
 .../indexing/common/task/HadoopIndexTask.java | 8 +-
 .../indexing/common/task/HadoopTask.java | 2 +-
 .../druid/indexing/common/task/IndexTask.java | 13 ++-
 .../indexing/common/task/MergeTaskBase.java | 5 +-
 .../indexing/overlord/ForkingTaskRunner.java | 11 +--
 .../overlord/HeapMemoryTaskStorage.java | 4 +-
 .../druid/indexing/overlord/PortFinder.java | 4 +-
 .../druid/indexing/overlord/TaskLockbox.java | 11 ++-
 .../druid/indexing/overlord/TaskQueue.java | 11 +--
 .../overlord/TaskStorageQueryAdapter.java | 4 +-
 .../druid/indexing/overlord/ZkWorker.java | 8 +-
 ...ngTaskBasedWorkerProvisioningStrategy.java | 13 +--
 .../SimpleWorkerProvisioningStrategy.java | 5 +-
 .../autoscaling/ec2/EC2AutoScaler.java | 7 +-
 .../indexing/overlord/hrtr/WorkerHolder.java | 4 +-
 .../overlord/http/OverlordResource.java | 11 ++-
 .../worker/WorkerCuratorCoordinator.java | 4 +-
 ...penderatorDriverRealtimeIndexTaskTest.java | 4 +-
 .../indexing/common/task/IndexTaskTest.java | 37 ++++-----
 .../common/task/RealtimeIndexTaskTest.java | 10 +--
 .../task/SameIntervalMergeTaskTest.java | 4 +-
 ...stractParallelIndexSupervisorTaskTest.java | 6 +-
 .../ParallelIndexSupervisorTaskSerdeTest.java | 6 +-
 .../IngestSegmentFirehoseFactoryTest.java | 11 ++-
 ...estSegmentFirehoseFactoryTimelineTest.java | 12 +--
 .../indexing/overlord/TaskLifecycleTest.java | 5 +-
 .../autoscaling/EC2AutoScalerTest.java | 3 +-
 ...dingTaskBasedProvisioningStrategyTest.java | 34 ++++----
 .../SimpleProvisioningStrategyTest.java | 24 +++---
 ...nWithAffinityWorkerSelectStrategyTest.java | 31 +++----
 ...lDistributionWorkerSelectStrategyTest.java | 51 ++++++------
 ...yWithAffinityWorkerSelectStrategyTest.java | 23 ++---
 .../supervisor/SupervisorResourceTest.java | 12 +--
 ...TestIndexerMetadataStorageCoordinator.java | 7 +-
 .../worker/WorkerTaskMonitorTest.java | 4 +-
 .../druid/tests/indexer/ITUnionQueryTest.java | 4 +-
 .../druid/collections/spatial/Point.java | 4 +-
 .../apache/druid/guice/GuiceInjectors.java | 4 +-
 .../org/apache/druid/query/BaseQuery.java | 4 +-
 .../java/org/apache/druid/query/Druids.java | 9 +-
 .../druid/query/ExecutorServiceMonitor.java | 4 +-
 .../query/IntervalChunkingQueryRunner.java | 3 +-
 ...portTimelineMissingSegmentQueryRunner.java | 4 +-
 .../apache/druid/query/RetryQueryRunner.java | 8 +-
 .../query/aggregation/AggregatorUtil.java | 2 +-
 .../HistogramAggregatorFactory.java | 4 +-
 .../post/ArithmeticPostAggregator.java | 8 +-
 .../post/ConstantPostAggregator.java | 4 +-
 .../post/DoubleGreatestPostAggregator.java | 4 +-
 .../post/DoubleLeastPostAggregator.java | 4 +-
 .../post/LongGreatestPostAggregator.java | 4 +-
 .../post/LongLeastPostAggregator.java | 4 +-
 .../druid/query/cache/CacheKeyBuilder.java | 3 +-
 .../DataSourceMetadataQuery.java | 4 +-
 .../druid/query/filter/AndDimFilter.java | 3 +-
 .../druid/query/filter/DimFilterUtils.java | 7 +-
 .../druid/query/filter/OrDimFilter.java | 3 +-
 .../druid/query/groupby/GroupByQuery.java | 9 +-
 .../query/groupby/GroupByQueryEngine.java | 8 +-
 .../query/groupby/GroupByQueryHelper.java | 3 +-
 .../groupby/GroupByQueryQueryToolChest.java | 7 +-
 .../GroupByMergingQueryRunnerV2.java | 3 +-
 .../LimitedTemporaryStorage.java | 4 +-
 .../epinephelinae/RowBasedGrouperHelper.java | 13 ++-
 .../epinephelinae/SpillingGrouper.java | 5 +-
 ...ngStringGroupByColumnSelectorStrategy.java | 4 +-
 .../groupby/orderby/DefaultLimitSpec.java | 12 +--
 .../groupby/strategy/GroupByStrategyV1.java | 10 +--
 .../groupby/strategy/GroupByStrategyV2.java | 4 +-
 .../druid/query/metadata/SegmentAnalyzer.java | 8 +-
 .../SegmentMetadataQueryQueryToolChest.java | 12 +--
 .../SegmentMetadataQueryRunnerFactory.java | 7 +-
 .../metadata/ListColumnIncluderator.java | 4 +-
 .../apache/druid/query/scan/ScanQuery.java | 4 +-
 .../search/SearchQueryQueryToolChest.java | 4 +-
 .../query/search/UseIndexesStrategy.java | 6 +-
 .../apache/druid/query/select/PagingSpec.java | 6 +-
 .../druid/query/select/SelectQueryEngine.java | 3 +-
 .../select/SelectQueryQueryToolChest.java | 7 +-
 .../select/SelectResultValueBuilder.java | 10 +--
 .../spec/SpecificSegmentQueryRunner.java | 4 +-
 .../query/timeboundary/TimeBoundaryQuery.java | 10 +--
 .../TimeseriesQueryQueryToolChest.java | 2 +-
 .../topn/TimeExtractionTopNAlgorithm.java | 4 +-
 .../druid/query/topn/TopNQueryBuilder.java | 4 +-
 .../druid/query/topn/TopNResultValue.java | 3 +-
 .../org/apache/druid/segment/IndexIO.java | 16 ++--
 .../org/apache/druid/segment/IndexMerger.java | 6 +-
 .../apache/druid/segment/IndexMergerV9.java | 15 ++--
 .../druid/segment/StringDimensionIndexer.java | 3 +-
 .../apache/druid/segment/VirtualColumns.java | 6 +-
 .../segment/column/ColumnDescriptor.java | 4 +-
 .../segment/data/CompressionFactory.java | 4 +-
 .../segment/data/CompressionStrategy.java | 4 +-
 .../apache/druid/segment/filter/Filters.java | 12 +--
 .../apache/druid/segment/filter/OrFilter.java | 3 +-
 .../segment/incremental/IncrementalIndex.java | 6 +-
 .../incremental/OffheapIncrementalIndex.java | 4 +-
 .../incremental/OnheapIncrementalIndex.java | 3 +-
 .../SpatialDimensionRowTransformer.java | 9 +-
 .../druid/segment/serde/ComplexMetrics.java | 4 +-
 .../segment/virtual/ExpressionSelectors.java | 4 +-
 .../bitmap/WrappedBitSetBitmapBitSetTest.java | 3 +-
 .../spatial/ImmutableRTreeTest.java | 5 +-
 .../apache/druid/query/DoubleStorageTest.java | 3 +-
 .../druid/query/MultiValuedDimensionTest.java | 8 +-
 .../druid/query/QueryRunnerTestHelper.java | 4 +-
 .../druid/query/RetryQueryRunnerTest.java | 24 +++---
 .../druid/query/SchemaEvolutionTest.java | 6 +-
 .../druid/query/TimewarpOperatorTest.java | 4 +-
 .../druid/query/UnionQueryRunnerTest.java | 4 +-
 .../aggregation/AggregationTestHelper.java | 6 +-
 .../JavaScriptAggregatorBenchmark.java | 4 +-
 .../aggregation/JavaScriptAggregatorTest.java | 6 +-
 .../CardinalityAggregatorTest.java | 6 +-
 .../HyperUniquesAggregationTest.java | 4 +-
 ...nalizingFieldAccessPostAggregatorTest.java | 15 ++--
 .../post/JavaScriptPostAggregatorTest.java | 3 +-
 .../query/cache/CacheKeyBuilderTest.java | 11 +--
 .../DataSourceMetadataQueryTest.java | 4 +-
 .../extraction/CascadeExtractionFnTest.java | 3 +-
 .../extraction/MapLookupExtractorTest.java | 3 +-
 .../MatchingDimExtractionFnTest.java | 4 +-
 .../extraction/RegexDimExtractionFnTest.java | 15 ++--
 .../SearchQuerySpecDimExtractionFnTest.java | 8 +-
 .../extraction/TimeDimExtractionFnTest.java | 8 +-
 .../query/filter/DimFilterUtilsTest.java | 8 +-
 ...ByLimitPushDownInsufficientBufferTest.java | 15 ++--
 ...roupByLimitPushDownMultiNodeMergeTest.java | 15 ++--
 .../groupby/GroupByMultiSegmentTest.java | 11 ++-
 .../groupby/GroupByQueryMergeBufferTest.java | 3 +-
 .../GroupByQueryRunnerFactoryTest.java | 4 +-
 .../GroupByQueryRunnerFailureTest.java | 4 +-
 .../query/groupby/GroupByQueryRunnerTest.java | 83 +++++++++----------
 .../groupby/GroupByQueryRunnerTestHelper.java | 9 +-
 .../groupby/NestedQueryPushDownTest.java | 25 +++---
 .../epinephelinae/BufferHashGrouperTest.java | 5 +-
 .../ByteBufferMinMaxOffsetHeapTest.java | 18 ++--
 .../LimitedBufferHashGrouperTest.java | 5 +-
 .../groupby/orderby/DefaultLimitSpecTest.java | 4 +-
 .../query/lookup/LookupExtractionFnTest.java | 4 +-
 ...egmentMetadataQueryQueryToolChestTest.java | 30 +++----
 .../metadata/SegmentMetadataQueryTest.java | 24 +++---
 .../SegmentMetadataUnionQueryTest.java | 4 +-
 .../druid/query/scan/ScanQueryRunnerTest.java | 15 ++--
 .../search/SearchQueryRunnerWithCaseTest.java | 19 +++--
 .../select/MultiSegmentSelectQueryTest.java | 4 +-
 .../druid/query/select/PagingOffsetTest.java | 4 +-
 .../query/select/SelectQueryRunnerTest.java | 18 ++--
 .../spec/SpecificSegmentQueryRunnerTest.java | 12 +--
 .../TimeBoundaryQueryRunnerTest.java | 10 +--
 .../timeseries/TimeseriesQueryRunnerTest.java | 13 ++-
 .../topn/TopNQueryQueryToolChestTest.java | 4 +-
 .../query/topn/TopNQueryRunnerBenchmark.java | 3 +-
 .../druid/query/topn/TopNQueryRunnerTest.java | 15 ++--
 .../query/topn/TopNQueryRunnerTestHelper.java | 4 +-
 .../org/apache/druid/segment/AppendTest.java | 3 +-
 .../apache/druid/segment/IndexBuilder.java | 5 +-
 .../IndexMergerV9WithSpatialIndexTest.java | 8 +-
 .../druid/segment/SchemalessIndexTest.java | 25 +++---
 .../org/apache/druid/segment/TestHelper.java | 4 +-
 .../segment/data/GenericIndexedTest.java | 3 +-
 .../druid/segment/filter/BaseFilterTest.java | 13 ++-
 .../segment/filter/ExpressionFilterTest.java | 3 +-
 .../druid/segment/filter/InFilterTest.java | 3 +-
 .../filter/SpatialFilterBonusTest.java | 12 +--
 .../segment/filter/SpatialFilterTest.java | 8 +-
 .../IncrementalIndexRowCompTest.java | 4 +-
 .../IncrementalIndexRowSizeTest.java | 4 +-
 .../incremental/IncrementalIndexTest.java | 3 +-
 .../apache/druid/client/BrokerServerView.java | 6 +-
 .../druid/client/CachingClusteredClient.java | 15 ++--
 .../druid/client/CoordinatorServerView.java | 6 +-
 .../druid/client/HttpServerInventoryView.java | 3 +-
 .../apache/druid/client/ServerViewUtil.java | 4 +-
 .../druid/client/cache/HybridCache.java | 4 +-
 .../apache/druid/client/cache/MapCache.java | 6 +-
 .../druid/client/cache/MemcachedCache.java | 2 +-
 .../selector/CustomTierSelectorStrategy.java | 4 +-
 .../CustomTierSelectorStrategyConfig.java | 4 +-
 .../druid/curator/announcement/Announcer.java | 9 +-
 .../discovery/CuratorServiceAnnouncer.java | 4 +-
 .../curator/discovery/DiscoveryModule.java | 4 +-
 .../druid/guice/FireDepartmentsProvider.java | 4 +-
 .../druid/initialization/Initialization.java | 10 +--
 .../IndexerSQLMetadataStorageCoordinator.java | 10 +--
 .../metadata/SQLMetadataRuleManager.java | 10 +--
 .../metadata/SQLMetadataSegmentManager.java | 2 +-
 .../SQLMetadataSupervisorManager.java | 10 +--
 .../druid/segment/indexing/DataSchema.java | 4 +-
 .../granularity/ArbitraryGranularitySpec.java | 7 +-
 .../granularity/UniformGranularitySpec.java | 4 +-
 .../SegmentLoaderLocalCacheManager.java | 3 +-
 .../segment/loading/StorageLocation.java | 4 +-
 .../segment/realtime/RealtimeManager.java | 5 +-
 .../realtime/RealtimeMetricsMonitor.java | 4 +-
 .../AppenderatorDriverMetadata.java | 8 +-
 .../appenderator/AppenderatorImpl.java | 20 ++---
 .../appenderator/AppenderatorPlumber.java | 3 +-
 .../realtime/appenderator/Committed.java | 6 +-
 .../EventReceiverFirehoseFactory.java | 4 +-
 .../firehose/IngestSegmentFirehose.java | 5 +-
 .../realtime/plumber/FlushingPlumber.java | 4 +-
 .../realtime/plumber/RealtimePlumber.java | 10 +--
 .../druid/segment/realtime/plumber/Sink.java | 10 +--
 .../druid/server/ClientInfoResource.java | 19 +++--
 .../apache/druid/server/QueryResource.java | 5 +-
 .../BatchDataSegmentAnnouncer.java | 8 +-
 .../coordination/SegmentLoadDropHandler.java | 6 +-
 .../coordinator/CostBalancerStrategy.java | 5 +-
 .../coordinator/CuratorLoadQueuePeon.java | 4 +-
 .../coordinator/DatasourceWhitelist.java | 4 +-
 .../server/coordinator/DruidCoordinator.java | 15 ++--
 .../DruidCoordinatorRuntimeParams.java | 6 +-
 .../server/coordinator/HttpLoadQueuePeon.java | 3 +-
 .../coordinator/ReplicationThrottler.java | 12 +--
 .../coordinator/SegmentReplicantLookup.java | 4 +-
 .../DruidCoordinatorCleanupOvershadowed.java | 4 +-
 .../helper/DruidCoordinatorSegmentMerger.java | 7 +-
 .../server/coordinator/rules/RuleMap.java | 5 +-
 .../druid/server/emitter/EmitterModule.java | 4 +-
 .../server/http/DatasourcesResource.java | 44 +++++-----
 .../druid/server/http/IntervalsResource.java | 19 +++--
 .../druid/server/http/MetadataResource.java | 5 +-
 .../druid/server/http/TiersResource.java | 14 ++--
 .../AuthorizerMapperModule.java | 4 +-
 .../log/ComposingRequestLoggerProvider.java | 3 +-
 .../druid/server/metrics/MetricsModule.java | 4 +-
 .../druid/server/metrics/MonitorsConfig.java | 4 +-
 .../server/router/CoordinatorRuleManager.java | 4 +-
 .../druid/server/security/AuthTestUtils.java | 5 +-
 .../server/security/AuthorizationUtils.java | 10 +--
 .../PreResponseAuthorizationCheckFilter.java | 4 +-
 .../client/CachingClusteredClientTest.java | 73 ++++++++--------
 .../druid/client/CachingQueryRunnerTest.java | 5 +-
 .../druid/client/DirectDruidClientTest.java | 21 +++--
 .../client/cache/CacheDistributionTest.java | 4 +-
 .../druid/client/cache/CaffeineCacheTest.java | 3 +-
 .../client/cache/MemcachedCacheBenchmark.java | 4 +-
 .../client/cache/MemcachedCacheTest.java | 4 +-
 .../client/indexing/ClientMergeQueryTest.java | 4 +-
 .../selector/TierSelectorStrategyTest.java | 11 ++-
 .../CuratorInventoryManagerTest.java | 4 +-
 .../initialization/InitializationTest.java | 2 +-
 .../CombiningFirehoseFactoryTest.java | 4 +-
 .../SegmentLoaderLocalCacheManagerTest.java | 12 +--
 .../segment/realtime/RealtimeManagerTest.java | 4 +-
 .../realtime/appenderator/CommittedTest.java | 4 +-
 .../StreamAppenderatorDriverTest.java | 4 +-
 .../appenderator/TestUsedSegmentChecker.java | 4 +-
 .../firehose/LocalFirehoseFactoryTest.java | 5 +-
 .../firehose/SqlFirehoseFactoryTest.java | 5 +-
 .../plumber/RealtimePlumberSchoolTest.java | 10 +--
 .../segment/realtime/plumber/SinkTest.java | 10 +--
 .../druid/server/SegmentManagerTest.java | 22 ++--
 .../druid/server/StatusResourceTest.java | 4 +-
 .../coordination/ServerManagerTest.java | 6 +-
 .../BatchDataSegmentAnnouncerTest.java | 7 +-
 .../coordinator/CoordinatorStatsTest.java | 4 +-
 .../coordinator/CostBalancerStrategyTest.java | 14 ++--
 .../CuratorDruidCoordinatorTest.java | 5 +-
 ...iskNormalizedCostBalancerStrategyTest.java | 14 ++--
 .../DruidCoordinatorBalancerProfiler.java | 19 ++---
 .../DruidCoordinatorBalancerTest.java | 27 +++---
 .../DruidCoordinatorRuleRunnerTest.java | 38 ++++-----
 .../DruidCoordinatorSegmentMergerTest.java | 4 +-
 .../coordinator/DruidCoordinatorTest.java | 6 +-
 .../ReservoirSegmentSamplerTest.java | 31 +++----
 .../cost/SegmentsCostCacheTest.java | 5 +-
 .../rules/BroadcastDistributionRuleTest.java | 29 +++----
 .../coordinator/rules/LoadRuleTest.java | 18 ++--
 .../shard/SingleDimensionShardSpecTest.java | 3 +-
 .../druid/timeline/DataSegmentTest.java | 4 +-
 .../apache/druid/cli/CliHadoopIndexer.java | 12 +--
 .../druid/cli/CliInternalHadoopIndexer.java | 4 +-
 .../org/apache/druid/cli/DumpSegment.java | 7 +-
 .../apache/druid/cli/PullDependencies.java | 16 ++--
 .../DataSegmentPusherDefaultConverter.java | 4 +-
 .../druid/cli/PullDependenciesTest.java | 4 +-
 .../druid/sql/avatica/DruidConnection.java | 2 +-
 .../sql/calcite/aggregation/Aggregation.java | 8 +-
 .../sql/calcite/expression/Expressions.java | 3 +-
 .../sql/calcite/expression/ExtractionFns.java | 4 +-
 .../calcite/filtration/BottomUpTransform.java | 6 +-
 .../filtration/CombineAndSimplifyBounds.java | 6 +-
 .../filtration/ConvertSelectorsToIns.java | 9 +-
 .../MoveTimeFiltersToIntervals.java | 8 +-
 .../sql/calcite/filtration/RangeSets.java | 4 +-
 .../calcite/planner/DruidOperatorTable.java | 6 +-
 .../sql/calcite/planner/DruidPlanner.java | 4 +-
 .../sql/calcite/planner/PlannerContext.java | 4 +-
 .../druid/sql/calcite/rel/DruidQuery.java | 4 +-
 .../druid/sql/calcite/rel/Grouping.java | 4 +-
 .../druid/sql/calcite/rel/QueryMaker.java | 4 +-
 .../druid/sql/calcite/table/RowSignature.java | 8 +-
 .../sql/avatica/DruidAvaticaHandlerTest.java | 9 +-
 .../druid/sql/calcite/CalciteQueryTest.java | 4 +-
 .../druid/sql/calcite/util/CalciteTests.java | 4 +-
 .../SpecificSegmentsQuerySegmentWalker.java | 10 +--
 428 files changed, 1806 insertions(+), 1795 deletions(-)

diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/CompressedVSizeColumnarMultiIntsBenchmark.java b/benchmarks/src/main/java/org/apache/druid/benchmark/CompressedVSizeColumnarMultiIntsBenchmark.java
index fe2e42dc544..1c7c350df32 100644
--- a/benchmarks/src/main/java/org/apache/druid/benchmark/CompressedVSizeColumnarMultiIntsBenchmark.java
+++ b/benchmarks/src/main/java/org/apache/druid/benchmark/CompressedVSizeColumnarMultiIntsBenchmark.java
@@ -21,7 +21,6 @@ package org.apache.druid.benchmark;
 import com.google.common.base.Function;
 import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
 import org.apache.druid.java.util.common.io.Closer;
 import org.apache.druid.segment.data.ColumnarInts;
 import org.apache.druid.segment.data.ColumnarMultiInts;
@@ -45,6 +44,7 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
 import java.nio.channels.WritableByteChannel;
+import java.util.ArrayList;
 import java.util.BitSet;
 import java.util.List;
 import java.util.Random;
@@ -73,7 +73,7 @@ public class CompressedVSizeColumnarMultiIntsBenchmark
   public void setup() throws IOException
   {
     Random rand = ThreadLocalRandom.current();
-    List rows = Lists.newArrayList();
+    List rows = new ArrayList<>();
     final int bound = 1 << bytes;
     for (int i = 0; i < 0x100000; i++) {
       int count = rand.nextInt(valuesPerRowBound) + 1;
diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/FilteredAggregatorBenchmark.java b/benchmarks/src/main/java/org/apache/druid/benchmark/FilteredAggregatorBenchmark.java
index 172bce90034..821986fd097 100644
--- a/benchmarks/src/main/java/org/apache/druid/benchmark/FilteredAggregatorBenchmark.java
+++ b/benchmarks/src/main/java/org/apache/druid/benchmark/FilteredAggregatorBenchmark.java
@@ -20,7 +20,6 @@ package org.apache.druid.benchmark;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.Maps;
 import com.google.common.io.Files;
 import org.apache.commons.io.FileUtils;
 import org.apache.druid.benchmark.datagen.BenchmarkDataGenerator;
@@ -91,6 +90,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
@@ -243,7 +243,7 @@ public class FilteredAggregatorBenchmark
         toolChest
     );
-    Sequence queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap());
+    Sequence queryResult = theRunner.run(QueryPlus.wrap(query), new HashMap<>());
     return queryResult.toList();
   }
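The substitution in these benchmark diffs is purely mechanical. Guava's Maps.newHashMap() and Lists.newArrayList() exist because, before Java 7, JDK constructors forced you to repeat the type arguments; with the diamond operator the JDK form is just as short. A minimal sketch of the pattern (the class and variable names here are illustrative, not taken from the patch):

import java.util.HashMap;
import java.util.Map;

public class ResponseContextExample
{
  public static void main(String[] args)
  {
    // Pre-Java 7, the JDK forced repetition of the type arguments:
    //   Map<String, Object> responseContext = new HashMap<String, Object>();
    // which is why Maps.newHashMap() (which does nothing but call the
    // constructor) was popular. With the diamond operator the JDK form is
    // equally concise, so the Guava indirection no longer buys anything:
    Map<String, Object> responseContext = new HashMap<>();
    responseContext.put("queryId", "dummy");
    System.out.println(responseContext);
  }
}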
diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/GroupByTypeInterfaceBenchmark.java b/benchmarks/src/main/java/org/apache/druid/benchmark/GroupByTypeInterfaceBenchmark.java
index 419cfe42faf..bf9064dc484 100644
--- a/benchmarks/src/main/java/org/apache/druid/benchmark/GroupByTypeInterfaceBenchmark.java
+++ b/benchmarks/src/main/java/org/apache/druid/benchmark/GroupByTypeInterfaceBenchmark.java
@@ -24,7 +24,6 @@ import com.fasterxml.jackson.dataformat.smile.SmileFactory;
 import com.google.common.base.Supplier;
 import com.google.common.base.Suppliers;
 import com.google.common.base.Throwables;
-import com.google.common.collect.Maps;
 import com.google.common.io.Files;
 import org.apache.commons.io.FileUtils;
 import org.apache.druid.benchmark.datagen.BenchmarkDataGenerator;
@@ -94,6 +93,7 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
@@ -451,7 +451,7 @@ public class GroupByTypeInterfaceBenchmark
         toolChest
     );
-    Sequence queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap());
+    Sequence queryResult = theRunner.run(QueryPlus.wrap(query), new HashMap<>());
     return queryResult.toList();
   }
diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/MergeSequenceBenchmark.java b/benchmarks/src/main/java/org/apache/druid/benchmark/MergeSequenceBenchmark.java
index bb15b896ddb..150bddcb8ef 100644
--- a/benchmarks/src/main/java/org/apache/druid/benchmark/MergeSequenceBenchmark.java
+++ b/benchmarks/src/main/java/org/apache/druid/benchmark/MergeSequenceBenchmark.java
@@ -19,7 +19,6 @@ package org.apache.druid.benchmark;
-import com.google.common.collect.Lists;
 import com.google.common.collect.Ordering;
 import com.google.common.primitives.Ints;
 import org.apache.druid.java.util.common.guava.MergeSequence;
@@ -64,7 +63,7 @@ public class MergeSequenceBenchmark
   public void setup()
   {
     Random rand = ThreadLocalRandom.current();
-    sequences = Lists.newArrayList();
+    sequences = new ArrayList<>();
     for (int i = 0; i < count; i++) {
       int[] sequence = new int[sequenceLength];
       for (int j = 0; j < sequenceLength; j++) {
diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/TopNTypeInterfaceBenchmark.java b/benchmarks/src/main/java/org/apache/druid/benchmark/TopNTypeInterfaceBenchmark.java
index e580ad455f1..3b07c7bd166 100644
--- a/benchmarks/src/main/java/org/apache/druid/benchmark/TopNTypeInterfaceBenchmark.java
+++ b/benchmarks/src/main/java/org/apache/druid/benchmark/TopNTypeInterfaceBenchmark.java
@@ -20,7 +20,6 @@ package org.apache.druid.benchmark;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.Maps;
 import com.google.common.io.Files;
 import org.apache.druid.benchmark.datagen.BenchmarkDataGenerator;
 import org.apache.druid.benchmark.datagen.BenchmarkSchemaInfo;
@@ -86,6 +85,7 @@ import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
@@ -319,7 +319,7 @@ public class TopNTypeInterfaceBenchmark
         toolChest
     );
-    Sequence queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap());
+    Sequence queryResult = theRunner.run(QueryPlus.wrap(query), new HashMap<>());
     return queryResult.toList();
   }
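One behavioral note on the replacements above: Guava documents Lists.newArrayList() as nothing more than a call to the ArrayList constructor, so the change is a no-op at runtime. The ban list added by this patch (shown further down) covers only the zero-argument factories; capacity-hint helpers such as Maps.newHashMapWithExpectedSize are not on the list shown, presumably because the JDK has no direct one-liner for them. A sketch under those assumptions (names illustrative):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class CapacityHints
{
  public static void main(String[] args)
  {
    int numSegments = 16;

    // Same class and default capacity as Lists.newArrayList():
    List<String> runners = new ArrayList<>();

    // A pre-sized ArrayList is still trivial with the JDK:
    List<String> sized = new ArrayList<>(numSegments);

    // HashMap is subtler: its constructor argument is an initial table
    // capacity, not an expected element count, so to avoid rehashing you
    // account for the default 0.75 load factor yourself (this is the math
    // Guava's newHashMapWithExpectedSize performs internally):
    Map<String, Integer> byName = new HashMap<>((int) (numSegments / 0.75f) + 1);

    for (int i = 0; i < numSegments; i++) {
      runners.add("qIndex" + i);
      sized.add("qIndex" + i);
      byName.put("qIndex" + i, i);
    }
    System.out.println(runners.size() + " " + sized.size() + " " + byName.size());
  }
}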
diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/query/GroupByBenchmark.java b/benchmarks/src/main/java/org/apache/druid/benchmark/query/GroupByBenchmark.java
index 10bee8d8e6f..65365f850f6 100644
--- a/benchmarks/src/main/java/org/apache/druid/benchmark/query/GroupByBenchmark.java
+++ b/benchmarks/src/main/java/org/apache/druid/benchmark/query/GroupByBenchmark.java
@@ -26,8 +26,6 @@ import com.google.common.base.Suppliers;
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import com.google.common.io.Files;
 import org.apache.commons.io.FileUtils;
 import org.apache.druid.benchmark.datagen.BenchmarkDataGenerator;
@@ -103,6 +101,7 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
@@ -547,7 +546,7 @@ public class GroupByBenchmark
         toolChest
     );
-    Sequence queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap());
+    Sequence queryResult = theRunner.run(QueryPlus.wrap(query), new HashMap<>());
     return queryResult.toList();
   }
@@ -600,7 +599,7 @@ public class GroupByBenchmark
         (QueryToolChest) toolChest
     );
-    Sequence queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap());
+    Sequence queryResult = theRunner.run(QueryPlus.wrap(query), new HashMap<>());
     List results = queryResult.toList();
     for (Row result : results) {
@@ -624,7 +623,7 @@ public class GroupByBenchmark
     final GroupByQuery spillingQuery = query.withOverriddenContext(
         ImmutableMap.of("bufferGrouperMaxSize", 4000)
     );
-    Sequence queryResult = theRunner.run(QueryPlus.wrap(spillingQuery), Maps.newHashMap());
+    Sequence queryResult = theRunner.run(QueryPlus.wrap(spillingQuery), new HashMap<>());
     List results = queryResult.toList();
     for (Row result : results) {
@@ -651,7 +650,7 @@ public class GroupByBenchmark
         (QueryToolChest) toolChest
     );
-    Sequence queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap());
+    Sequence queryResult = theRunner.run(QueryPlus.wrap(query), new HashMap<>());
     List results = queryResult.toList();
     for (Row result : results) {
@@ -661,7 +660,7 @@ public class GroupByBenchmark
   private List> makeMultiRunners()
   {
-    List> runners = Lists.newArrayList();
+    List> runners = new ArrayList<>();
     for (int i = 0; i < numSegments; i++) {
       String segmentName = "qIndex" + i;
       QueryRunner runner = QueryBenchmarkUtil.makeQueryRunner(
diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/query/SearchBenchmark.java b/benchmarks/src/main/java/org/apache/druid/benchmark/query/SearchBenchmark.java
index c945c964ac9..3b2f6679c9d 100644
--- a/benchmarks/src/main/java/org/apache/druid/benchmark/query/SearchBenchmark.java
+++ b/benchmarks/src/main/java/org/apache/druid/benchmark/query/SearchBenchmark.java
@@ -24,7 +24,6 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Suppliers;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import com.google.common.io.Files;
 import org.apache.commons.io.FileUtils;
 import org.apache.druid.benchmark.datagen.BenchmarkDataGenerator;
@@ -96,6 +95,7 @@ import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
@@ -199,21 +199,21 @@ public class SearchBenchmark
   {
     final QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval()));
-    final List dimUniformFilterVals = Lists.newArrayList();
+    final List dimUniformFilterVals = new ArrayList<>();
     int resultNum = (int) (100000 * 0.1);
     int step = 100000 / resultNum;
     for (int i = 1; i < 100001 && dimUniformFilterVals.size() < resultNum; i += step) {
       dimUniformFilterVals.add(String.valueOf(i));
     }
-    List dimHyperUniqueFilterVals = Lists.newArrayList();
+    List dimHyperUniqueFilterVals = new ArrayList<>();
     resultNum = (int) (100000 * 0.1);
     step = 100000 / resultNum;
     for (int i = 0; i < 100001 && dimHyperUniqueFilterVals.size() < resultNum; i += step) {
       dimHyperUniqueFilterVals.add(String.valueOf(i));
     }
-    final List dimFilters = Lists.newArrayList();
+    final List dimFilters = new ArrayList<>();
     dimFilters.add(new InDimFilter("dimUniform", dimUniformFilterVals, null));
     dimFilters.add(new InDimFilter("dimHyperUnique", dimHyperUniqueFilterVals, null));
@@ -230,7 +230,7 @@ public class SearchBenchmark
   {
     final QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval()));
-    final List dimUniformFilterVals = Lists.newArrayList();
+    final List dimUniformFilterVals = new ArrayList<>();
     final int resultNum = (int) (100000 * 0.1);
     final int step = 100000 / resultNum;
     for (int i = 1; i < 100001 && dimUniformFilterVals.size() < resultNum; i += step) {
@@ -238,7 +238,7 @@ public class SearchBenchmark
     }
     final String dimName = "dimUniform";
-    final List dimFilters = Lists.newArrayList();
+    final List dimFilters = new ArrayList<>();
     dimFilters.add(new InDimFilter(dimName, dimUniformFilterVals, IdentityExtractionFn.getInstance()));
     dimFilters.add(new SelectorDimFilter(dimName, "3", StrlenExtractionFn.instance()));
     dimFilters.add(new BoundDimFilter(dimName, "100", "10000", true, true, true, new DimExtractionFn()
@@ -284,7 +284,7 @@ public class SearchBenchmark
   {
     final QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval()));
-    final List dimUniformFilterVals = Lists.newArrayList();
+    final List dimUniformFilterVals = new ArrayList<>();
     final int resultNum = (int) (100000 * 0.1);
     final int step = 100000 / resultNum;
     for (int i = 1; i < 100001 && dimUniformFilterVals.size() < resultNum; i += step) {
@@ -292,7 +292,7 @@ public class SearchBenchmark
     }
     final String dimName = "dimUniform";
-    final List dimFilters = Lists.newArrayList();
+    final List dimFilters = new ArrayList<>();
     dimFilters.add(new InDimFilter(dimName, dimUniformFilterVals, null));
     dimFilters.add(new SelectorDimFilter(dimName, "3", null));
     dimFilters.add(new BoundDimFilter(dimName, "100", "10000", true, true, true, null, null));
@@ -402,7 +402,7 @@ public class SearchBenchmark
         toolChest
     );
-    Sequence queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap());
+    Sequence queryResult = theRunner.run(QueryPlus.wrap(query), new HashMap<>());
     return queryResult.toList();
   }
@@ -448,7 +448,7 @@ public class SearchBenchmark
   @OutputTimeUnit(TimeUnit.MICROSECONDS)
   public void queryMultiQueryableIndex(Blackhole blackhole)
   {
-    List> singleSegmentRunners = Lists.newArrayList();
+    List> singleSegmentRunners = new ArrayList<>();
     QueryToolChest toolChest = factory.getToolchest();
     for (int i = 0; i < numSegments; i++) {
       String segmentName = "qIndex" + i;
@@ -469,7 +469,7 @@ public class SearchBenchmark
     Sequence> queryResult = theRunner.run(
         QueryPlus.wrap(query),
-        Maps.newHashMap()
+        new HashMap<>()
     );
     List> results = queryResult.toList();
diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/query/SelectBenchmark.java b/benchmarks/src/main/java/org/apache/druid/benchmark/query/SelectBenchmark.java
index 766b8c18f0f..c62aa1d0538 100644
--- a/benchmarks/src/main/java/org/apache/druid/benchmark/query/SelectBenchmark.java
+++ b/benchmarks/src/main/java/org/apache/druid/benchmark/query/SelectBenchmark.java
@@ -22,8 +22,6 @@ package org.apache.druid.benchmark.query;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Supplier;
 import com.google.common.base.Suppliers;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import com.google.common.io.Files;
 import org.apache.commons.io.FileUtils;
 import org.apache.druid.benchmark.datagen.BenchmarkDataGenerator;
@@ -266,7 +264,7 @@ public class SelectBenchmark
         toolChest
     );
-    Sequence queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap());
+    Sequence queryResult = theRunner.run(QueryPlus.wrap(query), new HashMap<>());
     return queryResult.toList();
   }
@@ -353,7 +351,7 @@ public class SelectBenchmark
     SelectQuery queryCopy = query.withPagingSpec(PagingSpec.newSpec(pagingThreshold));
     String segmentName;
-    List>> singleSegmentRunners = Lists.newArrayList();
+    List>> singleSegmentRunners = new ArrayList<>();
     QueryToolChest toolChest = factory.getToolchest();
     for (int i = 0; i < numSegments; i++) {
       segmentName = "qIndex" + i;
@@ -375,7 +373,7 @@ public class SelectBenchmark
     boolean done = false;
     while (!done) {
-      Sequence> queryResult = theRunner.run(QueryPlus.wrap(queryCopy), Maps.newHashMap());
+      Sequence> queryResult = theRunner.run(QueryPlus.wrap(queryCopy), new HashMap<>());
       List> results = queryResult.toList();
       SelectResultValue result = results.get(0).getValue();
diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/query/SqlBenchmark.java b/benchmarks/src/main/java/org/apache/druid/benchmark/query/SqlBenchmark.java
index 7971b549c47..50e0fe6e476 100644
--- a/benchmarks/src/main/java/org/apache/druid/benchmark/query/SqlBenchmark.java
+++ b/benchmarks/src/main/java/org/apache/druid/benchmark/query/SqlBenchmark.java
@@ -19,7 +19,6 @@ package org.apache.druid.benchmark.query;
-import com.google.common.collect.Maps;
 import com.google.common.io.Files;
 import org.apache.commons.io.FileUtils;
 import org.apache.druid.benchmark.datagen.BenchmarkSchemaInfo;
@@ -66,6 +65,7 @@ import org.openjdk.jmh.annotations.Warmup;
 import org.openjdk.jmh.infra.Blackhole;

 import java.io.File;
+import java.util.HashMap;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
@@ -170,7 +170,7 @@ public class SqlBenchmark
   @OutputTimeUnit(TimeUnit.MILLISECONDS)
   public void queryNative(Blackhole blackhole)
   {
-    final Sequence resultSequence = QueryPlus.wrap(groupByQuery).run(walker, Maps.newHashMap());
+    final Sequence resultSequence = QueryPlus.wrap(groupByQuery).run(walker, new HashMap<>());
     final List resultList = resultSequence.toList();
     for (Row row : resultList) {
diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/query/TimeseriesBenchmark.java b/benchmarks/src/main/java/org/apache/druid/benchmark/query/TimeseriesBenchmark.java
index 287db1acf34..ba5dd38578c 100644
--- a/benchmarks/src/main/java/org/apache/druid/benchmark/query/TimeseriesBenchmark.java
+++ b/benchmarks/src/main/java/org/apache/druid/benchmark/query/TimeseriesBenchmark.java
@@ -20,8 +20,6 @@ package org.apache.druid.benchmark.query;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import com.google.common.io.Files;
 import org.apache.commons.io.FileUtils;
 import org.apache.druid.benchmark.datagen.BenchmarkDataGenerator;
@@ -91,6 +89,7 @@ import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
@@ -326,7 +325,7 @@ public class TimeseriesBenchmark
         toolChest
     );
-    Sequence queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap());
+    Sequence queryResult = theRunner.run(QueryPlus.wrap(query), new HashMap<>());
     return queryResult.toList();
   }
@@ -389,7 +388,7 @@ public class TimeseriesBenchmark
   @OutputTimeUnit(TimeUnit.MICROSECONDS)
   public void queryMultiQueryableIndex(Blackhole blackhole)
   {
-    List>> singleSegmentRunners = Lists.newArrayList();
+    List>> singleSegmentRunners = new ArrayList<>();
     QueryToolChest toolChest = factory.getToolchest();
     for (int i = 0; i < numSegments; i++) {
       String segmentName = "qIndex" + i;
@@ -410,7 +409,7 @@ public class TimeseriesBenchmark
     Sequence> queryResult = theRunner.run(
         QueryPlus.wrap(query),
-        Maps.newHashMap()
+        new HashMap<>()
     );
     List> results = queryResult.toList();
diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/query/TopNBenchmark.java b/benchmarks/src/main/java/org/apache/druid/benchmark/query/TopNBenchmark.java
index a1cddc735a6..36aae53ec11 100644
--- a/benchmarks/src/main/java/org/apache/druid/benchmark/query/TopNBenchmark.java
+++ b/benchmarks/src/main/java/org/apache/druid/benchmark/query/TopNBenchmark.java
@@ -20,8 +20,6 @@ package org.apache.druid.benchmark.query;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import com.google.common.io.Files;
 import org.apache.commons.io.FileUtils;
 import org.apache.druid.benchmark.datagen.BenchmarkDataGenerator;
@@ -88,6 +86,7 @@ import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
@@ -307,7 +306,7 @@ public class TopNBenchmark
         toolChest
     );
-    Sequence queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap());
+    Sequence queryResult = theRunner.run(QueryPlus.wrap(query), new HashMap<>());
     return queryResult.toList();
   }
@@ -350,7 +349,7 @@ public class TopNBenchmark
   @OutputTimeUnit(TimeUnit.MICROSECONDS)
   public void queryMultiQueryableIndex(Blackhole blackhole)
   {
-    List>> singleSegmentRunners = Lists.newArrayList();
+    List>> singleSegmentRunners = new ArrayList<>();
     QueryToolChest toolChest = factory.getToolchest();
     for (int i = 0; i < numSegments; i++) {
       String segmentName = "qIndex" + i;
@@ -371,7 +370,7 @@ public class TopNBenchmark
     Sequence> queryResult = theRunner.run(
         QueryPlus.wrap(query),
-        Maps.newHashMap()
+        new HashMap<>()
     );
     List> results = queryResult.toList();
diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/query/timecompare/TimeCompareBenchmark.java b/benchmarks/src/main/java/org/apache/druid/benchmark/query/timecompare/TimeCompareBenchmark.java
index 0604ec45f9b..63533a995a9 100644
--- a/benchmarks/src/main/java/org/apache/druid/benchmark/query/timecompare/TimeCompareBenchmark.java
+++ b/benchmarks/src/main/java/org/apache/druid/benchmark/query/timecompare/TimeCompareBenchmark.java
@@ -21,7 +21,6 @@ package org.apache.druid.benchmark.query.timecompare;
 import com.fasterxml.jackson.databind.InjectableValues;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.Lists;
 import com.google.common.io.Files;
 import org.apache.commons.io.FileUtils;
 import org.apache.druid.benchmark.datagen.BenchmarkDataGenerator;
@@ -353,7 +352,7 @@ public class TimeCompareBenchmark
       qIndexes.add(qIndex);
     }
-    List>> singleSegmentRunners = Lists.newArrayList();
+    List>> singleSegmentRunners = new ArrayList<>();
     QueryToolChest toolChest = topNFactory.getToolchest();
     for (int i = 0; i < numSegments; i++) {
       String segmentName = "qIndex" + i;
@@ -379,7 +378,7 @@ public class TimeCompareBenchmark
         )
     );
-    List>> singleSegmentRunnersT = Lists.newArrayList();
+    List>> singleSegmentRunnersT = new ArrayList<>();
     QueryToolChest toolChestT = timeseriesFactory.getToolchest();
     for (int i = 0; i < numSegments; i++) {
       String segmentName = "qIndex" + i;
diff --git a/codestyle/druid-forbidden-apis.txt b/codestyle/druid-forbidden-apis.txt
index 49b00fd49ee..eaa0673ba53 100644
--- a/codestyle/druid-forbidden-apis.txt
+++ b/codestyle/druid-forbidden-apis.txt
@@ -1,14 +1,27 @@
-com.google.common.collect.MapMaker @ Create java.util.concurrent.ConcurrentHashMap directly
-com.google.common.collect.Maps#newConcurrentMap() @ Create java.util.concurrent.ConcurrentHashMap directly
-com.google.common.util.concurrent.Futures#transform(com.google.common.util.concurrent.ListenableFuture, com.google.common.util.concurrent.AsyncFunction) @ Use org.apache.druid.java.util.common.concurrent.ListenableFutures#transformAsync
-com.google.common.collect.Iterators#emptyIterator() @ Use java.util.Collections#emptyIterator()
-com.google.common.base.Charsets @ Use java.nio.charset.StandardCharsets instead
-java.io.File#toURL() @ Use java.io.File#toURI() and java.net.URI#toURL() instead
-org.apache.commons.io.FileUtils#getTempDirectory() @ Use org.junit.rules.TemporaryFolder for tests instead
-java.util.LinkedList @ Use ArrayList or ArrayDeque instead
-com.google.common.collect.Lists#newLinkedList() @ Use ArrayList or ArrayDeque instead
-com.google.common.collect.Lists#newLinkedList(java.lang.Iterable) @ Use ArrayList or ArrayDeque instead
-java.util.Random#<init>() @ Use ThreadLocalRandom.current() or the constructor with a seed (the latter in tests only!)
+com.fasterxml.jackson.databind.ObjectMapper#reader(com.fasterxml.jackson.core.type.TypeReference) @ Use ObjectMapper#readerFor instead
 com.fasterxml.jackson.databind.ObjectMapper#reader(com.fasterxml.jackson.databind.JavaType) @ Use ObjectMapper#readerFor instead
 com.fasterxml.jackson.databind.ObjectMapper#reader(java.lang.Class) @ Use ObjectMapper#readerFor instead
-com.fasterxml.jackson.databind.ObjectMapper#reader(com.fasterxml.jackson.core.type.TypeReference) @ Use ObjectMapper#readerFor instead
\ No newline at end of file
+com.google.common.base.Charsets @ Use java.nio.charset.StandardCharsets instead
+com.google.common.collect.Iterators#emptyIterator() @ Use java.util.Collections#emptyIterator()
+com.google.common.collect.Lists#newArrayList() @ Create java.util.ArrayList directly
+com.google.common.collect.Lists#newLinkedList() @ Use ArrayList or ArrayDeque instead
+com.google.common.collect.Lists#newLinkedList(java.lang.Iterable) @ Use ArrayList or ArrayDeque instead
+com.google.common.collect.MapMaker @ Create java.util.concurrent.ConcurrentHashMap directly
+com.google.common.collect.Maps#newConcurrentMap() @ Create java.util.concurrent.ConcurrentHashMap directly
+com.google.common.collect.Maps#newHashMap() @ Create java.util.HashMap directly
+com.google.common.collect.Maps#newHashMap(java.util.Map) @ Create java.util.HashMap directly
+com.google.common.collect.Maps#newTreeMap() @ Create java.util.TreeMap directly
+com.google.common.collect.Maps#newTreeMap(java.util.Comparator) @ Create java.util.TreeMap directly
+com.google.common.collect.Maps#newTreeMap(java.util.SortedMap) @ Create java.util.TreeMap directly
+com.google.common.collect.Queues#newArrayDeque() @ Create java.util.ArrayDeque directly
+com.google.common.collect.Queues#newConcurrentLinkedQueue() @ Create java.util.concurrent.ConcurrentLinkedQueue directly
+com.google.common.collect.Queues#newLinkedBlockingQueue() @ Create java.util.concurrent.LinkedBlockingQueue directly
+com.google.common.collect.Sets#newHashSet() @ Create java.util.HashSet directly
+com.google.common.collect.Sets#newLinkedHashSet() @ Create java.util.LinkedHashSet directly
+com.google.common.collect.Sets#newTreeSet() @ Create java.util.TreeSet directly
+com.google.common.collect.Sets#newTreeSet(java.util.Comparator) @ Create java.util.TreeSet directly
+com.google.common.util.concurrent.Futures#transform(com.google.common.util.concurrent.ListenableFuture, com.google.common.util.concurrent.AsyncFunction) @ Use org.apache.druid.java.util.common.concurrent.ListenableFutures#transformAsync
+java.io.File#toURL() @ Use java.io.File#toURI() and java.net.URI#toURL() instead
+java.util.LinkedList @ Use ArrayList or ArrayDeque instead
+java.util.Random#<init>() @ Use ThreadLocalRandom.current() or the constructor with a seed (the latter in tests only!)
+org.apache.commons.io.FileUtils#getTempDirectory() @ Use org.junit.rules.TemporaryFolder for tests instead
\ No newline at end of file
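Each line of this signatures file is a fully qualified class or method signature (with #<init>() denoting a constructor and a bare class name banning the whole class, as with java.util.LinkedList), followed by '@' and the message reported on violation. The file is consumed by the forbidden-apis checker during the build, so a new call to a banned factory fails the build rather than slipping through review; the exact build wiring is not part of this patch. A hedged sketch of what the check enforces after this change (the error text is approximate):

import java.util.HashSet;
import java.util.Set;

public class ForbiddenApisDemo
{
  public static void main(String[] args)
  {
    // With the expanded list, this line would now fail the build with
    // something like:
    //   Forbidden method invocation: com.google.common.collect.Sets#newHashSet()
    //   [Create java.util.HashSet directly]
    //
    //   Set<String> banned = Sets.newHashSet();
    //
    // The replacement is simply the JDK constructor:
    Set<String> allowed = new HashSet<>();
    allowed.add("ok");
    System.out.println(allowed);
  }
}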
diff --git a/core/src/main/java/org/apache/druid/data/input/Rows.java b/core/src/main/java/org/apache/druid/data/input/Rows.java
index c73b2bbec38..55e060edc0c 100644
--- a/core/src/main/java/org/apache/druid/data/input/Rows.java
+++ b/core/src/main/java/org/apache/druid/data/input/Rows.java
@@ -21,7 +21,6 @@ package org.apache.druid.data.input;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableSortedSet;
-import com.google.common.collect.Maps;
 import com.google.common.primitives.Longs;
 import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.java.util.common.StringUtils;
@@ -32,6 +31,7 @@ import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.TreeMap;
 import java.util.stream.Collectors;

 /**
@@ -47,7 +47,7 @@ public class Rows
   */
  public static List toGroupKey(long timeStamp, InputRow inputRow)
  {
-    final Map> dims = Maps.newTreeMap();
+    final Map> dims = new TreeMap<>();
     for (final String dim : inputRow.getDimensions()) {
       final Set dimValues = ImmutableSortedSet.copyOf(inputRow.getDimension(dim));
       if (dimValues.size() > 0) {
diff --git a/core/src/main/java/org/apache/druid/data/input/impl/DimensionsSpec.java b/core/src/main/java/org/apache/druid/data/input/impl/DimensionsSpec.java
index b3373a40248..ab23b4a32e2 100644
--- a/core/src/main/java/org/apache/druid/data/input/impl/DimensionsSpec.java
+++ b/core/src/main/java/org/apache/druid/data/input/impl/DimensionsSpec.java
@@ -32,7 +32,9 @@ import org.apache.druid.guice.annotations.PublicApi;
 import org.apache.druid.java.util.common.parsers.ParserUtils;

 import javax.annotation.Nullable;
+import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -75,15 +77,15 @@ public class DimensionsSpec
   )
   {
     this.dimensions = dimensions == null
-                      ? Lists.newArrayList()
+                      ? new ArrayList<>()
                       : Lists.newArrayList(dimensions);

     this.dimensionExclusions = (dimensionExclusions == null)
-                               ? Sets.newHashSet()
+                               ? new HashSet<>()
                                : Sets.newHashSet(dimensionExclusions);

     List spatialDims = (spatialDimensions == null)
-                       ? Lists.newArrayList()
+                       ? new ArrayList<>()
                        : spatialDimensions;

     verify(spatialDims);
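The DimensionsSpec diff above illustrates where the line was drawn: only the zero-argument factories were banned, while the copying factories (Lists.newArrayList(dimensions), Sets.newHashSet(dimensionExclusions)) are left in place. The likely reason is that ArrayList's copy constructor only accepts a Collection, whereas Guava's factory accepts any Iterable. A minimal sketch of that distinction (names illustrative):

import com.google.common.collect.Lists;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class CopyFactoryExample
{
  public static void main(String[] args)
  {
    Iterable<String> dims = Arrays.asList("host", "service");

    // Banned: the no-arg factory, identical to the JDK constructor.
    List<String> empty = new ArrayList<>();   // was Lists.newArrayList()

    // Not banned: the copying factory. The JDK copy constructor requires
    // a Collection, while Guava's factory takes any Iterable, so it still
    // carries its weight here.
    List<String> copy = Lists.newArrayList(dims);

    empty.addAll(copy);
    System.out.println(empty);
  }
}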
diff --git a/core/src/main/java/org/apache/druid/guice/JsonConfigurator.java b/core/src/main/java/org/apache/druid/guice/JsonConfigurator.java
index d08bd701245..ee83e8ab3f6 100644
--- a/core/src/main/java/org/apache/druid/guice/JsonConfigurator.java
+++ b/core/src/main/java/org/apache/druid/guice/JsonConfigurator.java
@@ -29,8 +29,6 @@ import com.google.common.base.Function;
 import com.google.common.base.Strings;
 import com.google.common.base.Throwables;
 import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import com.google.inject.Inject;
 import com.google.inject.ProvisionException;
 import com.google.inject.spi.Message;
@@ -43,6 +41,7 @@ import javax.validation.Path;
 import javax.validation.Validator;
 import java.io.IOException;
 import java.lang.reflect.Field;
+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
@@ -76,7 +75,7 @@ public class JsonConfigurator
     // Make it end with a period so we only include properties with sub-object thingies.
     final String propertyBase = propertyPrefix.endsWith(".") ? propertyPrefix : propertyPrefix + ".";
-    Map jsonMap = Maps.newHashMap();
+    Map jsonMap = new HashMap<>();
     for (String prop : props.stringPropertyNames()) {
       if (prop.startsWith(propertyBase)) {
         final String propValue = props.getProperty(prop);
@@ -109,7 +108,7 @@ public class JsonConfigurator
     final Set> violations = validator.validate(config);
     if (!violations.isEmpty()) {
-      List messages = Lists.newArrayList();
+      List messages = new ArrayList<>();
       for (ConstraintViolation violation : violations) {
         StringBuilder path = new StringBuilder();
diff --git a/core/src/main/java/org/apache/druid/guice/ListProvider.java b/core/src/main/java/org/apache/druid/guice/ListProvider.java
index fb97128a73d..f4fd2b6dbe9 100644
--- a/core/src/main/java/org/apache/druid/guice/ListProvider.java
+++ b/core/src/main/java/org/apache/druid/guice/ListProvider.java
@@ -25,13 +25,14 @@ import com.google.inject.Injector;
 import com.google.inject.Key;
 import com.google.inject.Provider;

+import java.util.ArrayList;
 import java.util.List;

 /**
  */
 public class ListProvider implements Provider>
 {
-  private final List> itemsToLoad = Lists.newArrayList();
+  private final List> itemsToLoad = new ArrayList<>();
   private Injector injector;

   public ListProvider add(Class clazz)
diff --git a/core/src/main/java/org/apache/druid/indexer/TaskMetricsUtils.java b/core/src/main/java/org/apache/druid/indexer/TaskMetricsUtils.java
index 67f63c7e620..8407759fc69 100644
--- a/core/src/main/java/org/apache/druid/indexer/TaskMetricsUtils.java
+++ b/core/src/main/java/org/apache/druid/indexer/TaskMetricsUtils.java
@@ -19,8 +19,7 @@ package org.apache.druid.indexer;
-import com.google.common.collect.Maps;
-
+import java.util.HashMap;
 import java.util.Map;

 public class TaskMetricsUtils
@@ -37,7 +36,7 @@ public class TaskMetricsUtils
       long rowsThrownAway
   )
   {
-    Map metricsMap = Maps.newHashMap();
+    Map metricsMap = new HashMap<>();
     metricsMap.put(ROWS_PROCESSED, rowsProcessed);
     metricsMap.put(ROWS_PROCESSED_WITH_ERRORS, rowsProcessedWithErrors);
     metricsMap.put(ROWS_UNPARSEABLE, rowsUnparseable);
diff --git a/core/src/main/java/org/apache/druid/java/util/common/FileUtils.java b/core/src/main/java/org/apache/druid/java/util/common/FileUtils.java
index dca41e4261b..27af2697639 100644
--- a/core/src/main/java/org/apache/druid/java/util/common/FileUtils.java
+++ b/core/src/main/java/org/apache/druid/java/util/common/FileUtils.java
@@ -21,7 +21,6 @@ package org.apache.druid.java.util.common;
 import com.google.common.base.Predicate;
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
 import com.google.common.io.ByteSource;
 import com.google.common.io.Files;
@@ -34,6 +33,7 @@ import java.io.OutputStream;
 import java.nio.MappedByteBuffer;
 import java.nio.channels.FileChannel;
 import java.nio.file.StandardCopyOption;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.UUID;
@@ -91,7 +91,7 @@ public class FileUtils
   */
  public static class FileCopyResult
  {
-    private final Collection files = Lists.newArrayList();
+    private final Collection files = new ArrayList<>();
     private long size = 0L;

     public Collection getFiles()
diff --git a/core/src/main/java/org/apache/druid/java/util/common/JodaUtils.java b/core/src/main/java/org/apache/druid/java/util/common/JodaUtils.java
index 75998cd7d35..a2256936b83 100644
--- a/core/src/main/java/org/apache/druid/java/util/common/JodaUtils.java
+++ b/core/src/main/java/org/apache/druid/java/util/common/JodaUtils.java
@@ -19,8 +19,6 @@ package org.apache.druid.java.util.common;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
 import org.apache.druid.java.util.common.guava.Comparators;
 import org.joda.time.DateTime;
 import org.joda.time.Interval;
@@ -28,6 +26,7 @@ import org.joda.time.Interval;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.SortedSet;
+import java.util.TreeSet;

 /**
  */
@@ -39,21 +38,21 @@ public class JodaUtils
   public static ArrayList condenseIntervals(Iterable intervals)
   {
-    ArrayList retVal = Lists.newArrayList();
+    ArrayList retVal = new ArrayList<>();

     final SortedSet sortedIntervals;

     if (intervals instanceof SortedSet) {
       sortedIntervals = (SortedSet) intervals;
     } else {
-      sortedIntervals = Sets.newTreeSet(Comparators.intervalsByStartThenEnd());
+      sortedIntervals = new TreeSet<>(Comparators.intervalsByStartThenEnd());
       for (Interval interval : intervals) {
         sortedIntervals.add(interval);
       }
     }

     if (sortedIntervals.isEmpty()) {
-      return Lists.newArrayList();
+      return new ArrayList<>();
     }

     Iterator intervalsIter = sortedIntervals.iterator();
@@ -82,8 +81,8 @@ public class JodaUtils
   public static Interval umbrellaInterval(Iterable intervals)
   {
-    ArrayList startDates = Lists.newArrayList();
-    ArrayList endDates = Lists.newArrayList();
+    ArrayList startDates = new ArrayList<>();
+    ArrayList endDates = new ArrayList<>();

     for (Interval interval : intervals) {
       startDates.add(interval.getStart());
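JodaUtils also shows the comparator-carrying variant of the migration: Sets.newTreeSet(Comparators.intervalsByStartThenEnd()) becomes new TreeSet<>(Comparators.intervalsByStartThenEnd()), since the JDK constructor accepts the comparator directly. A minimal standalone sketch of the same shape, with a stand-in comparator because Druid's Comparators class is not reproduced here:

import java.util.Comparator;
import java.util.SortedSet;
import java.util.TreeSet;

public class ComparatorTreeSetExample
{
  public static void main(String[] args)
  {
    // Equivalent to Sets.newTreeSet(comparator): the ordering is supplied
    // to the constructor and applied on every insertion.
    SortedSet<String> sorted = new TreeSet<>(Comparator.comparing((String s) -> s.length()));
    sorted.add("2018-10-01/2018-10-02");
    sorted.add("x");
    System.out.println(sorted.first()); // prints "x": shortest element first
  }
}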
((PeriodGranularity) gran0).getTimeZone() : null; for (GranularityType gran : GranularityType.values()) { diff --git a/core/src/main/java/org/apache/druid/java/util/common/guava/MergeIterable.java b/core/src/main/java/org/apache/druid/java/util/common/guava/MergeIterable.java index f57d31b1849..21c36de1321 100644 --- a/core/src/main/java/org/apache/druid/java/util/common/guava/MergeIterable.java +++ b/core/src/main/java/org/apache/druid/java/util/common/guava/MergeIterable.java @@ -19,8 +19,7 @@ package org.apache.druid.java.util.common.guava; -import com.google.common.collect.Lists; - +import java.util.ArrayList; import java.util.Comparator; import java.util.Iterator; import java.util.List; @@ -44,7 +43,7 @@ public class MergeIterable implements Iterable @Override public Iterator iterator() { - List> iterators = Lists.newArrayList(); + List> iterators = new ArrayList<>(); for (Iterable baseIterable : baseIterables) { iterators.add(baseIterable.iterator()); } diff --git a/core/src/main/java/org/apache/druid/java/util/common/io/smoosh/FileSmoosher.java b/core/src/main/java/org/apache/druid/java/util/common/io/smoosh/FileSmoosher.java index 887bf9e47ed..a81fc0a9725 100644 --- a/core/src/main/java/org/apache/druid/java/util/common/io/smoosh/FileSmoosher.java +++ b/core/src/main/java/org/apache/druid/java/util/common/io/smoosh/FileSmoosher.java @@ -21,8 +21,6 @@ package org.apache.druid.java.util.common.io.smoosh; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.primitives.Ints; import org.apache.druid.java.util.common.FileUtils; import org.apache.druid.java.util.common.IAE; @@ -50,6 +48,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.TreeMap; /** * A class that concatenates files together into configurable sized chunks, @@ -77,12 +76,12 @@ public class FileSmoosher implements Closeable private final File baseDir; private final int maxChunkSize; - private final List outFiles = Lists.newArrayList(); - private final Map internalFiles = Maps.newTreeMap(); + private final List outFiles = new ArrayList<>(); + private final Map internalFiles = new TreeMap<>(); // list of files completed writing content using delegated smooshedWriter. - private List completedFiles = Lists.newArrayList(); + private List completedFiles = new ArrayList<>(); // list of files in process writing content using delegated smooshedWriter. - private List filesInProcess = Lists.newArrayList(); + private List filesInProcess = new ArrayList<>(); private Outer currOut = null; private boolean writerCurrentlyInUse = false; @@ -250,7 +249,7 @@ public class FileSmoosher implements Closeable { // Get processed elements from the stack and write. 
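
The bulk of this patch is the mechanical substitution seen in the hunks above: Guava's collection factory methods predate the Java 7 diamond operator, which now gives the same type inference with no third-party dependency. A minimal before/after sketch of the pattern; the class and field names here are illustrative, not taken from the patch:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class GuavaMigrationSketch
{
  // Before (Guava), type arguments were inferred by the static factory:
  //   List<String> names = Lists.newArrayList();
  //   Map<String, Object> attrs = Maps.newHashMap();

  // After (JDK 7+), the diamond operator infers them from the declaration:
  private final List<String> names = new ArrayList<>();
  private final Map<String, Object> attrs = new HashMap<>();
}

The forbidden-apis list updated by this patch (codestyle/druid-forbidden-apis.txt) is what keeps new code from reintroducing the Guava forms.
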
List fileToProcess = new ArrayList<>(completedFiles); - completedFiles = Lists.newArrayList(); + completedFiles = new ArrayList<>(); for (File file : fileToProcess) { add(file); if (!file.delete()) { diff --git a/core/src/main/java/org/apache/druid/java/util/common/io/smoosh/SmooshedFileMapper.java b/core/src/main/java/org/apache/druid/java/util/common/io/smoosh/SmooshedFileMapper.java index 763734798f6..5921ee098e7 100644 --- a/core/src/main/java/org/apache/druid/java/util/common/io/smoosh/SmooshedFileMapper.java +++ b/core/src/main/java/org/apache/druid/java/util/common/io/smoosh/SmooshedFileMapper.java @@ -20,7 +20,6 @@ package org.apache.druid.java.util.common.io.smoosh; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.io.Closeables; import com.google.common.io.Files; import org.apache.druid.java.util.common.ByteBufferUtils; @@ -35,9 +34,11 @@ import java.io.InputStreamReader; import java.nio.ByteBuffer; import java.nio.MappedByteBuffer; import java.nio.charset.StandardCharsets; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.TreeMap; /** * Class that works in conjunction with FileSmoosher. This class knows how to map in a set of files smooshed @@ -72,7 +73,7 @@ public class SmooshedFileMapper implements Closeable outFiles.add(FileSmoosher.makeChunkFile(baseDir, i)); } - Map internalFiles = Maps.newTreeMap(); + Map internalFiles = new TreeMap<>(); while ((line = in.readLine()) != null) { splits = line.split(","); @@ -94,7 +95,7 @@ public class SmooshedFileMapper implements Closeable private final List outFiles; private final Map internalFiles; - private final List buffersList = Lists.newArrayList(); + private final List buffersList = new ArrayList<>(); private SmooshedFileMapper( List outFiles, diff --git a/core/src/main/java/org/apache/druid/java/util/common/parsers/JSONToLowerParser.java b/core/src/main/java/org/apache/druid/java/util/common/parsers/JSONToLowerParser.java index 5febd0c8ca2..70a81cb20c8 100644 --- a/core/src/main/java/org/apache/druid/java/util/common/parsers/JSONToLowerParser.java +++ b/core/src/main/java/org/apache/druid/java/util/common/parsers/JSONToLowerParser.java @@ -30,6 +30,7 @@ import org.apache.druid.java.util.common.StringUtils; import java.nio.charset.CharsetEncoder; import java.nio.charset.StandardCharsets; import java.util.ArrayList; +import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; @@ -90,7 +91,7 @@ public class JSONToLowerParser implements Parser } this.exclude = exclude != null ? 
Sets.newHashSet(Iterables.transform(exclude, StringUtils::toLowerCase)) - : Sets.newHashSet(); + : new HashSet<>(); } @Override diff --git a/core/src/main/java/org/apache/druid/java/util/common/parsers/ParserUtils.java b/core/src/main/java/org/apache/druid/java/util/common/parsers/ParserUtils.java index 48cbfe49f24..bfe732c7028 100644 --- a/core/src/main/java/org/apache/druid/java/util/common/parsers/ParserUtils.java +++ b/core/src/main/java/org/apache/druid/java/util/common/parsers/ParserUtils.java @@ -21,7 +21,6 @@ package org.apache.druid.java.util.common.parsers; import com.google.common.base.Function; import com.google.common.base.Splitter; -import com.google.common.collect.Sets; import org.apache.druid.common.config.NullHandling; import org.apache.druid.java.util.common.StringUtils; import org.joda.time.DateTimeZone; @@ -29,6 +28,7 @@ import org.joda.time.DateTimeZone; import javax.annotation.Nullable; import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.TimeZone; @@ -79,8 +79,8 @@ public class ParserUtils public static Set findDuplicates(Iterable fieldNames) { - Set duplicates = Sets.newHashSet(); - Set uniqueNames = Sets.newHashSet(); + Set duplicates = new HashSet<>(); + Set uniqueNames = new HashSet<>(); for (String fieldName : fieldNames) { String next = StringUtils.toLowerCase(fieldName); diff --git a/core/src/main/java/org/apache/druid/java/util/common/parsers/RegexParser.java b/core/src/main/java/org/apache/druid/java/util/common/parsers/RegexParser.java index e5371e89d61..fa38b0e954d 100644 --- a/core/src/main/java/org/apache/druid/java/util/common/parsers/RegexParser.java +++ b/core/src/main/java/org/apache/druid/java/util/common/parsers/RegexParser.java @@ -27,6 +27,7 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import org.apache.druid.java.util.common.collect.Utils; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.regex.Matcher; @@ -93,7 +94,7 @@ public class RegexParser implements Parser throw new ParseException("Incorrect Regex: %s . 
No match found.", pattern); } - List values = Lists.newArrayList(); + List values = new ArrayList<>(); for (int i = 1; i <= matcher.groupCount(); i++) { values.add(matcher.group(i)); } diff --git a/core/src/main/java/org/apache/druid/java/util/emitter/core/Emitters.java b/core/src/main/java/org/apache/druid/java/util/emitter/core/Emitters.java index 093550334ed..b7b4423f186 100644 --- a/core/src/main/java/org/apache/druid/java/util/emitter/core/Emitters.java +++ b/core/src/main/java/org/apache/druid/java/util/emitter/core/Emitters.java @@ -20,7 +20,6 @@ package org.apache.druid.java.util.emitter.core; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.Maps; import org.apache.druid.java.util.common.IAE; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.StringUtils; @@ -51,7 +50,7 @@ public class Emitters Lifecycle lifecycle ) { - Map jsonified = Maps.newHashMap(); + Map jsonified = new HashMap<>(); if (props.getProperty(LOG_EMITTER_PROP) != null) { jsonified = makeLoggingMap(props); jsonified.put("type", "logging"); @@ -75,7 +74,7 @@ public class Emitters static Map makeHttpMap(Properties props) { - Map httpMap = Maps.newHashMap(); + Map httpMap = new HashMap<>(); final String urlProperty = "org.apache.druid.java.util.emitter.http.url"; @@ -133,7 +132,7 @@ public class Emitters // Package-visible for unit tests static Map makeLoggingMap(Properties props) { - Map loggingMap = Maps.newHashMap(); + Map loggingMap = new HashMap<>(); loggingMap.put( "loggerClass", props.getProperty("org.apache.druid.java.util.emitter.logging.class", LoggingEmitter.class.getName()) @@ -146,7 +145,7 @@ public class Emitters static Map makeCustomFactoryMap(Properties props) { - Map factoryMap = Maps.newHashMap(); + Map factoryMap = new HashMap<>(); String prefix = "org.apache.druid.java.util.emitter."; for (Map.Entry entry : props.entrySet()) { diff --git a/core/src/main/java/org/apache/druid/java/util/emitter/service/ServiceMetricEvent.java b/core/src/main/java/org/apache/druid/java/util/emitter/service/ServiceMetricEvent.java index 896330e0aaf..d50ddf89959 100644 --- a/core/src/main/java/org/apache/druid/java/util/emitter/service/ServiceMetricEvent.java +++ b/core/src/main/java/org/apache/druid/java/util/emitter/service/ServiceMetricEvent.java @@ -31,6 +31,7 @@ import org.joda.time.DateTime; import java.util.Arrays; import java.util.Map; +import java.util.TreeMap; /** */ @@ -130,7 +131,7 @@ public class ServiceMetricEvent implements Event public static class Builder { - private final Map userDims = Maps.newTreeMap(); + private final Map userDims = new TreeMap<>(); private String feed = "metrics"; public Builder setFeed(String feed) diff --git a/core/src/main/java/org/apache/druid/java/util/http/client/Request.java b/core/src/main/java/org/apache/druid/java/util/http/client/Request.java index 1084110cb52..f9f73f23c79 100644 --- a/core/src/main/java/org/apache/druid/java/util/http/client/Request.java +++ b/core/src/main/java/org/apache/druid/java/util/http/client/Request.java @@ -20,8 +20,6 @@ package org.apache.druid.java.util.http.client; import com.google.common.base.Supplier; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.collect.Multimap; import com.google.common.collect.Multimaps; import org.apache.druid.java.util.common.StringUtils; @@ -35,8 +33,10 @@ import org.jboss.netty.handler.codec.http.HttpMethod; import java.net.URL; import java.nio.ByteBuffer; import 
java.nio.charset.StandardCharsets; +import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -49,13 +49,13 @@ public class Request private final HttpMethod method; private final URL url; private final Multimap headers = Multimaps.newListMultimap( - Maps.newHashMap(), + new HashMap<>(), new Supplier>() { @Override public List get() { - return Lists.newArrayList(); + return new ArrayList<>(); } } ); diff --git a/core/src/main/java/org/apache/druid/math/expr/Parser.java b/core/src/main/java/org/apache/druid/math/expr/Parser.java index 9c11d2bc71b..861fc3c2d92 100644 --- a/core/src/main/java/org/apache/druid/math/expr/Parser.java +++ b/core/src/main/java/org/apache/druid/math/expr/Parser.java @@ -24,8 +24,6 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Supplier; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; import org.antlr.v4.runtime.ANTLRInputStream; import org.antlr.v4.runtime.CommonTokenStream; import org.antlr.v4.runtime.tree.ParseTree; @@ -37,6 +35,8 @@ import org.apache.druid.math.expr.antlr.ExprParser; import javax.annotation.Nullable; import java.lang.reflect.Modifier; +import java.util.HashMap; +import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -47,7 +47,7 @@ public class Parser private static final Map FUNCTIONS; static { - Map functionMap = Maps.newHashMap(); + Map functionMap = new HashMap<>(); for (Class clazz : Function.class.getClasses()) { if (!Modifier.isAbstract(clazz.getModifiers()) && Function.class.isAssignableFrom(clazz)) { try { @@ -132,7 +132,7 @@ public class Parser public static List findRequiredBindings(Expr expr) { - final Set found = Sets.newLinkedHashSet(); + final Set found = new LinkedHashSet<>(); expr.visit( new Expr.Visitor() { diff --git a/core/src/main/java/org/apache/druid/metadata/MetadataStorageTablesConfig.java b/core/src/main/java/org/apache/druid/metadata/MetadataStorageTablesConfig.java index e93ad299e31..766efabb526 100644 --- a/core/src/main/java/org/apache/druid/metadata/MetadataStorageTablesConfig.java +++ b/core/src/main/java/org/apache/druid/metadata/MetadataStorageTablesConfig.java @@ -21,9 +21,9 @@ package org.apache.druid.metadata; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.collect.Maps; import org.apache.druid.java.util.common.StringUtils; +import java.util.HashMap; import java.util.Map; /** @@ -39,9 +39,9 @@ public class MetadataStorageTablesConfig private static final String DEFAULT_BASE = "druid"; - private final Map entryTables = Maps.newHashMap(); - private final Map logTables = Maps.newHashMap(); - private final Map lockTables = Maps.newHashMap(); + private final Map entryTables = new HashMap<>(); + private final Map logTables = new HashMap<>(); + private final Map lockTables = new HashMap<>(); @JsonProperty("base") private final String base; diff --git a/core/src/main/java/org/apache/druid/timeline/VersionedIntervalTimeline.java b/core/src/main/java/org/apache/druid/timeline/VersionedIntervalTimeline.java index 480b1dcaa28..85855ad8ee0 100644 --- a/core/src/main/java/org/apache/druid/timeline/VersionedIntervalTimeline.java +++ b/core/src/main/java/org/apache/druid/timeline/VersionedIntervalTimeline.java @@ -23,10 +23,7 @@ import 
com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; import com.google.common.base.Preconditions; import com.google.common.collect.Iterators; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.collect.Ordering; -import com.google.common.collect.Sets; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.UOE; import org.apache.druid.java.util.common.guava.Comparators; @@ -37,6 +34,8 @@ import org.joda.time.Interval; import java.util.ArrayList; import java.util.Comparator; +import java.util.HashMap; +import java.util.HashSet; import java.util.IdentityHashMap; import java.util.Iterator; import java.util.List; @@ -75,7 +74,7 @@ public class VersionedIntervalTimeline implements Timel final NavigableMap incompletePartitionsTimeline = new TreeMap( Comparators.intervalsByStartThenEnd() ); - private final Map> allTimelineEntries = Maps.newHashMap(); + private final Map> allTimelineEntries = new HashMap<>(); private final Comparator versionComparator; @@ -311,11 +310,11 @@ public class VersionedIntervalTimeline implements Timel { try { lock.readLock().lock(); - Set> retVal = Sets.newHashSet(); + Set> retVal = new HashSet<>(); - Map> overShadowed = Maps.newHashMap(); + Map> overShadowed = new HashMap<>(); for (Map.Entry> versionEntry : allTimelineEntries.entrySet()) { - Map versionCopy = Maps.newHashMap(); + Map versionCopy = new HashMap<>(); versionCopy.putAll(versionEntry.getValue()); overShadowed.put(versionEntry.getKey(), versionCopy); } @@ -529,7 +528,7 @@ public class VersionedIntervalTimeline implements Timel boolean incompleteOk ) { - List intervalsToRemove = Lists.newArrayList(); + List intervalsToRemove = new ArrayList<>(); TimelineEntry removed = timeline.get(interval); if (removed == null) { diff --git a/core/src/main/java/org/apache/druid/timeline/partition/PartitionHolder.java b/core/src/main/java/org/apache/druid/timeline/partition/PartitionHolder.java index 5e5f676e42f..396b4f3bdd6 100644 --- a/core/src/main/java/org/apache/druid/timeline/partition/PartitionHolder.java +++ b/core/src/main/java/org/apache/druid/timeline/partition/PartitionHolder.java @@ -21,7 +21,6 @@ package org.apache.druid.timeline.partition; import com.google.common.collect.Iterables; import com.google.common.collect.Iterators; -import com.google.common.collect.Sets; import java.util.Iterator; import java.util.List; @@ -38,13 +37,13 @@ public class PartitionHolder implements Iterable> public PartitionHolder(PartitionChunk initialChunk) { - this.holderSet = Sets.newTreeSet(); + this.holderSet = new TreeSet<>(); add(initialChunk); } public PartitionHolder(List> initialChunks) { - this.holderSet = Sets.newTreeSet(); + this.holderSet = new TreeSet<>(); for (PartitionChunk chunk : initialChunks) { add(chunk); } @@ -52,7 +51,7 @@ public class PartitionHolder implements Iterable> public PartitionHolder(PartitionHolder partitionHolder) { - this.holderSet = Sets.newTreeSet(); + this.holderSet = new TreeSet<>(); this.holderSet.addAll(partitionHolder.holderSet); } diff --git a/core/src/test/java/org/apache/druid/collections/BlockingPoolTest.java b/core/src/test/java/org/apache/druid/collections/BlockingPoolTest.java index c2d6539c040..19a21f2c089 100644 --- a/core/src/test/java/org/apache/druid/collections/BlockingPoolTest.java +++ b/core/src/test/java/org/apache/druid/collections/BlockingPoolTest.java @@ -20,7 +20,6 @@ package org.apache.druid.collections; import com.google.common.base.Suppliers; 
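
The timeline and partition hunks above cover the two less trivial variants: ordered collections built around a comparator, and a fresh map populated with putAll. A minimal sketch, assuming Java 8 for Comparator.comparingInt; all names here are invented for illustration:

import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeSet;

public class CopyAndComparatorSketch
{
  // Sets.newTreeSet(comparator) has a direct JDK equivalent:
  private final TreeSet<String> byLength =
      new TreeSet<>(Comparator.comparingInt(String::length));

  // new HashMap<>() followed by putAll(source), as in the overshadowed-segment
  // computation above, is behaviorally equivalent to the JDK copy constructor:
  public static Map<String, Integer> copyOf(Map<String, Integer> source)
  {
    return new HashMap<>(source); // same contents, independent map
  }
}

The patch keeps the putAll form where it already existed, which is the smallest possible change; the copy constructor would be an equally valid target.
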
-import com.google.common.collect.Lists; import org.apache.druid.java.util.common.concurrent.Execs; import org.junit.After; import org.junit.Before; @@ -28,6 +27,7 @@ import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; +import java.util.ArrayList; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; @@ -153,7 +153,7 @@ public class BlockingPoolTest @Override public List> call() { - List> result = Lists.newArrayList(); + List> result = new ArrayList<>(); for (int i = 0; i < limit1; i++) { result.add(pool.take(10)); } @@ -167,7 +167,7 @@ public class BlockingPoolTest @Override public List> call() { - List> result = Lists.newArrayList(); + List> result = new ArrayList<>(); for (int i = 0; i < limit2; i++) { result.add(pool.take(10)); } diff --git a/core/src/test/java/org/apache/druid/collections/CombiningIterableTest.java b/core/src/test/java/org/apache/druid/collections/CombiningIterableTest.java index 4208c7c1de0..bafacbd3679 100644 --- a/core/src/test/java/org/apache/druid/collections/CombiningIterableTest.java +++ b/core/src/test/java/org/apache/druid/collections/CombiningIterableTest.java @@ -20,7 +20,6 @@ package org.apache.druid.collections; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; import com.google.common.collect.Ordering; import com.google.common.collect.Sets; import org.junit.Assert; @@ -42,7 +41,7 @@ public class CombiningIterableTest Set mergedLists = new HashSet<>(); mergedLists.addAll(firstList); mergedLists.addAll(secondList); - ArrayList> iterators = Lists.newArrayList(); + ArrayList> iterators = new ArrayList<>(); iterators.add(firstList); iterators.add(secondList); CombiningIterable actualIterable = CombiningIterable.createSplatted( diff --git a/core/src/test/java/org/apache/druid/collections/OrderedMergeIteratorTest.java b/core/src/test/java/org/apache/druid/collections/OrderedMergeIteratorTest.java index 0cfaad01366..c636e9d7d96 100644 --- a/core/src/test/java/org/apache/druid/collections/OrderedMergeIteratorTest.java +++ b/core/src/test/java/org/apache/druid/collections/OrderedMergeIteratorTest.java @@ -36,7 +36,7 @@ public class OrderedMergeIteratorTest @Test public void testSanity() { - final ArrayList> iterators = Lists.newArrayList(); + final ArrayList> iterators = new ArrayList<>(); iterators.add(Arrays.asList(1, 3, 5, 7, 9).iterator()); iterators.add(Arrays.asList(2, 8).iterator()); iterators.add(Arrays.asList(4, 6, 8).iterator()); @@ -52,7 +52,7 @@ public class OrderedMergeIteratorTest @Test public void testScrewsUpOnOutOfOrderBeginningOfList() { - final ArrayList> iterators = Lists.newArrayList(); + final ArrayList> iterators = new ArrayList<>(); iterators.add(Arrays.asList(1, 3, 5, 7, 9).iterator()); iterators.add(Arrays.asList(4, 6).iterator()); iterators.add(Arrays.asList(2, 8).iterator()); @@ -68,7 +68,7 @@ public class OrderedMergeIteratorTest @Test public void testScrewsUpOnOutOfOrderInList() { - final ArrayList> iterators = Lists.newArrayList(); + final ArrayList> iterators = new ArrayList<>(); iterators.add(Arrays.asList(1, 3, 5, 4, 7, 9).iterator()); iterators.add(Arrays.asList(2, 8).iterator()); iterators.add(Arrays.asList(4, 6).iterator()); @@ -86,7 +86,7 @@ public class OrderedMergeIteratorTest { final boolean[] done = new boolean[]{false, false}; - final ArrayList> iterators = Lists.newArrayList(); + final ArrayList> iterators = new ArrayList<>(); iterators.add( new IteratorShell(Arrays.asList(1, 2, 3).iterator()) { 
@@ -170,7 +170,7 @@ public class OrderedMergeIteratorTest @Test(expected = NoSuchElementException.class) public void testNoElementInNext() { - final ArrayList> iterators = Lists.newArrayList(); + final ArrayList> iterators = new ArrayList<>(); OrderedMergeIterator iter = new OrderedMergeIterator( Ordering.natural(), iterators.iterator() @@ -181,7 +181,7 @@ public class OrderedMergeIteratorTest @Test(expected = UnsupportedOperationException.class) public void testRemove() { - final ArrayList> iterators = Lists.newArrayList(); + final ArrayList> iterators = new ArrayList<>(); OrderedMergeIterator iter = new OrderedMergeIterator( Ordering.natural(), iterators.iterator() diff --git a/core/src/test/java/org/apache/druid/collections/OrderedMergeSequenceTest.java b/core/src/test/java/org/apache/druid/collections/OrderedMergeSequenceTest.java index 4677be744fc..741ac7d6e2c 100644 --- a/core/src/test/java/org/apache/druid/collections/OrderedMergeSequenceTest.java +++ b/core/src/test/java/org/apache/druid/collections/OrderedMergeSequenceTest.java @@ -174,7 +174,7 @@ public class OrderedMergeSequenceTest { final boolean[] done = new boolean[]{false, false}; - final ArrayList> sequences = Lists.newArrayList(); + final ArrayList> sequences = new ArrayList<>(); sequences.add( new BaseSequence>( new BaseSequence.IteratorMaker>() diff --git a/core/src/test/java/org/apache/druid/data/input/impl/CSVParseSpecTest.java b/core/src/test/java/org/apache/druid/data/input/impl/CSVParseSpecTest.java index f85ad2bbf50..f5fa4ee1b9b 100644 --- a/core/src/test/java/org/apache/druid/data/input/impl/CSVParseSpecTest.java +++ b/core/src/test/java/org/apache/druid/data/input/impl/CSVParseSpecTest.java @@ -19,9 +19,9 @@ package org.apache.druid.data.input.impl; -import com.google.common.collect.Lists; import org.junit.Test; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -39,8 +39,8 @@ public class CSVParseSpecTest ), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(Arrays.asList("a", "b")), - Lists.newArrayList(), - Lists.newArrayList() + new ArrayList<>(), + new ArrayList<>() ), ",", Collections.singletonList("a"), @@ -61,8 +61,8 @@ public class CSVParseSpecTest ), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(Arrays.asList("a,", "b")), - Lists.newArrayList(), - Lists.newArrayList() + new ArrayList<>(), + new ArrayList<>() ), ",", Collections.singletonList("a"), diff --git a/core/src/test/java/org/apache/druid/data/input/impl/DelimitedParseSpecTest.java b/core/src/test/java/org/apache/druid/data/input/impl/DelimitedParseSpecTest.java index f7c18f9e270..9b0786228d6 100644 --- a/core/src/test/java/org/apache/druid/data/input/impl/DelimitedParseSpecTest.java +++ b/core/src/test/java/org/apache/druid/data/input/impl/DelimitedParseSpecTest.java @@ -20,12 +20,12 @@ package org.apache.druid.data.input.impl; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.Lists; import org.apache.druid.TestObjectMapper; import org.junit.Assert; import org.junit.Test; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -70,8 +70,8 @@ public class DelimitedParseSpecTest ), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(Arrays.asList("a", "b")), - Lists.newArrayList(), - Lists.newArrayList() + new ArrayList<>(), + new ArrayList<>() ), ",", " ", @@ -93,8 +93,8 @@ public class DelimitedParseSpecTest ), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(Arrays.asList("a,", "b")), - 
Lists.newArrayList(), - Lists.newArrayList() + new ArrayList<>(), + new ArrayList<>() ), ",", null, @@ -116,8 +116,8 @@ public class DelimitedParseSpecTest ), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(Arrays.asList("a", "b")), - Lists.newArrayList(), - Lists.newArrayList() + new ArrayList<>(), + new ArrayList<>() ), ",", null, diff --git a/core/src/test/java/org/apache/druid/data/input/impl/FileIteratingFirehoseTest.java b/core/src/test/java/org/apache/druid/data/input/impl/FileIteratingFirehoseTest.java index 94a9dc3fcc5..09f5dede94d 100644 --- a/core/src/test/java/org/apache/druid/data/input/impl/FileIteratingFirehoseTest.java +++ b/core/src/test/java/org/apache/druid/data/input/impl/FileIteratingFirehoseTest.java @@ -21,7 +21,6 @@ package org.apache.druid.data.input.impl; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; import org.apache.commons.io.LineIterator; import org.apache.druid.data.input.InputRow; import org.junit.Assert; @@ -112,7 +111,7 @@ public class FileIteratingFirehoseTest .collect(Collectors.toList()); try (final FileIteratingFirehose firehose = new FileIteratingFirehose(lineIterators.iterator(), parser)) { - final List results = Lists.newArrayList(); + final List results = new ArrayList<>(); while (firehose.hasMore()) { final InputRow inputRow = firehose.nextRow(); diff --git a/core/src/test/java/org/apache/druid/data/input/impl/JSONLowercaseParseSpecTest.java b/core/src/test/java/org/apache/druid/data/input/impl/JSONLowercaseParseSpecTest.java index 87b52e8d093..72c320f4939 100644 --- a/core/src/test/java/org/apache/druid/data/input/impl/JSONLowercaseParseSpecTest.java +++ b/core/src/test/java/org/apache/druid/data/input/impl/JSONLowercaseParseSpecTest.java @@ -19,11 +19,11 @@ package org.apache.druid.data.input.impl; -import com.google.common.collect.Lists; import junit.framework.Assert; import org.apache.druid.java.util.common.parsers.Parser; import org.junit.Test; +import java.util.ArrayList; import java.util.Arrays; import java.util.Map; @@ -40,8 +40,8 @@ public class JSONLowercaseParseSpecTest ), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(Arrays.asList("A", "B")), - Lists.newArrayList(), - Lists.newArrayList() + new ArrayList<>(), + new ArrayList<>() ) ); Parser parser = spec.makeParser(); diff --git a/core/src/test/java/org/apache/druid/data/input/impl/ParseSpecTest.java b/core/src/test/java/org/apache/druid/data/input/impl/ParseSpecTest.java index 424dec57f13..1c2e7808e17 100644 --- a/core/src/test/java/org/apache/druid/data/input/impl/ParseSpecTest.java +++ b/core/src/test/java/org/apache/druid/data/input/impl/ParseSpecTest.java @@ -35,6 +35,7 @@ import org.junit.Test; import org.junit.rules.ExpectedException; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -70,8 +71,8 @@ public class ParseSpecTest ), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(Arrays.asList("a", "b", "a")), - Lists.newArrayList(), - Lists.newArrayList() + new ArrayList<>(), + new ArrayList<>() ), ",", " ", @@ -94,7 +95,7 @@ public class ParseSpecTest new DimensionsSpec( DimensionsSpec.getDefaultSchemas(Arrays.asList("a", "B")), Collections.singletonList("B"), - Lists.newArrayList() + new ArrayList<>() ), ",", null, @@ -117,7 +118,7 @@ public class ParseSpecTest new DimensionsSpec( DimensionsSpec.getDefaultSchemas(Collections.singletonList("a")), Lists.newArrayList("B", "B"), - Lists.newArrayList() + new ArrayList<>() ), 
",", null, @@ -138,7 +139,7 @@ public class ParseSpecTest new DimensionsSpec( DimensionsSpec.getDefaultSchemas(Collections.singletonList("a")), Lists.newArrayList("B", "B"), - Lists.newArrayList() + new ArrayList<>() ), ",", null, diff --git a/core/src/test/java/org/apache/druid/data/input/impl/SqlFirehoseTest.java b/core/src/test/java/org/apache/druid/data/input/impl/SqlFirehoseTest.java index 1ebfd531c08..f1c3d62481a 100644 --- a/core/src/test/java/org/apache/druid/data/input/impl/SqlFirehoseTest.java +++ b/core/src/test/java/org/apache/druid/data/input/impl/SqlFirehoseTest.java @@ -25,7 +25,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.smile.SmileFactory; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; import org.apache.commons.io.FileUtils; import org.apache.druid.data.input.InputRow; import org.apache.druid.data.input.impl.prefetch.JsonIterator; @@ -105,7 +104,7 @@ public class SqlFirehoseTest .collect(Collectors.toList()); try (final SqlFirehose firehose = new SqlFirehose(lineIterators.iterator(), parser, closeable)) { - final List results = Lists.newArrayList(); + final List results = new ArrayList<>(); while (firehose.hasMore()) { final InputRow inputRow = firehose.nextRow(); diff --git a/core/src/test/java/org/apache/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactoryTest.java b/core/src/test/java/org/apache/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactoryTest.java index 9c3508e8e0d..c1d1b318d00 100644 --- a/core/src/test/java/org/apache/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactoryTest.java +++ b/core/src/test/java/org/apache/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactoryTest.java @@ -21,7 +21,6 @@ package org.apache.druid.data.input.impl.prefetch; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; -import com.google.common.collect.Lists; import com.google.common.io.CountingOutputStream; import org.apache.commons.io.FileUtils; import org.apache.commons.io.filefilter.TrueFileFilter; @@ -74,8 +73,8 @@ public class PrefetchableTextFilesFirehoseFactoryTest ), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(Arrays.asList("timestamp", "a", "b")), - Lists.newArrayList(), - Lists.newArrayList() + new ArrayList<>(), + new ArrayList<>() ), ",", Arrays.asList("timestamp", "a", "b"), diff --git a/core/src/test/java/org/apache/druid/java/util/common/guava/FilteredSequenceTest.java b/core/src/test/java/org/apache/druid/java/util/common/guava/FilteredSequenceTest.java index 09be6fca849..a7d9aebab54 100644 --- a/core/src/test/java/org/apache/druid/java/util/common/guava/FilteredSequenceTest.java +++ b/core/src/test/java/org/apache/druid/java/util/common/guava/FilteredSequenceTest.java @@ -25,6 +25,7 @@ import com.google.common.collect.Lists; import org.apache.druid.java.util.common.StringUtils; import org.junit.Test; +import java.util.ArrayList; import java.util.List; /** @@ -44,7 +45,7 @@ public class FilteredSequenceTest }; for (int i = 0; i < 25; ++i) { - List vals = Lists.newArrayList(); + List vals = new ArrayList<>(); for (int j = 0; j < i; ++j) { vals.add(j); } diff --git a/core/src/test/java/org/apache/druid/java/util/common/guava/MappedSequenceTest.java b/core/src/test/java/org/apache/druid/java/util/common/guava/MappedSequenceTest.java index 14f31c4bbfb..1464615b9d4 100644 --- 
a/core/src/test/java/org/apache/druid/java/util/common/guava/MappedSequenceTest.java +++ b/core/src/test/java/org/apache/druid/java/util/common/guava/MappedSequenceTest.java @@ -23,6 +23,7 @@ import com.google.common.collect.Lists; import org.apache.druid.java.util.common.StringUtils; import org.junit.Test; +import java.util.ArrayList; import java.util.List; import java.util.function.Function; @@ -43,7 +44,7 @@ public class MappedSequenceTest }; for (int i = 4; i < 5; ++i) { - List vals = Lists.newArrayList(); + List vals = new ArrayList<>(); for (int j = 0; j < i; ++j) { vals.add(j); } diff --git a/core/src/test/java/org/apache/druid/java/util/common/lifecycle/LifecycleTest.java b/core/src/test/java/org/apache/druid/java/util/common/lifecycle/LifecycleTest.java index 454909a44db..b1be51b3b4e 100644 --- a/core/src/test/java/org/apache/druid/java/util/common/lifecycle/LifecycleTest.java +++ b/core/src/test/java/org/apache/druid/java/util/common/lifecycle/LifecycleTest.java @@ -166,8 +166,8 @@ public class LifecycleTest { Lifecycle lifecycle = new Lifecycle(); - List startOrder = Lists.newArrayList(); - List stopOrder = Lists.newArrayList(); + List startOrder = new ArrayList<>(); + List stopOrder = new ArrayList<>(); lifecycle.addManagedInstance(new ObjectToBeLifecycled(0, startOrder, stopOrder)); lifecycle.addManagedInstance(new ObjectToBeLifecycled(1, startOrder, stopOrder), Lifecycle.Stage.NORMAL); @@ -198,8 +198,8 @@ public class LifecycleTest { final Lifecycle lifecycle = new Lifecycle(); - final List startOrder = Lists.newArrayList(); - final List stopOrder = Lists.newArrayList(); + final List startOrder = new ArrayList<>(); + final List stopOrder = new ArrayList<>(); lifecycle.addManagedInstance(new ObjectToBeLifecycled(0, startOrder, stopOrder)); lifecycle.addHandler( diff --git a/core/src/test/java/org/apache/druid/timeline/DataSegmentTest.java b/core/src/test/java/org/apache/druid/timeline/DataSegmentTest.java index 59a71ff9dbc..6d2b22e68f1 100644 --- a/core/src/test/java/org/apache/druid/timeline/DataSegmentTest.java +++ b/core/src/test/java/org/apache/druid/timeline/DataSegmentTest.java @@ -24,7 +24,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.RangeSet; -import com.google.common.collect.Sets; import org.apache.druid.TestObjectMapper; import org.apache.druid.data.input.InputRow; import org.apache.druid.java.util.common.DateTimes; @@ -45,6 +44,7 @@ import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.TreeSet; /** */ @@ -241,7 +241,7 @@ public class DataSegmentTest List shuffled = new ArrayList<>(Arrays.asList(sortedOrder)); Collections.shuffle(shuffled); - Set theSet = Sets.newTreeSet(DataSegment.bucketMonthComparator()); + Set theSet = new TreeSet<>(DataSegment.bucketMonthComparator()); theSet.addAll(shuffled); int index = 0; diff --git a/examples/src/main/java/org/apache/druid/examples/wikipedia/WikipediaIrcDecoder.java b/examples/src/main/java/org/apache/druid/examples/wikipedia/WikipediaIrcDecoder.java index fcd38f092c3..bb90c28d44d 100644 --- a/examples/src/main/java/org/apache/druid/examples/wikipedia/WikipediaIrcDecoder.java +++ b/examples/src/main/java/org/apache/druid/examples/wikipedia/WikipediaIrcDecoder.java @@ -23,7 +23,6 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import 
com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.maxmind.geoip2.DatabaseReader; import com.maxmind.geoip2.exception.GeoIp2Exception; import com.maxmind.geoip2.model.Omni; @@ -38,6 +37,7 @@ import java.io.IOException; import java.net.InetAddress; import java.net.URL; import java.net.UnknownHostException; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.regex.Matcher; @@ -88,7 +88,7 @@ class WikipediaIrcDecoder implements IrcDecoder ) { if (namespaces == null) { - namespaces = Maps.newHashMap(); + namespaces = new HashMap<>(); } this.namespaces = namespaces; this.geoIpDatabase = geoIpDatabase; @@ -177,8 +177,8 @@ class WikipediaIrcDecoder implements IrcDecoder @Override public InputRow decodeMessage(final DateTime timestamp, String channel, String msg) { - final Map dimensions = Maps.newHashMap(); - final Map metrics = Maps.newHashMap(); + final Map dimensions = new HashMap<>(); + final Map metrics = new HashMap<>(); Matcher m = pattern.matcher(msg); if (!m.matches()) { diff --git a/extendedset/src/test/java/org/apache/druid/extendedset/intset/ImmutableConciseSetIntersectionTest.java b/extendedset/src/test/java/org/apache/druid/extendedset/intset/ImmutableConciseSetIntersectionTest.java index e270015e193..1ee347ea67a 100644 --- a/extendedset/src/test/java/org/apache/druid/extendedset/intset/ImmutableConciseSetIntersectionTest.java +++ b/extendedset/src/test/java/org/apache/druid/extendedset/intset/ImmutableConciseSetIntersectionTest.java @@ -19,7 +19,6 @@ package org.apache.druid.extendedset.intset; -import com.google.common.collect.Lists; import junit.framework.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -97,7 +96,7 @@ public class ImmutableConciseSetIntersectionTest @Test public void testIntersection3() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); ConciseSet set1 = new ConciseSet(); ConciseSet set2 = new ConciseSet(); for (int i = 0; i < 1000; i++) { @@ -115,7 +114,7 @@ public class ImmutableConciseSetIntersectionTest @Test public void testIntersection4() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); ConciseSet set1 = new ConciseSet(); ConciseSet set2 = new ConciseSet(); for (int i = 0; i < 1000; i++) { @@ -137,7 +136,7 @@ public class ImmutableConciseSetIntersectionTest { final int[] ints1 = {33, 100000}; final int[] ints2 = {34, 200000}; - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); ConciseSet set1 = new ConciseSet(); for (int i : ints1) { @@ -158,7 +157,7 @@ public class ImmutableConciseSetIntersectionTest @Test public void testIntersection6() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); ConciseSet set1 = new ConciseSet(); for (int i = 0; i < 5; i++) { set1.add(i); @@ -197,7 +196,7 @@ public class ImmutableConciseSetIntersectionTest set2.add(i); } - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); expected.add(100); expected.add(500); for (int i = 600; i < 700; i++) { @@ -224,7 +223,7 @@ public class ImmutableConciseSetIntersectionTest } set2.add(4001); - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); expected.add(100); expected.add(500); for (int i = 600; i < 700; i++) { @@ -248,7 +247,7 @@ public class ImmutableConciseSetIntersectionTest set2.add(i); } - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); expected.add(2005); 
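
The test hunks in this stretch all follow one shape: build an expected List with a plain ArrayList, drain the set's iterator into an actual list, and compare. A condensed sketch of that collection step; the generic helper name is invented, and the Concise tests use the primitive IntSet.IntIterator rather than java.util.Iterator:

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

public class VerifySketch
{
  // Collect an iterator into a list, the way the verifyIntersection,
  // verifyUnion, and verifyComplement helpers do after this patch.
  public static <T> List<T> toList(Iterator<T> iterator)
  {
    List<T> actual = new ArrayList<>(); // was Lists.newArrayList()
    while (iterator.hasNext()) {
      actual.add(iterator.next());
    }
    return actual;
  }
}
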
expected.add(3005); @@ -269,7 +268,7 @@ public class ImmutableConciseSetIntersectionTest set2.add(600); set2.add(4001); - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); expected.add(500); expected.add(600); @@ -290,7 +289,7 @@ public class ImmutableConciseSetIntersectionTest set2.add(i); } - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); expected.add(2005); for (int i = 2800; i < 3007; i++) { expected.add(i); @@ -315,7 +314,7 @@ public class ImmutableConciseSetIntersectionTest } set2.add(10005); - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); expected.add(2005); for (int i = 2800; i < 3007; i++) { expected.add(i); @@ -336,7 +335,7 @@ public class ImmutableConciseSetIntersectionTest set2.add(i); } - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); verifyIntersection(expected, set1, set2); } @@ -356,7 +355,7 @@ public class ImmutableConciseSetIntersectionTest set2.add(100); set2.add(101); - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); expected.add(0); expected.add(3); expected.add(5); @@ -382,7 +381,7 @@ public class ImmutableConciseSetIntersectionTest set2.add(i); } - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); expected.add(0); expected.add(3); expected.add(5); @@ -406,7 +405,7 @@ public class ImmutableConciseSetIntersectionTest set2.add(100); set2.add(101); - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); verifyIntersection(expected, set1, set2); } @@ -422,7 +421,7 @@ public class ImmutableConciseSetIntersectionTest ConciseSet set2 = new ConciseSet(); set2.add(4001); - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); expected.add(4001); verifyIntersection(expected, set1, set2); @@ -441,7 +440,7 @@ public class ImmutableConciseSetIntersectionTest set2.add(i); } - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); for (int i = 32; i < 62; i++) { expected.add(i); } @@ -460,7 +459,7 @@ public class ImmutableConciseSetIntersectionTest set2.add(i); } - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); expected.add(2005); verifyIntersection(expected, set1, set2); @@ -583,7 +582,7 @@ public class ImmutableConciseSetIntersectionTest private void verifyIntersection(List expected, List sets) { - List actual = Lists.newArrayList(); + List actual = new ArrayList<>(); ImmutableConciseSet set = ImmutableConciseSet.intersection(sets); IntSet.IntIterator itr = set.iterator(); while (itr.hasNext()) { diff --git a/extendedset/src/test/java/org/apache/druid/extendedset/intset/ImmutableConciseSetTest.java b/extendedset/src/test/java/org/apache/druid/extendedset/intset/ImmutableConciseSetTest.java index b7d25bdfac9..a4156d235fa 100755 --- a/extendedset/src/test/java/org/apache/druid/extendedset/intset/ImmutableConciseSetTest.java +++ b/extendedset/src/test/java/org/apache/druid/extendedset/intset/ImmutableConciseSetTest.java @@ -19,7 +19,6 @@ package org.apache.druid.extendedset.intset; -import com.google.common.collect.Lists; import junit.framework.Assert; import org.apache.druid.java.util.common.StringUtils; import org.junit.Test; @@ -476,7 +475,7 @@ public class ImmutableConciseSetTest @Test public void testUnion3() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); ConciseSet set1 = new ConciseSet(); for (int i = 62; i < 10001; i++) { set1.add(i); @@ -506,7 +505,7 @@ public class ImmutableConciseSetTest @Test 
public void testUnion4() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); ConciseSet set1 = new ConciseSet(); for (int i = 63; i < 1001; i++) { set1.add(i); @@ -578,7 +577,7 @@ public class ImmutableConciseSetTest @Test public void testUnion6() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); ConciseSet set1 = new ConciseSet(); for (int i = 0; i < 30; i++) { if (i != 28) { @@ -612,7 +611,7 @@ public class ImmutableConciseSetTest @Test public void testUnion7() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); ConciseSet set1 = new ConciseSet(); for (int i = 64; i < 1005; i++) { set1.add(i); @@ -642,7 +641,7 @@ public class ImmutableConciseSetTest @Test public void testUnion8() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); ConciseSet set1 = new ConciseSet(); for (int i = 0; i < 1000; i++) { if (i != 27) { @@ -676,7 +675,7 @@ public class ImmutableConciseSetTest @Test public void testUnion9() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); ConciseSet set1 = new ConciseSet(); for (int i = 0; i < 1000; i++) { if (!(i == 27 || i == 28)) { @@ -712,7 +711,7 @@ public class ImmutableConciseSetTest @Test public void testUnion10() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); ConciseSet set1 = new ConciseSet(); for (int i = 0; i < 1000; i += 2) { set1.add(i); @@ -742,7 +741,7 @@ public class ImmutableConciseSetTest @Test public void testUnion11() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); ConciseSet set1 = new ConciseSet(); for (int i = 0; i < 1000; i += 2) { set1.add(i); @@ -802,7 +801,7 @@ public class ImmutableConciseSetTest @Test public void testUnion13() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); final int[] ints1 = {0}; ConciseSet set1 = new ConciseSet(); @@ -835,7 +834,7 @@ public class ImmutableConciseSetTest @Test public void testUnion14() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); final int[] ints1 = {0, 100}; ConciseSet set1 = new ConciseSet(); @@ -869,7 +868,7 @@ public class ImmutableConciseSetTest @Test public void testUnion15() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); final int[] ints1 = {1, 100}; final int[] ints2 = {0}; @@ -956,7 +955,7 @@ public class ImmutableConciseSetTest @Test public void testUnion18() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); ConciseSet set1 = new ConciseSet(); for (int i = 0; i < 1000; i++) { set1.add(i); @@ -985,7 +984,7 @@ public class ImmutableConciseSetTest @Test public void testUnion19() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); ConciseSet set1 = new ConciseSet(); for (int i = 0; i < 93; i++) { set1.add(i); @@ -1014,7 +1013,7 @@ public class ImmutableConciseSetTest @Test public void testUnion20() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); ConciseSet set1 = new ConciseSet(); for (int i = 0; i < 5; i++) { set1.add(i); @@ -1064,7 +1063,7 @@ public class ImmutableConciseSetTest ImmutableConciseSet.newImmutableFromMutable(set2) ); - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); for (int i = 0; i < 93; i++) { expected.add(i); } @@ -1090,7 +1089,7 @@ public class ImmutableConciseSetTest ImmutableConciseSet.newImmutableFromMutable(set2) ); - List expected = Lists.newArrayList(); + List expected = new 
ArrayList<>(); for (int i = 0; i < 32; i++) { expected.add(i); } @@ -1130,7 +1129,7 @@ public class ImmutableConciseSetTest private void verifyUnion(List expected, List sets) { - List actual = Lists.newArrayList(); + List actual = new ArrayList<>(); ImmutableConciseSet set = ImmutableConciseSet.union(sets); IntSet.IntIterator itr = set.iterator(); while (itr.hasNext()) { @@ -1146,7 +1145,7 @@ public class ImmutableConciseSetTest public void testComplement1() { final int[] ints = {1, 100}; - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); ConciseSet set = new ConciseSet(); for (int i : ints) { @@ -1170,7 +1169,7 @@ public class ImmutableConciseSetTest @Test public void testComplement2() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); ConciseSet set = new ConciseSet(); for (int i = 0; i < 15; i++) { @@ -1188,7 +1187,7 @@ public class ImmutableConciseSetTest @Test public void testComplement3() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); final int length = 21; ConciseSet set = new ConciseSet(); @@ -1210,7 +1209,7 @@ public class ImmutableConciseSetTest @Test public void testComplement4() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); final int length = 41; ConciseSet set = new ConciseSet(); @@ -1232,7 +1231,7 @@ public class ImmutableConciseSetTest @Test public void testComplement5() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); final int length = 1001; ConciseSet set = new ConciseSet(); @@ -1254,7 +1253,7 @@ public class ImmutableConciseSetTest @Test public void testComplement6() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); final int length = 1001; ConciseSet set = new ConciseSet(); @@ -1278,7 +1277,7 @@ public class ImmutableConciseSetTest @Test public void testComplement7() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); final int length = 37; ConciseSet set = new ConciseSet(); @@ -1298,7 +1297,7 @@ public class ImmutableConciseSetTest @Test public void testComplement8() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); final int length = 32; ConciseSet set = new ConciseSet(); @@ -1362,7 +1361,7 @@ public class ImmutableConciseSetTest @Test public void testComplement10() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); final int length = 93; for (int i = 0; i < length; i++) { @@ -1380,7 +1379,7 @@ public class ImmutableConciseSetTest @Test public void testComplement11() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); int length = 18930; for (int i = 0; i < 500; i++) { expected.add(i); @@ -1404,7 +1403,7 @@ public class ImmutableConciseSetTest @Test public void testComplement12() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); int length = 10; for (int i = 0; i < 10; i++) { expected.add(i); @@ -1421,7 +1420,7 @@ public class ImmutableConciseSetTest @Test public void testComplement13() { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); int length = 10; for (int i = 0; i < length; i++) { expected.add(i); @@ -1433,7 +1432,7 @@ public class ImmutableConciseSetTest private void verifyComplement(List expected, ImmutableConciseSet set, int endIndex) { - List actual = Lists.newArrayList(); + List actual = new ArrayList<>(); ImmutableConciseSet res; if (endIndex == NO_COMPLEMENT_LENGTH) { diff --git 
a/extensions-contrib/ambari-metrics-emitter/src/test/java/org/apache/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverterTest.java b/extensions-contrib/ambari-metrics-emitter/src/test/java/org/apache/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverterTest.java index a45ef9552ab..746011a752c 100644 --- a/extensions-contrib/ambari-metrics-emitter/src/test/java/org/apache/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverterTest.java +++ b/extensions-contrib/ambari-metrics-emitter/src/test/java/org/apache/druid/emitter/ambari/metrics/WhiteListBasedDruidToTimelineEventConverterTest.java @@ -19,7 +19,6 @@ package org.apache.druid.emitter.ambari.metrics; -import com.google.common.collect.Maps; import junitparams.JUnitParamsRunner; import junitparams.Parameters; import org.apache.commons.io.IOUtils; @@ -39,6 +38,7 @@ import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; +import java.util.HashMap; @RunWith(JUnitParamsRunner.class) @@ -60,7 +60,7 @@ public class WhiteListBasedDruidToTimelineEventConverterTest EasyMock.expect(event.getHost()).andReturn(hostname).anyTimes(); EasyMock.expect(event.getService()).andReturn(serviceName).anyTimes(); EasyMock.expect(event.getCreatedTime()).andReturn(createdTime).anyTimes(); - EasyMock.expect(event.getUserDims()).andReturn(Maps.newHashMap()).anyTimes(); + EasyMock.expect(event.getUserDims()).andReturn(new HashMap<>()).anyTimes(); EasyMock.expect(event.getValue()).andReturn(10).anyTimes(); EasyMock.expect(event.getFeed()).andReturn("metrics").anyTimes(); } diff --git a/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPusherTest.java b/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPusherTest.java index 5e45c3f3a90..1981a78cb78 100644 --- a/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPusherTest.java +++ b/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPusherTest.java @@ -21,8 +21,6 @@ package org.apache.druid.storage.azure; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.io.Files; import com.microsoft.azure.storage.StorageException; import org.apache.druid.jackson.DefaultObjectMapper; @@ -41,6 +39,8 @@ import org.junit.rules.TemporaryFolder; import java.io.File; import java.io.IOException; import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.HashMap; import java.util.Map; import static org.easymock.EasyMock.expectLastCall; @@ -107,9 +107,9 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport "foo", Intervals.of("2015/2016"), "0", - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + new HashMap<>(), + new ArrayList<>(), + new ArrayList<>(), NoneShardSpec.instance(), 0, size diff --git a/extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/storage/cloudfiles/CloudFilesDataSegmentPusherTest.java b/extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/storage/cloudfiles/CloudFilesDataSegmentPusherTest.java index cfddc661654..8a2e3f697eb 100644 --- a/extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/storage/cloudfiles/CloudFilesDataSegmentPusherTest.java +++ 
b/extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/storage/cloudfiles/CloudFilesDataSegmentPusherTest.java @@ -19,8 +19,6 @@ package org.apache.druid.storage.cloudfiles; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.io.Files; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.Intervals; @@ -35,6 +33,8 @@ import org.junit.Test; import org.junit.rules.TemporaryFolder; import java.io.File; +import java.util.ArrayList; +import java.util.HashMap; /** */ @@ -75,9 +75,9 @@ public class CloudFilesDataSegmentPusherTest "foo", Intervals.of("2015/2016"), "0", - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + new HashMap<>(), + new ArrayList<>(), + new ArrayList<>(), NoneShardSpec.instance(), 0, size diff --git a/extensions-contrib/google-extensions/src/main/java/org/apache/druid/storage/google/GoogleDataSegmentFinder.java b/extensions-contrib/google-extensions/src/main/java/org/apache/druid/storage/google/GoogleDataSegmentFinder.java index 888a2364bb4..25a43f14164 100644 --- a/extensions-contrib/google-extensions/src/main/java/org/apache/druid/storage/google/GoogleDataSegmentFinder.java +++ b/extensions-contrib/google-extensions/src/main/java/org/apache/druid/storage/google/GoogleDataSegmentFinder.java @@ -25,7 +25,6 @@ import com.google.api.services.storage.Storage; import com.google.api.services.storage.model.Objects; import com.google.api.services.storage.model.StorageObject; import com.google.common.base.Throwables; -import com.google.common.collect.Sets; import com.google.inject.Inject; import org.apache.druid.java.util.common.logger.Logger; import org.apache.druid.segment.loading.DataSegmentFinder; @@ -36,6 +35,7 @@ import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -63,7 +63,7 @@ public class GoogleDataSegmentFinder implements DataSegmentFinder @Override public Set findSegments(String workingDirPath, boolean updateDescriptor) throws SegmentLoadingException { - final Set segments = Sets.newHashSet(); + final Set segments = new HashSet<>(); try { Storage.Objects.List listObjects = storage.list(config.getBucket()); diff --git a/extensions-contrib/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleDataSegmentPusherTest.java b/extensions-contrib/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleDataSegmentPusherTest.java index ba5d7be4873..cd57654bce6 100644 --- a/extensions-contrib/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleDataSegmentPusherTest.java +++ b/extensions-contrib/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleDataSegmentPusherTest.java @@ -21,8 +21,6 @@ package org.apache.druid.storage.google; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.io.Files; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.Intervals; @@ -37,6 +35,8 @@ import org.junit.Test; import org.junit.rules.TemporaryFolder; import java.io.File; +import java.util.ArrayList; +import java.util.HashMap; import static org.easymock.EasyMock.expectLastCall; @@ -78,9 +78,9 @@ public class GoogleDataSegmentPusherTest extends EasyMockSupport 
"foo", Intervals.of("2015/2016"), "0", - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + new HashMap<>(), + new ArrayList<>(), + new ArrayList<>(), new NoneShardSpec(), 0, size diff --git a/extensions-contrib/graphite-emitter/src/test/java/org/apache/druid/emitter/graphite/WhiteListBasedConverterTest.java b/extensions-contrib/graphite-emitter/src/test/java/org/apache/druid/emitter/graphite/WhiteListBasedConverterTest.java index ed91e0cccdb..eda7764231f 100644 --- a/extensions-contrib/graphite-emitter/src/test/java/org/apache/druid/emitter/graphite/WhiteListBasedConverterTest.java +++ b/extensions-contrib/graphite-emitter/src/test/java/org/apache/druid/emitter/graphite/WhiteListBasedConverterTest.java @@ -19,7 +19,6 @@ package org.apache.druid.emitter.graphite; -import com.google.common.collect.Maps; import junitparams.JUnitParamsRunner; import junitparams.Parameters; import org.apache.commons.io.IOUtils; @@ -38,6 +37,7 @@ import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; +import java.util.HashMap; @RunWith(JUnitParamsRunner.class) @@ -65,7 +65,7 @@ public class WhiteListBasedConverterTest EasyMock.expect(event.getHost()).andReturn(hostname).anyTimes(); EasyMock.expect(event.getService()).andReturn(serviceName).anyTimes(); EasyMock.expect(event.getCreatedTime()).andReturn(createdTime).anyTimes(); - EasyMock.expect(event.getUserDims()).andReturn(Maps.newHashMap()).anyTimes(); + EasyMock.expect(event.getUserDims()).andReturn(new HashMap<>()).anyTimes(); EasyMock.expect(event.getValue()).andReturn(10).anyTimes(); } diff --git a/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/org/apache/druid/firehose/kafka/KafkaEightSimpleConsumerFirehoseFactory.java b/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/org/apache/druid/firehose/kafka/KafkaEightSimpleConsumerFirehoseFactory.java index afd135f81da..a68150a0273 100644 --- a/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/org/apache/druid/firehose/kafka/KafkaEightSimpleConsumerFirehoseFactory.java +++ b/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/org/apache/druid/firehose/kafka/KafkaEightSimpleConsumerFirehoseFactory.java @@ -22,7 +22,6 @@ package org.apache.druid.firehose.kafka; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; -import com.google.common.collect.Maps; import com.google.common.io.Closeables; import org.apache.druid.data.input.ByteBufferInputRowParser; import org.apache.druid.data.input.Committer; @@ -38,6 +37,7 @@ import java.io.Closeable; import java.io.IOException; import java.nio.ByteBuffer; import java.util.Collections; +import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -123,7 +123,7 @@ public class KafkaEightSimpleConsumerFirehoseFactory implements private Map loadOffsetFromPreviousMetaData(Object lastCommit) { - Map offsetMap = Maps.newHashMap(); + Map offsetMap = new HashMap<>(); if (lastCommit == null) { return offsetMap; } @@ -183,7 +183,7 @@ public class KafkaEightSimpleConsumerFirehoseFactory implements private volatile Iterator nextIterator = Collections.emptyIterator(); { - lastOffsetPartitions = Maps.newHashMap(); + lastOffsetPartitions = new HashMap<>(); lastOffsetPartitions.putAll(lastOffsets); } @@ -250,7 +250,7 @@ public class KafkaEightSimpleConsumerFirehoseFactory implements @Override public Committer makeCommitter() { - final Map 
offsets = Maps.newHashMap(lastOffsetPartitions); + final Map offsets = new HashMap<>(lastOffsetPartitions); return new Committer() { diff --git a/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisor.java b/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisor.java index d4e7f08d657..41e58e9554a 100644 --- a/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisor.java +++ b/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisor.java @@ -22,10 +22,8 @@ package org.apache.druid.indexing.materializedview; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Optional; import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; import com.google.common.collect.MapDifference; import com.google.common.collect.Maps; -import com.google.common.collect.Sets; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningScheduledExecutorService; import com.google.common.util.concurrent.MoreExecutors; @@ -55,10 +53,14 @@ import org.joda.time.Interval; import javax.annotation.Nullable; import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedMap; +import java.util.TreeMap; import java.util.concurrent.TimeUnit; public class MaterializedViewSupervisor implements Supervisor @@ -79,8 +81,8 @@ public class MaterializedViewSupervisor implements Supervisor private final String supervisorId; private final int maxTaskCount; private final long minDataLagMs; - private final Map runningTasks = Maps.newHashMap(); - private final Map runningVersion = Maps.newHashMap(); + private final Map runningTasks = new HashMap<>(); + private final Map runningVersion = new HashMap<>(); // taskLock is used to synchronize runningTask and runningVersion private final Object taskLock = new Object(); // stateLock is used to synchronize materializedViewSupervisor's status @@ -90,7 +92,7 @@ public class MaterializedViewSupervisor implements Supervisor private ListeningScheduledExecutorService exec = null; // In the missing intervals, baseDataSource has data but derivedDataSource does not, which means // data in these intervals of derivedDataSource needs to be rebuilt. 
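Nearly every replacement in this patch takes one of two shapes: Guava's no-argument factories become JDK diamond constructors, and the single-argument copy factories become the matching copy constructors. A minimal sketch of both shapes; the type parameters and names below are illustrative, not taken from any single call site in the patch:

    import java.util.HashMap;
    import java.util.Map;

    class GuavaToJdkMaps
    {
      static Map<String, Long> migrate(Map<String, Long> existing)
      {
        // was Maps.newHashMap(): the no-arg factory becomes the diamond constructor
        Map<String, Long> fresh = new HashMap<>();
        // was Maps.newHashMap(existing): the copy factory becomes the copy constructor
        Map<String, Long> copy = new HashMap<>(existing);
        fresh.putAll(copy);
        return fresh;
      }
    }

Both forms are behavior-preserving: the Guava factories were thin convenience wrappers around exactly these constructors, dating from before Java 7's diamond made the constructors just as terse.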
- private Set missInterval = Sets.newHashSet(); + private Set missInterval = new HashSet<>(); public MaterializedViewSupervisor( TaskMaster taskMaster, @@ -325,13 +327,11 @@ public class MaterializedViewSupervisor implements Supervisor // use max created_date of base segments as the version of derivative segments Map maxCreatedDate = baseSegmentsSnapshot.lhs; Map derivativeVersion = derivativeSegmentsSnapshot.lhs; - SortedMap sortedToBuildInterval = Maps.newTreeMap( - Comparators.inverse(Comparators.intervalsByStartThenEnd()) - ); + SortedMap sortedToBuildInterval = new TreeMap<>(Comparators.inverse(Comparators.intervalsByStartThenEnd())); // find the intervals to drop and to build MapDifference difference = Maps.difference(maxCreatedDate, derivativeVersion); - Map toBuildInterval = Maps.newHashMap(difference.entriesOnlyOnLeft()); - Map toDropInterval = Maps.newHashMap(difference.entriesOnlyOnRight()); + Map toBuildInterval = new HashMap<>(difference.entriesOnlyOnLeft()); + Map toDropInterval = new HashMap<>(difference.entriesOnlyOnRight()); // if some intervals are in running tasks and the versions are the same, remove it from toBuildInterval // if some intervals are in running tasks, but the versions are different, stop the task. for (Interval interval : runningVersion.keySet()) { @@ -390,12 +390,12 @@ public class MaterializedViewSupervisor implements Supervisor List snapshot ) { - Map versions = Maps.newHashMap(); - Map> segments = Maps.newHashMap(); + Map versions = new HashMap<>(); + Map> segments = new HashMap<>(); for (DataSegment segment : snapshot) { Interval interval = segment.getInterval(); versions.put(interval, segment.getVersion()); - segments.putIfAbsent(interval, Lists.newArrayList()); + segments.putIfAbsent(interval, new ArrayList<>()); segments.get(interval).add(segment); } return new Pair<>(versions, segments); @@ -410,8 +410,8 @@ public class MaterializedViewSupervisor implements Supervisor .map(DataSegment::getInterval) .max(Comparators.intervalsByStartThenEnd()) .get(); - Map maxCreatedDate = Maps.newHashMap(); - Map> segments = Maps.newHashMap(); + Map maxCreatedDate = new HashMap<>(); + Map> segments = new HashMap<>(); for (Pair entry : snapshot) { DataSegment segment = entry.lhs; String createDate = entry.rhs; @@ -426,7 +426,7 @@ public class MaterializedViewSupervisor implements Supervisor DateTimes.of(maxCreatedDate.getOrDefault(interval, DateTimes.MIN.toString())) ).toString() ); - segments.putIfAbsent(interval, Lists.newArrayList()); + segments.putIfAbsent(interval, new ArrayList<>()); segments.get(interval).add(segment); } return new Pair<>(maxCreatedDate, segments); diff --git a/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorSpec.java b/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorSpec.java index f6f824c3f07..3a9a1137b31 100644 --- a/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorSpec.java +++ b/extensions-contrib/materialized-view-maintenance/src/main/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorSpec.java @@ -25,8 +25,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Maps; -import 
com.google.common.collect.Sets; import org.apache.commons.codec.digest.DigestUtils; import org.apache.druid.data.input.impl.DimensionSchema; import org.apache.druid.data.input.impl.DimensionsSpec; @@ -54,6 +52,8 @@ import org.apache.druid.server.security.AuthorizerMapper; import org.apache.druid.timeline.DataSegment; import org.joda.time.Interval; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -131,7 +131,7 @@ public class MaterializedViewSupervisorSpec implements SupervisorSpec this.hadoopCoordinates = hadoopCoordinates; this.hadoopDependencyCoordinates = hadoopDependencyCoordinates; this.classpathPrefix = classpathPrefix; - this.context = context == null ? Maps.newHashMap() : context; + this.context = context == null ? new HashMap<>() : context; this.objectMapper = objectMapper; this.taskMaster = taskMaster; this.taskStorage = taskStorage; @@ -143,11 +143,11 @@ public class MaterializedViewSupervisorSpec implements SupervisorSpec this.config = config; this.suspended = suspended != null ? suspended : false; - this.metrics = Sets.newHashSet(); + this.metrics = new HashSet<>(); for (AggregatorFactory aggregatorFactory : aggregators) { metrics.add(aggregatorFactory.getName()); } - this.dimensions = Sets.newHashSet(); + this.dimensions = new HashSet<>(); for (DimensionSchema schema : dimensionsSpec.getDimensions()) { dimensions.add(schema.getName()); } @@ -158,10 +158,10 @@ public class MaterializedViewSupervisorSpec implements SupervisorSpec String taskId = StringUtils.format("%s_%s_%s", TASK_PREFIX, dataSourceName, DateTimes.nowUtc()); // generate parser - Map parseSpec = Maps.newHashMap(); + Map parseSpec = new HashMap<>(); parseSpec.put("format", "timeAndDims"); parseSpec.put("dimensionsSpec", dimensionsSpec); - Map parser = Maps.newHashMap(); + Map parser = new HashMap<>(); parser.put("type", "map"); parser.put("parseSpec", parseSpec); @@ -221,7 +221,7 @@ public class MaterializedViewSupervisorSpec implements SupervisorSpec ); // generate HadoopIOConfig - Map inputSpec = Maps.newHashMap(); + Map inputSpec = new HashMap<>(); inputSpec.put("type", "dataSource"); inputSpec.put("ingestionSpec", datasourceIngestionSpec); HadoopIOConfig hadoopIOConfig = new HadoopIOConfig(inputSpec, null, null); diff --git a/extensions-contrib/materialized-view-maintenance/src/test/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorTest.java b/extensions-contrib/materialized-view-maintenance/src/test/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorTest.java index 505c2665475..7b575f01dd7 100644 --- a/extensions-contrib/materialized-view-maintenance/src/test/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorTest.java +++ b/extensions-contrib/materialized-view-maintenance/src/test/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorTest.java @@ -24,7 +24,6 @@ import com.fasterxml.jackson.databind.jsontype.NamedType; import com.google.common.base.Optional; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Maps; import com.google.common.collect.Sets; import org.apache.druid.data.input.impl.DimensionsSpec; import org.apache.druid.data.input.impl.StringDimensionSchema; @@ -56,6 +55,7 @@ import org.junit.rules.ExpectedException; import java.io.IOException; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import 
java.util.Set; @@ -156,7 +156,7 @@ public class MaterializedViewSupervisorTest expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); Pair, Map>> toBuildInterval = supervisor.checkSegments(); - Map> expectedSegments = Maps.newHashMap(); + Map> expectedSegments = new HashMap<>(); expectedSegments.put( Intervals.of("2015-01-01T00Z/2015-01-02T00Z"), Collections.singletonList( diff --git a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DataSourceOptimizer.java b/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DataSourceOptimizer.java index 6de44792b4d..bbd6aa8a4f2 100644 --- a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DataSourceOptimizer.java +++ b/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DataSourceOptimizer.java @@ -21,9 +21,6 @@ package org.apache.druid.query.materializedview; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSortedSet; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; import com.google.inject.Inject; import org.apache.druid.client.TimelineServerView; import org.apache.druid.query.Query; @@ -35,7 +32,10 @@ import org.apache.druid.query.topn.TopNQuery; import org.apache.druid.timeline.TimelineObjectHolder; import org.joda.time.Interval; +import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -100,7 +100,7 @@ public class DataSourceOptimizer // get all fields which the query required Set requiredFields = MaterializedViewUtils.getRequiredFields(query); - Set derivativesWithRequiredFields = Sets.newHashSet(); + Set derivativesWithRequiredFields = new HashSet<>(); for (DerivativeDataSource derivativeDataSource : derivatives) { derivativesHitCount.putIfAbsent(derivativeDataSource.getName(), new AtomicLong(0)); if (derivativeDataSource.getColumns().containsAll(requiredFields)) { @@ -116,7 +116,7 @@ public class DataSourceOptimizer return Collections.singletonList(query); } - List queries = Lists.newArrayList(); + List queries = new ArrayList<>(); List remainingQueryIntervals = (List) query.getIntervals(); for (DerivativeDataSource derivativeDataSource : ImmutableSortedSet.copyOf(derivativesWithRequiredFields)) { @@ -187,10 +187,10 @@ public class DataSourceOptimizer finally { lock.writeLock().unlock(); } - List stats = Lists.newArrayList(); + List stats = new ArrayList<>(); Map> baseToDerivatives = DerivativeDataSourceManager.getAllDerivatives(); for (Map.Entry> entry : baseToDerivatives.entrySet()) { - Map derivativesStat = Maps.newHashMap(); + Map derivativesStat = new HashMap<>(); for (DerivativeDataSource derivative : entry.getValue()) { derivativesStat.put( derivative.getName(), diff --git a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DerivativeDataSourceManager.java b/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DerivativeDataSourceManager.java index 606132c871d..45ab136e443 100644 --- 
a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DerivativeDataSourceManager.java +++ b/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DerivativeDataSourceManager.java @@ -23,7 +23,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Supplier; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Sets; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningScheduledExecutorService; import com.google.common.util.concurrent.MoreExecutors; @@ -52,9 +51,11 @@ import org.skife.jdbi.v2.tweak.ResultSetMapper; import java.io.IOException; import java.sql.ResultSet; import java.sql.SQLException; +import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.SortedSet; +import java.util.TreeSet; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; @@ -144,7 +145,7 @@ public class DerivativeDataSourceManager public static ImmutableSet getDerivatives(String datasource) { - return ImmutableSet.copyOf(derivativesRef.get().getOrDefault(datasource, Sets.newTreeSet())); + return ImmutableSet.copyOf(derivativesRef.get().getOrDefault(datasource, new TreeSet<>())); } public static ImmutableMap> getAllDerivatives() @@ -199,7 +200,7 @@ public class DerivativeDataSourceManager ConcurrentHashMap> newDerivatives = new ConcurrentHashMap<>(); for (DerivativeDataSource derivative : derivativeDataSources) { - newDerivatives.putIfAbsent(derivative.getBaseDataSource(), Sets.newTreeSet()); + newDerivatives.putIfAbsent(derivative.getBaseDataSource(), new TreeSet<>()); newDerivatives.get(derivative.getBaseDataSource()).add(derivative); } ConcurrentHashMap> current; @@ -227,7 +228,7 @@ public class DerivativeDataSourceManager { return connector.retryWithHandle( new HandleCallback() { - Set intervals = Sets.newHashSet(); + Set intervals = new HashSet<>(); long totalSize = 0; @Override public Long withHandle(Handle handle) diff --git a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewUtils.java b/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewUtils.java index ea9e442c566..92eff78a031 100644 --- a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewUtils.java +++ b/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/MaterializedViewUtils.java @@ -19,7 +19,6 @@ package org.apache.druid.query.materializedview; -import com.google.common.collect.Lists; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.java.util.common.JodaUtils; import org.apache.druid.query.Query; @@ -31,6 +30,7 @@ import org.apache.druid.query.timeseries.TimeseriesQuery; import org.apache.druid.query.topn.TopNQuery; import org.joda.time.Interval; +import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.List; @@ -106,7 +106,7 @@ public class MaterializedViewUtils } Iterator it1 = JodaUtils.condenseIntervals(interval1).iterator(); Iterator it2 = JodaUtils.condenseIntervals(interval2).iterator(); - List remaining = Lists.newArrayList(); + List remaining = new ArrayList<>(); 
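The sorted-collection call sites follow the same pattern with an argument: Maps.newTreeMap(comparator) becomes the TreeMap comparator constructor, and Sets.newTreeSet() the no-argument TreeSet constructor, with the diamond still inferring the type arguments. A short sketch under assumed key and value types:

    import java.util.Comparator;
    import java.util.SortedMap;
    import java.util.SortedSet;
    import java.util.TreeMap;
    import java.util.TreeSet;

    class GuavaToJdkSorted
    {
      static SortedMap<String, Integer> newestFirst()
      {
        // was Maps.newTreeMap(comparator): the comparator moves into the constructor
        SortedMap<String, Integer> map = new TreeMap<>(Comparator.<String>reverseOrder());
        map.put("2015", 1);
        map.put("2016", 2);
        return map;
      }

      static SortedSet<String> naturalOrder()
      {
        // was Sets.newTreeSet(): natural ordering, no constructor argument
        return new TreeSet<>();
      }
    }

One nuance is unchanged by the rewrite: in getOrDefault(datasource, new TreeSet<>()) the default set is still constructed eagerly whether or not the key is present, exactly as the Sets.newTreeSet() default was.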
Interval currInterval1 = it1.next(); Interval currInterval2 = it2.next(); long start1 = currInterval1.getStartMillis(); diff --git a/extensions-contrib/orc-extensions/src/main/java/org/apache/druid/data/input/orc/OrcHadoopInputRowParser.java b/extensions-contrib/orc-extensions/src/main/java/org/apache/druid/data/input/orc/OrcHadoopInputRowParser.java index c90834af612..96a7e0bcfdc 100644 --- a/extensions-contrib/orc-extensions/src/main/java/org/apache/druid/data/input/orc/OrcHadoopInputRowParser.java +++ b/extensions-contrib/orc-extensions/src/main/java/org/apache/druid/data/input/orc/OrcHadoopInputRowParser.java @@ -25,7 +25,6 @@ import com.google.common.base.Function; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import org.apache.druid.data.input.InputRow; import org.apache.druid.data.input.MapBasedInputRow; import org.apache.druid.data.input.impl.InputRowParser; @@ -50,6 +49,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.joda.time.DateTime; import javax.annotation.Nullable; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; @@ -95,7 +95,7 @@ public class OrcHadoopInputRowParser implements InputRowParser @Override public List parseBatch(OrcStruct input) { - Map map = Maps.newHashMap(); + Map map = new HashMap<>(); List fields = oip.getAllStructFieldRefs(); for (StructField field : fields) { ObjectInspector objectInspector = field.getFieldObjectInspector(); diff --git a/extensions-contrib/orc-extensions/src/test/java/org/apache/druid/data/input/orc/OrcIndexGeneratorJobTest.java b/extensions-contrib/orc-extensions/src/test/java/org/apache/druid/data/input/orc/OrcIndexGeneratorJobTest.java index 3c70531e64c..9f07a235e87 100644 --- a/extensions-contrib/orc-extensions/src/test/java/org/apache/druid/data/input/orc/OrcIndexGeneratorJobTest.java +++ b/extensions-contrib/orc-extensions/src/test/java/org/apache/druid/data/input/orc/OrcIndexGeneratorJobTest.java @@ -23,7 +23,6 @@ import com.fasterxml.jackson.databind.jsontype.NamedType; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.io.Files; import org.apache.druid.data.input.impl.DimensionsSpec; import org.apache.druid.data.input.impl.InputRowParser; @@ -75,10 +74,12 @@ import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; +import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.TreeMap; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; @@ -322,11 +323,11 @@ public class OrcIndexGeneratorJobTest Integer[][][] shardInfoForEachShard ) { - Map> shardSpecs = Maps.newTreeMap(DateTimeComparator.getInstance()); + Map> shardSpecs = new TreeMap<>(DateTimeComparator.getInstance()); int shardCount = 0; int segmentNum = 0; for (Interval segmentGranularity : config.getSegmentGranularIntervals().get()) { - List specs = Lists.newArrayList(); + List specs = new ArrayList<>(); for (Integer[] shardInfo : shardInfoForEachShard[segmentNum++]) { specs.add(new HashBasedNumberedShardSpec(shardInfo[0], shardInfo[1], null, HadoopDruidIndexerConfig.JSON_MAPPER)); } diff --git 
a/extensions-contrib/parquet-extensions/src/main/java/org/apache/druid/data/input/parquet/ParquetHadoopInputRowParser.java b/extensions-contrib/parquet-extensions/src/main/java/org/apache/druid/data/input/parquet/ParquetHadoopInputRowParser.java index e592212f6f4..baa738389f3 100755 --- a/extensions-contrib/parquet-extensions/src/main/java/org/apache/druid/data/input/parquet/ParquetHadoopInputRowParser.java +++ b/extensions-contrib/parquet-extensions/src/main/java/org/apache/druid/data/input/parquet/ParquetHadoopInputRowParser.java @@ -21,7 +21,6 @@ package org.apache.druid.data.input.parquet; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; import org.apache.avro.LogicalType; import org.apache.avro.LogicalTypes; import org.apache.avro.Schema; @@ -36,6 +35,7 @@ import org.apache.druid.java.util.common.DateTimes; import org.joda.time.DateTime; import javax.annotation.Nullable; +import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; @@ -57,7 +57,7 @@ public class ParquetHadoopInputRowParser implements InputRowParser dimensionSchema = parseSpec.getDimensionsSpec().getDimensions(); - this.dimensions = Lists.newArrayList(); + this.dimensions = new ArrayList<>(); for (DimensionSchema dim : dimensionSchema) { this.dimensions.add(dim.getName()); } diff --git a/extensions-contrib/parquet-extensions/src/main/java/org/apache/parquet/avro/DruidParquetReadSupport.java b/extensions-contrib/parquet-extensions/src/main/java/org/apache/parquet/avro/DruidParquetReadSupport.java index 0ee31437874..954c31cc0da 100755 --- a/extensions-contrib/parquet-extensions/src/main/java/org/apache/parquet/avro/DruidParquetReadSupport.java +++ b/extensions-contrib/parquet-extensions/src/main/java/org/apache/parquet/avro/DruidParquetReadSupport.java @@ -21,8 +21,6 @@ package org.apache.parquet.avro; //CHECKSTYLE.ON: PackageName -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; import org.apache.avro.Schema; import org.apache.avro.generic.GenericRecord; import org.apache.druid.data.input.impl.DimensionSchema; @@ -35,6 +33,8 @@ import org.apache.parquet.io.api.RecordMaterializer; import org.apache.parquet.schema.MessageType; import org.apache.parquet.schema.Type; +import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -62,17 +62,17 @@ public class DruidParquetReadSupport extends AvroReadSupport String tsField = config.getParser().getParseSpec().getTimestampSpec().getTimestampColumn(); List dimensionSchema = config.getParser().getParseSpec().getDimensionsSpec().getDimensions(); - Set dimensions = Sets.newHashSet(); + Set dimensions = new HashSet<>(); for (DimensionSchema dim : dimensionSchema) { dimensions.add(dim.getName()); } - Set metricsFields = Sets.newHashSet(); + Set metricsFields = new HashSet<>(); for (AggregatorFactory agg : config.getSchema().getDataSchema().getAggregators()) { metricsFields.addAll(agg.requiredFields()); } - List partialFields = Lists.newArrayList(); + List partialFields = new ArrayList<>(); for (Type type : fullSchema.getFields()) { if (tsField.equals(type.getName()) diff --git a/extensions-contrib/parquet-extensions/src/test/java/org/apache/druid/data/input/parquet/DruidParquetInputTest.java b/extensions-contrib/parquet-extensions/src/test/java/org/apache/druid/data/input/parquet/DruidParquetInputTest.java index 
8b3ff2e09fa..24346275b80 100644 --- a/extensions-contrib/parquet-extensions/src/test/java/org/apache/druid/data/input/parquet/DruidParquetInputTest.java +++ b/extensions-contrib/parquet-extensions/src/test/java/org/apache/druid/data/input/parquet/DruidParquetInputTest.java @@ -18,7 +18,6 @@ */ package org.apache.druid.data.input.parquet; -import com.google.common.collect.Lists; import org.apache.avro.generic.GenericRecord; import org.apache.avro.util.Utf8; import org.apache.druid.data.input.InputRow; @@ -38,6 +37,7 @@ import org.junit.Test; import java.io.File; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import static org.junit.Assert.assertEquals; @@ -129,7 +129,7 @@ public class DruidParquetInputTest TaskAttemptContext context = new TaskAttemptContextImpl(job.getConfiguration(), new TaskAttemptID()); try (RecordReader reader = inputFormat.createRecordReader(split, context)) { - List records = Lists.newArrayList(); + List records = new ArrayList<>(); InputRowParser parser = config.getParser(); reader.initialize(split, context); diff --git a/extensions-contrib/time-min-max/src/test/java/org/apache/druid/query/aggregation/TimestampGroupByAggregationTest.java b/extensions-contrib/time-min-max/src/test/java/org/apache/druid/query/aggregation/TimestampGroupByAggregationTest.java index 0c537468039..1a1165399ef 100644 --- a/extensions-contrib/time-min-max/src/test/java/org/apache/druid/query/aggregation/TimestampGroupByAggregationTest.java +++ b/extensions-contrib/time-min-max/src/test/java/org/apache/druid/query/aggregation/TimestampGroupByAggregationTest.java @@ -43,6 +43,7 @@ import org.junit.runners.Parameterized; import java.io.File; import java.io.IOException; import java.sql.Timestamp; +import java.util.ArrayList; import java.util.List; import java.util.zip.ZipFile; @@ -62,7 +63,7 @@ public class TimestampGroupByAggregationTest @Parameterized.Parameters(name = "{index}: Test for {0}, config = {1}") public static Iterable constructorFeeder() { - final List constructors = Lists.newArrayList(); + final List constructors = new ArrayList<>(); final List> partialConstructors = ImmutableList.of( ImmutableList.of("timeMin", "tmin", "time_min", DateTimes.of("2011-01-12T01:00:00.000Z")), diff --git a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/hll/HllSketchUnionPostAggregator.java b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/hll/HllSketchUnionPostAggregator.java index 137855918e1..65988d7a34b 100644 --- a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/hll/HllSketchUnionPostAggregator.java +++ b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/hll/HllSketchUnionPostAggregator.java @@ -21,24 +21,22 @@ package org.apache.druid.query.aggregation.datasketches.hll; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.collect.Sets; import com.yahoo.sketches.hll.HllSketch; import com.yahoo.sketches.hll.TgtHllType; import com.yahoo.sketches.hll.Union; - import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.query.aggregation.AggregatorUtil; import org.apache.druid.query.aggregation.PostAggregator; import org.apache.druid.query.cache.CacheKeyBuilder; +import javax.annotation.Nullable; import java.util.Comparator; +import java.util.LinkedHashSet; import java.util.List; import 
java.util.Map; import java.util.Objects; import java.util.Set; -import javax.annotation.Nullable; - /** * Returns a union of a given list of sketches. * @author Alexander Saydakov @@ -94,7 +92,7 @@ public class HllSketchUnionPostAggregator implements PostAggregator @Override public Set getDependentFields() { - final Set dependentFields = Sets.newLinkedHashSet(); + final Set dependentFields = new LinkedHashSet<>(); for (final PostAggregator field : fields) { dependentFields.addAll(field.getDependentFields()); } diff --git a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/theta/SketchEstimatePostAggregator.java b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/theta/SketchEstimatePostAggregator.java index 9b3f148f0eb..5bf050dbd03 100644 --- a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/theta/SketchEstimatePostAggregator.java +++ b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/theta/SketchEstimatePostAggregator.java @@ -23,7 +23,6 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.collect.Ordering; -import com.google.common.collect.Sets; import com.google.common.primitives.Doubles; import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.query.aggregation.PostAggregator; @@ -31,6 +30,7 @@ import org.apache.druid.query.aggregation.post.PostAggregatorIds; import org.apache.druid.query.cache.CacheKeyBuilder; import java.util.Comparator; +import java.util.HashSet; import java.util.Map; import java.util.Set; public class SketchEstimatePostAggregator implements PostAggregator @@ -55,7 +55,7 @@ public class SketchEstimatePostAggregator implements PostAggregator @Override public Set getDependentFields() { - Set dependentFields = Sets.newHashSet(); + Set dependentFields = new HashSet<>(); dependentFields.addAll(field.getDependentFields()); return dependentFields; } diff --git a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/theta/SketchSetPostAggregator.java b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/theta/SketchSetPostAggregator.java index 7fc604697fe..da4d17b847b 100644 --- a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/theta/SketchSetPostAggregator.java +++ b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/theta/SketchSetPostAggregator.java @@ -21,7 +21,6 @@ package org.apache.druid.query.aggregation.datasketches.theta; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.collect.Sets; import com.yahoo.sketches.Util; import org.apache.druid.java.util.common.IAE; import org.apache.druid.query.aggregation.AggregatorFactory; @@ -30,6 +29,7 @@ import org.apache.druid.query.aggregation.post.PostAggregatorIds; import org.apache.druid.query.cache.CacheKeyBuilder; import java.util.Comparator; +import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -63,7 +63,7 @@ public class SketchSetPostAggregator implements PostAggregator @Override public Set getDependentFields() { - Set dependentFields = Sets.newLinkedHashSet(); + Set dependentFields = new LinkedHashSet<>(); for 
(PostAggregator field : fields) { dependentFields.addAll(field.getDependentFields()); } diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/HllSketchAggregatorTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/HllSketchAggregatorTest.java index 8f95b354cc0..1eb21ed9cf5 100644 --- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/HllSketchAggregatorTest.java +++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/hll/HllSketchAggregatorTest.java @@ -19,19 +19,6 @@ package org.apache.druid.query.aggregation.datasketches.hll; -import java.io.File; -import java.util.Collection; -import java.util.List; - -import org.junit.Assert; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.TemporaryFolder; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; - -import com.google.common.collect.Lists; - import org.apache.druid.data.input.Row; import org.apache.druid.initialization.DruidModule; import org.apache.druid.java.util.common.granularity.Granularities; @@ -39,6 +26,17 @@ import org.apache.druid.java.util.common.guava.Sequence; import org.apache.druid.query.aggregation.AggregationTestHelper; import org.apache.druid.query.groupby.GroupByQueryConfig; import org.apache.druid.query.groupby.GroupByQueryRunnerTest; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TemporaryFolder; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +import java.io.File; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; @RunWith(Parameterized.class) public class HllSketchAggregatorTest @@ -59,7 +57,7 @@ public class HllSketchAggregatorTest @Parameterized.Parameters(name = "{0}") public static Collection constructorFeeder() { - final List constructors = Lists.newArrayList(); + final List constructors = new ArrayList<>(); for (GroupByQueryConfig config : GroupByQueryRunnerTest.testConfigs()) { constructors.add(new Object[] {config}); } diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/DoublesSketchAggregatorTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/DoublesSketchAggregatorTest.java index 6dab564d5c7..5e321a32887 100644 --- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/DoublesSketchAggregatorTest.java +++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/quantiles/DoublesSketchAggregatorTest.java @@ -20,7 +20,6 @@ package org.apache.druid.query.aggregation.datasketches.quantiles; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.Lists; import org.apache.druid.data.input.Row; import org.apache.druid.initialization.DruidModule; import org.apache.druid.jackson.DefaultObjectMapper; @@ -40,6 +39,7 @@ import org.junit.runners.Parameterized; import java.io.File; import java.io.IOException; +import java.util.ArrayList; import java.util.Collection; import java.util.List; @@ -68,7 +68,7 @@ public class DoublesSketchAggregatorTest @Parameterized.Parameters(name = "{0}") public static Collection constructorFeeder() { - final List constructors = Lists.newArrayList(); + final List constructors = new ArrayList<>(); for 
(GroupByQueryConfig config : GroupByQueryRunnerTest.testConfigs()) { constructors.add(new Object[]{config}); } diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/SketchAggregationTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/SketchAggregationTest.java index b7f1abc1220..1e708128ddb 100644 --- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/SketchAggregationTest.java +++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/SketchAggregationTest.java @@ -53,6 +53,7 @@ import org.junit.runners.Parameterized; import java.io.File; import java.io.IOException; import java.nio.charset.Charset; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Comparator; @@ -82,7 +83,7 @@ public class SketchAggregationTest @Parameterized.Parameters(name = "{0}") public static Collection constructorFeeder() { - final List constructors = Lists.newArrayList(); + final List constructors = new ArrayList<>(); for (GroupByQueryConfig config : GroupByQueryRunnerTest.testConfigs()) { constructors.add(new Object[]{config}); } diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/SketchAggregationWithSimpleDataTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/SketchAggregationWithSimpleDataTest.java index ab357a5f4a0..cee95a6a451 100644 --- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/SketchAggregationWithSimpleDataTest.java +++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/SketchAggregationWithSimpleDataTest.java @@ -22,7 +22,6 @@ package org.apache.druid.query.aggregation.datasketches.theta; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; import com.google.common.io.Files; import org.apache.druid.data.input.MapBasedRow; import org.apache.druid.data.input.Row; @@ -48,6 +47,7 @@ import org.junit.runners.Parameterized; import java.io.File; import java.io.IOException; import java.nio.charset.Charset; +import java.util.ArrayList; import java.util.Collection; import java.util.List; @@ -73,7 +73,7 @@ public class SketchAggregationWithSimpleDataTest @Parameterized.Parameters(name = "{0}") public static Collection constructorFeeder() { - final List constructors = Lists.newArrayList(); + final List constructors = new ArrayList<>(); for (GroupByQueryConfig config : GroupByQueryRunnerTest.testConfigs()) { constructors.add(new Object[]{config}); } diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchAggregationTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchAggregationTest.java index 32daffb7899..91550615a1c 100644 --- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchAggregationTest.java +++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/theta/oldapi/OldApiSketchAggregationTest.java @@ -48,6 +48,7 @@ import org.junit.runners.Parameterized; import 
java.io.File; import java.io.IOException; import java.nio.charset.Charset; +import java.util.ArrayList; import java.util.Collection; import java.util.List; @@ -76,7 +77,7 @@ public class OldApiSketchAggregationTest @Parameterized.Parameters(name = "{0}") public static Collection constructorFeeder() { - final List constructors = Lists.newArrayList(); + final List constructors = new ArrayList<>(); for (GroupByQueryConfig config : GroupByQueryRunnerTest.testConfigs()) { constructors.add(new Object[]{config}); } diff --git a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/tuple/ArrayOfDoublesSketchAggregationTest.java b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/tuple/ArrayOfDoublesSketchAggregationTest.java index e7abdcfa565..bb1e70c40ed 100644 --- a/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/tuple/ArrayOfDoublesSketchAggregationTest.java +++ b/extensions-core/datasketches/src/test/java/org/apache/druid/query/aggregation/datasketches/tuple/ArrayOfDoublesSketchAggregationTest.java @@ -19,7 +19,6 @@ package org.apache.druid.query.aggregation.datasketches.tuple; -import com.google.common.collect.Lists; import com.yahoo.sketches.quantiles.DoublesSketch; import org.apache.druid.data.input.Row; import org.apache.druid.initialization.DruidModule; @@ -38,6 +37,7 @@ import org.junit.runners.Parameterized; import java.io.File; import java.io.IOException; +import java.util.ArrayList; import java.util.Collection; import java.util.List; @@ -61,7 +61,7 @@ public class ArrayOfDoublesSketchAggregationTest @Parameterized.Parameters(name = "{0}") public static Collection constructorFeeder() { - final List constructors = Lists.newArrayList(); + final List constructors = new ArrayList<>(); for (GroupByQueryConfig config : GroupByQueryRunnerTest.testConfigs()) { constructors.add(new Object[] {config}); } diff --git a/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/BasicAuthUtils.java b/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/BasicAuthUtils.java index 22748758a2f..714fe8f3bad 100644 --- a/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/BasicAuthUtils.java +++ b/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/BasicAuthUtils.java @@ -21,7 +21,6 @@ package org.apache.druid.security.basic; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.Maps; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.RE; import org.apache.druid.java.util.common.StringUtils; @@ -41,6 +40,7 @@ import java.security.NoSuchAlgorithmException; import java.security.SecureRandom; import java.security.spec.InvalidKeySpecException; import java.util.Base64; +import java.util.HashMap; import java.util.Map; public class BasicAuthUtils @@ -150,7 +150,7 @@ public class BasicAuthUtils { Map userMap; if (userMapBytes == null) { - userMap = Maps.newHashMap(); + userMap = new HashMap<>(); } else { try { userMap = objectMapper.readValue(userMapBytes, AUTHENTICATOR_USER_MAP_TYPE_REFERENCE); @@ -182,7 +182,7 @@ public class BasicAuthUtils { Map userMap; if (userMapBytes == null) { - userMap = Maps.newHashMap(); + userMap = new HashMap<>(); } else { try { userMap = objectMapper.readValue(userMapBytes, BasicAuthUtils.AUTHORIZER_USER_MAP_TYPE_REFERENCE); @@ 
-211,7 +211,7 @@ public class BasicAuthUtils { Map roleMap; if (roleMapBytes == null) { - roleMap = Maps.newHashMap(); + roleMap = new HashMap<>(); } else { try { roleMap = objectMapper.readValue(roleMapBytes, BasicAuthUtils.AUTHORIZER_ROLE_MAP_TYPE_REFERENCE); diff --git a/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authentication/endpoint/CoordinatorBasicAuthenticatorResourceHandler.java b/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authentication/endpoint/CoordinatorBasicAuthenticatorResourceHandler.java index bdf2f9c94a7..714e3da4e72 100644 --- a/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authentication/endpoint/CoordinatorBasicAuthenticatorResourceHandler.java +++ b/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authentication/endpoint/CoordinatorBasicAuthenticatorResourceHandler.java @@ -21,7 +21,6 @@ package org.apache.druid.security.basic.authentication.endpoint; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Maps; import com.google.inject.Inject; import org.apache.druid.guice.annotations.Smile; import org.apache.druid.java.util.common.StringUtils; @@ -54,7 +53,7 @@ public class CoordinatorBasicAuthenticatorResourceHandler implements BasicAuthen this.storageUpdater = storageUpdater; this.objectMapper = objectMapper; - this.authenticatorMap = Maps.newHashMap(); + this.authenticatorMap = new HashMap<>(); for (Map.Entry authenticatorEntry : authenticatorMapper.getAuthenticatorMap().entrySet()) { final String authenticatorName = authenticatorEntry.getKey(); final Authenticator authenticator = authenticatorEntry.getValue(); diff --git a/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authentication/endpoint/DefaultBasicAuthenticatorResourceHandler.java b/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authentication/endpoint/DefaultBasicAuthenticatorResourceHandler.java index ef6b370775c..4f196f71912 100644 --- a/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authentication/endpoint/DefaultBasicAuthenticatorResourceHandler.java +++ b/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authentication/endpoint/DefaultBasicAuthenticatorResourceHandler.java @@ -20,7 +20,6 @@ package org.apache.druid.security.basic.authentication.endpoint; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Maps; import com.google.inject.Inject; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.logger.Logger; @@ -50,7 +49,7 @@ public class DefaultBasicAuthenticatorResourceHandler implements BasicAuthentica { this.cacheManager = cacheManager; - this.authenticatorMap = Maps.newHashMap(); + this.authenticatorMap = new HashMap<>(); for (Map.Entry authenticatorEntry : authenticatorMapper.getAuthenticatorMap().entrySet()) { final String authenticatorName = authenticatorEntry.getKey(); final Authenticator authenticator = authenticatorEntry.getValue(); diff --git a/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authorization/endpoint/CoordinatorBasicAuthorizerResourceHandler.java b/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authorization/endpoint/CoordinatorBasicAuthorizerResourceHandler.java index 
d3e6e6c221e..6bd951fccdc 100644 --- a/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authorization/endpoint/CoordinatorBasicAuthorizerResourceHandler.java +++ b/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authorization/endpoint/CoordinatorBasicAuthorizerResourceHandler.java @@ -21,7 +21,6 @@ package org.apache.druid.security.basic.authorization.endpoint; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Maps; import com.google.inject.Inject; import org.apache.druid.guice.annotations.Smile; import org.apache.druid.java.util.common.StringUtils; @@ -64,7 +63,7 @@ public class CoordinatorBasicAuthorizerResourceHandler implements BasicAuthorize this.storageUpdater = storageUpdater; this.objectMapper = objectMapper; - this.authorizerMap = Maps.newHashMap(); + this.authorizerMap = new HashMap<>(); for (Map.Entry authorizerEntry : authorizerMapper.getAuthorizerMap().entrySet()) { final String authorizerName = authorizerEntry.getKey(); final Authorizer authorizer = authorizerEntry.getValue(); diff --git a/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authorization/endpoint/DefaultBasicAuthorizerResourceHandler.java b/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authorization/endpoint/DefaultBasicAuthorizerResourceHandler.java index 9280e3a8ed6..94a63b6f85a 100644 --- a/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authorization/endpoint/DefaultBasicAuthorizerResourceHandler.java +++ b/extensions-core/druid-basic-security/src/main/java/org/apache/druid/security/basic/authorization/endpoint/DefaultBasicAuthorizerResourceHandler.java @@ -20,7 +20,6 @@ package org.apache.druid.security.basic.authorization.endpoint; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Maps; import com.google.inject.Inject; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.logger.Logger; @@ -51,7 +50,7 @@ public class DefaultBasicAuthorizerResourceHandler implements BasicAuthorizerRes { this.cacheManager = cacheManager; - this.authorizerMap = Maps.newHashMap(); + this.authorizerMap = new HashMap<>(); for (Map.Entry authorizerEntry : authorizerMapper.getAuthorizerMap().entrySet()) { final String authorizerName = authorizerEntry.getKey(); final Authorizer authorizer = authorizerEntry.getValue(); diff --git a/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/authorization/CoordinatorBasicAuthorizerMetadataStorageUpdaterTest.java b/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/authorization/CoordinatorBasicAuthorizerMetadataStorageUpdaterTest.java index c2fe5944a98..5b565e177a9 100644 --- a/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/authorization/CoordinatorBasicAuthorizerMetadataStorageUpdaterTest.java +++ b/extensions-core/druid-basic-security/src/test/java/org/apache/druid/security/authorization/CoordinatorBasicAuthorizerMetadataStorageUpdaterTest.java @@ -24,7 +24,6 @@ import com.fasterxml.jackson.dataformat.smile.SmileFactory; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Maps; import org.apache.druid.metadata.MetadataStorageTablesConfig; import 
org.apache.druid.metadata.TestDerbyConnector; import org.apache.druid.security.basic.BasicAuthCommonCacheConfig; @@ -46,6 +45,7 @@ import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -121,7 +121,7 @@ public class CoordinatorBasicAuthorizerMetadataStorageUpdaterTest public void testCreateDeleteUser() { updater.createUser(AUTHORIZER_NAME, "druid"); - Map expectedUserMap = Maps.newHashMap(BASE_USER_MAP); + Map expectedUserMap = new HashMap<>(BASE_USER_MAP); expectedUserMap.put("druid", new BasicAuthorizerUser("druid", ImmutableSet.of())); Map actualUserMap = BasicAuthUtils.deserializeAuthorizerUserMap( objectMapper, @@ -160,7 +160,7 @@ public class CoordinatorBasicAuthorizerMetadataStorageUpdaterTest public void testCreateDeleteRole() { updater.createRole(AUTHORIZER_NAME, "druid"); - Map expectedRoleMap = Maps.newHashMap(BASE_ROLE_MAP); + Map expectedRoleMap = new HashMap<>(BASE_ROLE_MAP); expectedRoleMap.put("druid", new BasicAuthorizerRole("druid", ImmutableList.of())); Map actualRoleMap = BasicAuthUtils.deserializeAuthorizerRoleMap( objectMapper, @@ -202,10 +202,10 @@ public class CoordinatorBasicAuthorizerMetadataStorageUpdaterTest updater.createRole(AUTHORIZER_NAME, "druidRole"); updater.assignRole(AUTHORIZER_NAME, "druid", "druidRole"); - Map expectedUserMap = Maps.newHashMap(BASE_USER_MAP); + Map expectedUserMap = new HashMap<>(BASE_USER_MAP); expectedUserMap.put("druid", new BasicAuthorizerUser("druid", ImmutableSet.of("druidRole"))); - Map expectedRoleMap = Maps.newHashMap(BASE_ROLE_MAP); + Map expectedRoleMap = new HashMap<>(BASE_ROLE_MAP); expectedRoleMap.put("druidRole", new BasicAuthorizerRole("druidRole", ImmutableList.of())); Map actualUserMap = BasicAuthUtils.deserializeAuthorizerUserMap( @@ -270,10 +270,10 @@ public class CoordinatorBasicAuthorizerMetadataStorageUpdaterTest updater.createUser(AUTHORIZER_NAME, "druid"); updater.createRole(AUTHORIZER_NAME, "druidRole"); - Map expectedUserMap = Maps.newHashMap(BASE_USER_MAP); + Map expectedUserMap = new HashMap<>(BASE_USER_MAP); expectedUserMap.put("druid", new BasicAuthorizerUser("druid", ImmutableSet.of())); - Map expectedRoleMap = Maps.newHashMap(BASE_ROLE_MAP); + Map expectedRoleMap = new HashMap<>(BASE_ROLE_MAP); expectedRoleMap.put("druidRole", new BasicAuthorizerRole("druidRole", ImmutableList.of())); Map actualUserMap = BasicAuthUtils.deserializeAuthorizerUserMap( @@ -309,10 +309,10 @@ public class CoordinatorBasicAuthorizerMetadataStorageUpdaterTest updater.setPermissions(AUTHORIZER_NAME, "druidRole", permsToAdd); - Map expectedUserMap = Maps.newHashMap(BASE_USER_MAP); + Map expectedUserMap = new HashMap<>(BASE_USER_MAP); expectedUserMap.put("druid", new BasicAuthorizerUser("druid", ImmutableSet.of("druidRole"))); - Map expectedRoleMap = Maps.newHashMap(BASE_ROLE_MAP); + Map expectedRoleMap = new HashMap<>(BASE_ROLE_MAP); expectedRoleMap.put( "druidRole", new BasicAuthorizerRole("druidRole", BasicAuthorizerPermission.makePermissionList(permsToAdd)) diff --git a/extensions-core/hdfs-storage/src/test/java/org/apache/druid/storage/hdfs/HdfsDataSegmentPusherTest.java b/extensions-core/hdfs-storage/src/test/java/org/apache/druid/storage/hdfs/HdfsDataSegmentPusherTest.java index ee99a4b4900..630aa2447fd 100644 --- a/extensions-core/hdfs-storage/src/test/java/org/apache/druid/storage/hdfs/HdfsDataSegmentPusherTest.java +++ 
b/extensions-core/hdfs-storage/src/test/java/org/apache/druid/storage/hdfs/HdfsDataSegmentPusherTest.java @@ -32,8 +32,6 @@ import com.fasterxml.jackson.databind.module.SimpleModule; import com.fasterxml.jackson.databind.ser.std.ToStringSerializer; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.io.Files; import org.apache.druid.indexer.Bucket; import org.apache.druid.indexer.HadoopDruidIndexerConfig; @@ -67,7 +65,9 @@ import org.junit.rules.TemporaryFolder; import java.io.File; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; +import java.util.HashMap; /** */ @@ -150,9 +150,9 @@ public class HdfsDataSegmentPusherTest "foo", Intervals.of("2015/2016"), "0", - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + new HashMap<>(), + new ArrayList<>(), + new ArrayList<>(), NoneShardSpec.instance(), 0, size @@ -195,9 +195,9 @@ public class HdfsDataSegmentPusherTest "foo", Intervals.of("2015/2016"), "0", - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + new HashMap<>(), + new ArrayList<>(), + new ArrayList<>(), new NumberedShardSpec(i, i), 0, size @@ -301,9 +301,9 @@ public class HdfsDataSegmentPusherTest "foo", Intervals.of("2015/2016"), "0", - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + new HashMap<>(), + new ArrayList<>(), + new ArrayList<>(), NoneShardSpec.instance(), 0, size diff --git a/extensions-core/histogram/src/main/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogram.java b/extensions-core/histogram/src/main/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogram.java index d09f5774358..9c5c8e4e156 100644 --- a/extensions-core/histogram/src/main/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogram.java +++ b/extensions-core/histogram/src/main/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogram.java @@ -21,14 +21,13 @@ package org.apache.druid.query.aggregation.histogram; import com.fasterxml.jackson.annotation.JsonValue; import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.primitives.Floats; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -1378,7 +1377,7 @@ public class ApproximateHistogram } else { byte approxCount = (byte) (-1 * count); - Map approx = Maps.newHashMap(); + Map approx = new HashMap<>(); for (int i = 0; i < approxCount; ++i) { final float value = buf.getFloat(); @@ -1394,7 +1393,7 @@ public class ApproximateHistogram byte exactCount = buf.get(); - Map exact = Maps.newHashMap(); + Map exact = new HashMap<>(); for (int i = 0; i < exactCount; ++i) { final float value = buf.getFloat(); @@ -1407,7 +1406,7 @@ public class ApproximateHistogram int binCount = exact.size() + approx.size(); - List pos = Lists.newArrayList(); + List pos = new ArrayList<>(); pos.addAll(exact.keySet()); pos.addAll(approx.keySet()); Collections.sort(pos); diff --git a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java index 9c845fad623..793270a2be6 100644 --- 
diff --git a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java
index 9c845fad623..793270a2be6 100644
--- a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java
+++ b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogramAggregationTest.java
@@ -36,6 +36,7 @@
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
@@ -63,7 +64,7 @@ public class ApproximateHistogramAggregationTest
   @Parameterized.Parameters(name = "{0}")
   public static Collection constructorFeeder()
   {
-    final List constructors = Lists.newArrayList();
+    final List constructors = new ArrayList<>();
     for (GroupByQueryConfig config : GroupByQueryRunnerTest.testConfigs()) {
       constructors.add(new Object[]{config});
     }
diff --git a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogramGroupByQueryTest.java b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogramGroupByQueryTest.java
index ff81674a77a..61acb6c7cc8 100644
--- a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogramGroupByQueryTest.java
+++ b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogramGroupByQueryTest.java
@@ -20,7 +20,6 @@
 package org.apache.druid.query.aggregation.histogram;
 import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
 import org.apache.druid.data.input.Row;
 import org.apache.druid.java.util.common.Pair;
 import org.apache.druid.java.util.common.StringUtils;
@@ -43,6 +42,7 @@
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
@@ -111,7 +111,7 @@ public class ApproximateHistogramGroupByQueryTest
     v1Config.setMaxIntermediateRows(10000);
     v1SingleThreadedConfig.setMaxIntermediateRows(10000);
-    final List constructors = Lists.newArrayList();
+    final List constructors = new ArrayList<>();
     final List configs = ImmutableList.of(
         v1Config,
         v1SingleThreadedConfig,
diff --git a/extensions-core/kafka-indexing-service/src/main/java/org/apache/druid/indexing/kafka/IncrementalPublishingKafkaIndexTaskRunner.java b/extensions-core/kafka-indexing-service/src/main/java/org/apache/druid/indexing/kafka/IncrementalPublishingKafkaIndexTaskRunner.java
index 34eca06c626..a66ad4c726d 100644
--- a/extensions-core/kafka-indexing-service/src/main/java/org/apache/druid/indexing/kafka/IncrementalPublishingKafkaIndexTaskRunner.java
+++ b/extensions-core/kafka-indexing-service/src/main/java/org/apache/druid/indexing/kafka/IncrementalPublishingKafkaIndexTaskRunner.java
@@ -29,7 +29,6 @@
 import com.google.common.base.Preconditions;
 import com.google.common.base.Supplier;
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 import com.google.common.primitives.Longs;
 import com.google.common.util.concurrent.FutureCallback;
@@ -927,7 +926,7 @@ public class IncrementalPublishingKafkaIndexTaskRunner implements KafkaIndexTask
   private Map getTaskCompletionUnparseableEvents()
   {
-    Map unparseableEventsMap = Maps.newHashMap();
+    Map unparseableEventsMap = new HashMap<>();
     List buildSegmentsParseExceptionMessages = IndexTaskUtils.getMessagesFromSavedParseExceptions(
        savedParseExceptions
     );
@@ -939,7 +938,7 @@ public class IncrementalPublishingKafkaIndexTaskRunner implements KafkaIndexTask
   private Map getTaskCompletionRowStats()
   {
-    Map metrics = Maps.newHashMap();
+    Map metrics = new HashMap<>();
     metrics.put(
         RowIngestionMeters.BUILD_SEGMENTS,
         rowIngestionMeters.getTotals()
@@ -974,7 +973,7 @@ public class IncrementalPublishingKafkaIndexTaskRunner implements KafkaIndexTask
   private Set assignPartitionsAndSeekToNext(KafkaConsumer consumer, String topic)
   {
     // Initialize consumer assignment.
-    final Set assignment = Sets.newHashSet();
+    final Set assignment = new HashSet<>();
     for (Map.Entry entry : nextOffsets.entrySet()) {
       final long endOffset = endOffsets.get(entry.getKey());
       if (entry.getValue() < endOffset) {
@@ -1042,7 +1041,7 @@ public class IncrementalPublishingKafkaIndexTaskRunner implements KafkaIndexTask
       TaskToolbox taskToolbox
   ) throws InterruptedException, IOException
   {
-    final Map resetPartitions = Maps.newHashMap();
+    final Map resetPartitions = new HashMap<>();
     boolean doReset = false;
     if (tuningConfig.isResetOffsetAutomatically()) {
       for (Map.Entry outOfRangePartition : outOfRangePartitions.entrySet()) {
@@ -1121,7 +1120,7 @@ public class IncrementalPublishingKafkaIndexTaskRunner implements KafkaIndexTask
   private void sendResetRequestAndWait(Map outOfRangePartitions, TaskToolbox taskToolbox) throws IOException
   {
-    Map partitionOffsetMap = Maps.newHashMap();
+    Map partitionOffsetMap = new HashMap<>();
     for (Map.Entry outOfRangePartition : outOfRangePartitions.entrySet()) {
       partitionOffsetMap.put(outOfRangePartition.getKey().partition(), outOfRangePartition.getValue());
     }
@@ -1292,9 +1291,9 @@ public class IncrementalPublishingKafkaIndexTaskRunner implements KafkaIndexTask
   )
   {
     authorizationCheck(req, Action.READ);
-    Map returnMap = Maps.newHashMap();
-    Map totalsMap = Maps.newHashMap();
-    Map averagesMap = Maps.newHashMap();
+    Map returnMap = new HashMap<>();
+    Map totalsMap = new HashMap<>();
+    Map averagesMap = new HashMap<>();
     totalsMap.put(
         RowIngestionMeters.BUILD_SEGMENTS,
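The stats resource above assembles its response from plain mutable maps, so Maps.newHashMap() drops out with no other change. A self-contained sketch of that shape, assuming String keys and Object values; the key names here are hypothetical stand-ins for the RowIngestionMeters constants:

import java.util.HashMap;
import java.util.Map;

public class RowStatsPayloadExample
{
  static Map<String, Object> buildPayload(Map<String, Object> totals, Map<String, Object> averages)
  {
    // Before: Map<String, Object> returnMap = Maps.newHashMap();
    Map<String, Object> returnMap = new HashMap<>();
    returnMap.put("totals", totals);
    returnMap.put("movingAverages", averages);
    return returnMap;
  }

  public static void main(String[] args)
  {
    Map<String, Object> totals = new HashMap<>();
    totals.put("buildSegments", 123L); // hypothetical meter snapshot
    System.out.println(buildPayload(totals, new HashMap<>()));
  }
}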
diff --git a/extensions-core/kafka-indexing-service/src/main/java/org/apache/druid/indexing/kafka/KafkaDataSourceMetadata.java b/extensions-core/kafka-indexing-service/src/main/java/org/apache/druid/indexing/kafka/KafkaDataSourceMetadata.java
index 3e5bec6d953..16909892771 100644
--- a/extensions-core/kafka-indexing-service/src/main/java/org/apache/druid/indexing/kafka/KafkaDataSourceMetadata.java
+++ b/extensions-core/kafka-indexing-service/src/main/java/org/apache/druid/indexing/kafka/KafkaDataSourceMetadata.java
@@ -21,10 +21,10 @@
 package org.apache.druid.indexing.kafka;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.collect.Maps;
 import org.apache.druid.indexing.overlord.DataSourceMetadata;
 import org.apache.druid.java.util.common.IAE;
+import java.util.HashMap;
 import java.util.Map;
 import java.util.Objects;
@@ -77,7 +77,7 @@ public class KafkaDataSourceMetadata implements DataSourceMetadata
     if (that.getKafkaPartitions().getTopic().equals(kafkaPartitions.getTopic())) {
       // Same topic, merge offsets.
-      final Map newMap = Maps.newHashMap();
+      final Map newMap = new HashMap<>();
       for (Map.Entry entry : kafkaPartitions.getPartitionOffsetMap().entrySet()) {
         newMap.put(entry.getKey(), entry.getValue());
@@ -109,7 +109,7 @@ public class KafkaDataSourceMetadata implements DataSourceMetadata
     if (that.getKafkaPartitions().getTopic().equals(kafkaPartitions.getTopic())) {
       // Same topic, remove partitions present in "that" from "this"
-      final Map newMap = Maps.newHashMap();
+      final Map newMap = new HashMap<>();
       for (Map.Entry entry : kafkaPartitions.getPartitionOffsetMap().entrySet()) {
         if (!that.getKafkaPartitions().getPartitionOffsetMap().containsKey(entry.getKey())) {
diff --git a/extensions-core/kafka-indexing-service/src/main/java/org/apache/druid/indexing/kafka/LegacyKafkaIndexTaskRunner.java b/extensions-core/kafka-indexing-service/src/main/java/org/apache/druid/indexing/kafka/LegacyKafkaIndexTaskRunner.java
index 88dfe70ef7b..5c33efa1e52 100644
--- a/extensions-core/kafka-indexing-service/src/main/java/org/apache/druid/indexing/kafka/LegacyKafkaIndexTaskRunner.java
+++ b/extensions-core/kafka-indexing-service/src/main/java/org/apache/druid/indexing/kafka/LegacyKafkaIndexTaskRunner.java
@@ -25,8 +25,6 @@
 import com.google.common.base.Preconditions;
 import com.google.common.base.Supplier;
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
 import org.apache.druid.data.input.Committer;
 import org.apache.druid.data.input.InputRow;
 import org.apache.druid.data.input.impl.InputRowParser;
@@ -307,7 +305,7 @@ public class LegacyKafkaIndexTaskRunner implements KafkaIndexTaskRunner
     }
     // Set up sequenceNames.
-    final Map sequenceNames = Maps.newHashMap();
+    final Map sequenceNames = new HashMap<>();
     for (Integer partitionNum : nextOffsets.keySet()) {
       sequenceNames.put(partitionNum, StringUtils.format("%s_%s", ioConfig.getBaseSequenceName(), partitionNum));
     }
@@ -599,7 +597,7 @@ public class LegacyKafkaIndexTaskRunner implements KafkaIndexTaskRunner
   private Set assignPartitionsAndSeekToNext(KafkaConsumer consumer, String topic)
   {
     // Initialize consumer assignment.
-    final Set assignment = Sets.newHashSet();
+    final Set assignment = new HashSet<>();
     for (Map.Entry entry : nextOffsets.entrySet()) {
       final long endOffset = endOffsets.get(entry.getKey());
       if (entry.getValue() < endOffset) {
@@ -667,7 +665,7 @@ public class LegacyKafkaIndexTaskRunner implements KafkaIndexTaskRunner
       TaskToolbox taskToolbox
   ) throws InterruptedException, IOException
   {
-    final Map resetPartitions = Maps.newHashMap();
+    final Map resetPartitions = new HashMap<>();
     boolean doReset = false;
     if (tuningConfig.isResetOffsetAutomatically()) {
       for (Map.Entry outOfRangePartition : outOfRangePartitions.entrySet()) {
@@ -708,7 +706,7 @@ public class LegacyKafkaIndexTaskRunner implements KafkaIndexTaskRunner
   private void sendResetRequestAndWait(Map outOfRangePartitions, TaskToolbox taskToolbox) throws IOException
   {
-    Map partitionOffsetMap = Maps.newHashMap();
+    Map partitionOffsetMap = new HashMap<>();
     for (Map.Entry outOfRangePartition : outOfRangePartitions.entrySet()) {
       partitionOffsetMap.put(outOfRangePartition.getKey().partition(), outOfRangePartition.getValue());
     }
@@ -783,7 +781,7 @@ public class LegacyKafkaIndexTaskRunner implements KafkaIndexTaskRunner
   private Map getTaskCompletionUnparseableEvents()
   {
-    Map unparseableEventsMap = Maps.newHashMap();
+    Map unparseableEventsMap = new HashMap<>();
     List buildSegmentsParseExceptionMessages = IndexTaskUtils.getMessagesFromSavedParseExceptions(
         savedParseExceptions
     );
@@ -795,7 +793,7 @@ public class LegacyKafkaIndexTaskRunner implements KafkaIndexTaskRunner
   private Map getTaskCompletionRowStats()
   {
-    Map metrics = Maps.newHashMap();
+    Map metrics = new HashMap<>();
     metrics.put(
         RowIngestionMeters.BUILD_SEGMENTS,
         rowIngestionMeters.getTotals()
@@ -942,9 +940,9 @@ public class LegacyKafkaIndexTaskRunner implements KafkaIndexTaskRunner
   )
   {
     authorizationCheck(req, Action.READ);
-    Map returnMap = Maps.newHashMap();
-    Map totalsMap = Maps.newHashMap();
-    Map averagesMap = Maps.newHashMap();
+    Map returnMap = new HashMap<>();
+    Map totalsMap = new HashMap<>();
+    Map averagesMap = new HashMap<>();
     totalsMap.put(
         RowIngestionMeters.BUILD_SEGMENTS,
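Sets.newHashSet() migrates the same way. A compilable sketch of the assignPartitionsAndSeekToNext loop above, assuming Integer partition ids and Long offsets for the generic types the extraction dropped:

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class PartitionAssignmentExample
{
  // Mirrors the loop above: keep only partitions that still have records to read.
  static Set<Integer> assignment(Map<Integer, Long> nextOffsets, Map<Integer, Long> endOffsets)
  {
    // Before: final Set<Integer> assignment = Sets.newHashSet();
    final Set<Integer> assignment = new HashSet<>();
    for (Map.Entry<Integer, Long> entry : nextOffsets.entrySet()) {
      final long endOffset = endOffsets.get(entry.getKey());
      if (entry.getValue() < endOffset) {
        assignment.add(entry.getKey());
      }
    }
    return assignment;
  }

  public static void main(String[] args)
  {
    Map<Integer, Long> next = new HashMap<>();
    Map<Integer, Long> end = new HashMap<>();
    next.put(0, 5L);
    end.put(0, 10L);
    next.put(1, 10L);
    end.put(1, 10L);
    System.out.println(assignment(next, end)); // prints [0]
  }
}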
diff --git a/extensions-core/kafka-indexing-service/src/main/java/org/apache/druid/indexing/kafka/supervisor/KafkaSupervisor.java b/extensions-core/kafka-indexing-service/src/main/java/org/apache/druid/indexing/kafka/supervisor/KafkaSupervisor.java
index fedc77f5e89..8a199d4a3de 100644
--- a/extensions-core/kafka-indexing-service/src/main/java/org/apache/druid/indexing/kafka/supervisor/KafkaSupervisor.java
+++ b/extensions-core/kafka-indexing-service/src/main/java/org/apache/druid/indexing/kafka/supervisor/KafkaSupervisor.java
@@ -35,7 +35,6 @@
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import com.google.common.primitives.Longs;
 import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
@@ -1084,8 +1083,8 @@ public class KafkaSupervisor implements Supervisor
   private void discoverTasks() throws ExecutionException, InterruptedException, TimeoutException
   {
     int taskCount = 0;
-    List futureTaskIds = Lists.newArrayList();
-    List> futures = Lists.newArrayList();
+    List futureTaskIds = new ArrayList<>();
+    List> futures = new ArrayList<>();
     List tasks = taskStorage.getActiveTasks();
     final Map taskGroupsToVerify = new HashMap<>();
@@ -1441,8 +1440,8 @@ public class KafkaSupervisor implements Supervisor
   private void updateTaskStatus() throws ExecutionException, InterruptedException, TimeoutException
   {
-    final List> futures = Lists.newArrayList();
-    final List futureTaskIds = Lists.newArrayList();
+    final List> futures = new ArrayList<>();
+    final List futureTaskIds = new ArrayList<>();
     // update status (and startTime if unknown) of current tasks in taskGroups
     for (TaskGroup group : taskGroups.values()) {
@@ -1509,8 +1508,8 @@ public class KafkaSupervisor implements Supervisor
   private void checkTaskDuration() throws InterruptedException, ExecutionException, TimeoutException
   {
-    final List>> futures = Lists.newArrayList();
-    final List futureGroupIds = Lists.newArrayList();
+    final List>> futures = new ArrayList<>();
+    final List futureGroupIds = new ArrayList<>();
     for (Entry entry : taskGroups.entrySet()) {
       Integer groupId = entry.getKey();
@@ -1610,7 +1609,7 @@ public class KafkaSupervisor implements Supervisor
     }
     // 2) Pause running tasks
-    final List>> pauseFutures = Lists.newArrayList();
+    final List>> pauseFutures = new ArrayList<>();
     final List pauseTaskIds = ImmutableList.copyOf(taskGroup.taskIds());
     for (final String taskId : pauseTaskIds) {
       pauseFutures.add(taskClient.pauseAsync(taskId));
@@ -1646,7 +1645,7 @@ public class KafkaSupervisor implements Supervisor
     // 4) Set the end offsets for each task to the values from step 3 and resume the tasks. All the tasks should
     //    finish reading and start publishing within a short period, depending on how in sync the tasks were.
-    final List> setEndOffsetFutures = Lists.newArrayList();
+    final List> setEndOffsetFutures = new ArrayList<>();
     final List setEndOffsetTaskIds = ImmutableList.copyOf(taskGroup.taskIds());
     if (setEndOffsetTaskIds.isEmpty()) {
@@ -1711,14 +1710,14 @@ public class KafkaSupervisor implements Supervisor
    */
   private void checkPendingCompletionTasks() throws ExecutionException, InterruptedException, TimeoutException
   {
-    List> futures = Lists.newArrayList();
+    List> futures = new ArrayList<>();
     for (Entry> pendingGroupList : pendingCompletionTaskGroups.entrySet()) {
       boolean stopTasksInTaskGroup = false;
       Integer groupId = pendingGroupList.getKey();
       CopyOnWriteArrayList taskGroupList = pendingGroupList.getValue();
-      List toRemove = Lists.newArrayList();
+      List toRemove = new ArrayList<>();
       for (TaskGroup group : taskGroupList) {
         boolean foundSuccess = false, entireTaskGroupFailed = false;
@@ -1793,7 +1792,7 @@ public class KafkaSupervisor implements Supervisor
   private void checkCurrentTaskState() throws ExecutionException, InterruptedException, TimeoutException
   {
-    List> futures = Lists.newArrayList();
+    List> futures = new ArrayList<>();
     Iterator> iTaskGroups = taskGroups.entrySet().iterator();
     while (iTaskGroups.hasNext()) {
       Entry taskGroupEntry = iTaskGroups.next();
@@ -1914,7 +1913,7 @@ public class KafkaSupervisor implements Supervisor
     }
     TaskGroup group = taskGroups.get(groupId);
-    Map consumerProperties = Maps.newHashMap(ioConfig.getConsumerProperties());
+    Map consumerProperties = new HashMap<>(ioConfig.getConsumerProperties());
     DateTime minimumMessageTime = taskGroups.get(groupId).minimumMessageTime.orNull();
     DateTime maximumMessageTime = taskGroups.get(groupId).maximumMessageTime.orNull();
@@ -2101,7 +2100,7 @@ public class KafkaSupervisor implements Supervisor
       return Futures.immediateFuture(null);
     }
-    final List> futures = Lists.newArrayList();
+    final List> futures = new ArrayList<>();
     for (Entry entry : taskGroup.tasks.entrySet()) {
       final String taskId = entry.getKey();
      final TaskData taskData = entry.getValue();
@@ -2184,7 +2183,7 @@ public class KafkaSupervisor implements Supervisor
         payload
     );
-    List taskReports = Lists.newArrayList();
+    List taskReports = new ArrayList<>();
     try {
       for (TaskGroup taskGroup : taskGroups.values()) {
@@ -2392,7 +2391,7 @@ public class KafkaSupervisor implements Supervisor
   private Map> getCurrentTotalStats()
       throws InterruptedException, ExecutionException, TimeoutException
   {
-    Map> allStats = Maps.newHashMap();
+    Map> allStats = new HashMap<>();
     final List> futures = new ArrayList<>();
     final List> groupAndTaskIds = new ArrayList<>();
@@ -2438,7 +2437,7 @@ public class KafkaSupervisor implements Supervisor
     for (int i = 0; i < results.size(); i++) {
       StatsFromTaskResult result = results.get(i);
       if (result != null) {
-        Map groupMap = allStats.computeIfAbsent(result.getGroupId(), k -> Maps.newHashMap());
+        Map groupMap = allStats.computeIfAbsent(result.getGroupId(), k -> new HashMap<>());
         groupMap.put(result.getTaskId(), result.getStats());
       } else {
         Pair groupAndTaskId = groupAndTaskIds.get(i);
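The computeIfAbsent change in the last hunk is worth a note: the Guava factory only appeared inside the lambda, so the JDK 8 idiom stays identical apart from the allocation. A small sketch (group and task ids hypothetical):

import java.util.HashMap;
import java.util.Map;

public class StatsGroupingExample
{
  public static void main(String[] args)
  {
    Map<String, Map<String, Object>> allStats = new HashMap<>();

    // Before: allStats.computeIfAbsent("group-0", k -> Maps.newHashMap());
    Map<String, Object> groupMap = allStats.computeIfAbsent("group-0", k -> new HashMap<>());

    groupMap.put("task-1", 42); // the nested map is created once, then reused on later calls
    System.out.println(allStats); // {group-0={task-1=42}}
  }
}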
"status"))); futures.add(client.getStatusAsync(testId)); @@ -761,8 +762,8 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport ).times(numRequests); replayAll(); - List expectedUrls = Lists.newArrayList(); - List> futures = Lists.newArrayList(); + List expectedUrls = new ArrayList<>(); + List> futures = new ArrayList<>(); for (String testId : TEST_IDS) { expectedUrls.add(new URL(StringUtils.format(URL_FORMATTER, TEST_HOST, TEST_PORT, testId, "time/start"))); futures.add(client.getStartTimeAsync(testId)); @@ -794,8 +795,8 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport ).times(numRequests); replayAll(); - List expectedUrls = Lists.newArrayList(); - List>> futures = Lists.newArrayList(); + List expectedUrls = new ArrayList<>(); + List>> futures = new ArrayList<>(); for (String testId : TEST_IDS) { expectedUrls.add(new URL(StringUtils.format(URL_FORMATTER, TEST_HOST, TEST_PORT, testId, "offsets/current"))); futures.add(client.getCurrentOffsetsAsync(testId, false)); @@ -827,8 +828,8 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport ).times(numRequests); replayAll(); - List expectedUrls = Lists.newArrayList(); - List>> futures = Lists.newArrayList(); + List expectedUrls = new ArrayList<>(); + List>> futures = new ArrayList<>(); for (String testId : TEST_IDS) { expectedUrls.add(new URL(StringUtils.format(URL_FORMATTER, TEST_HOST, TEST_PORT, testId, "offsets/end"))); futures.add(client.getEndOffsetsAsync(testId)); @@ -860,8 +861,8 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport ).times(numRequests); replayAll(); - List expectedUrls = Lists.newArrayList(); - List> futures = Lists.newArrayList(); + List expectedUrls = new ArrayList<>(); + List> futures = new ArrayList<>(); for (String testId : TEST_IDS) { expectedUrls.add(new URL(StringUtils.format( URL_FORMATTER, @@ -899,8 +900,8 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport ).times(numRequests); replayAll(); - List expectedUrls = Lists.newArrayList(); - List> futures = Lists.newArrayList(); + List expectedUrls = new ArrayList<>(); + List> futures = new ArrayList<>(); for (String testId : TEST_IDS) { expectedUrls.add( new URL( diff --git a/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaIndexTaskTest.java b/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaIndexTaskTest.java index 81aad11f69a..6fdfcd8b935 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaIndexTaskTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaIndexTaskTest.java @@ -30,8 +30,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; import com.google.common.io.Files; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; @@ -162,8 +160,10 @@ import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.nio.charset.StandardCharsets; +import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; @@ -189,7 +189,7 @@ public class KafkaIndexTaskTest private static 
   private static ListeningExecutorService taskExec;
   private static int topicPostfix;
-  private final List runningTasks = Lists.newArrayList();
+  private final List runningTasks = new ArrayList<>();
   private long handoffConditionTimeout = 0;
   private boolean reportParseExceptions = false;
@@ -210,7 +210,7 @@ public class KafkaIndexTaskTest
   private String topic;
   private List> records;
   private final boolean isIncrementalHandoffSupported;
-  private final Set checkpointRequestsHash = Sets.newHashSet();
+  private final Set checkpointRequestsHash = new HashSet<>();
   private File reportsFile;
   private RowIngestionMetersFactory rowIngestionMetersFactory;
@@ -2234,7 +2234,7 @@ public class KafkaIndexTaskTest
       @Override
       public List getLocations()
       {
-        return Lists.newArrayList();
+        return new ArrayList<>();
       }
     };
     toolboxFactory = new TaskToolboxFactory(
@@ -2335,7 +2335,7 @@ public class KafkaIndexTaskTest
     IndexIO indexIO = new TestUtils().getTestIndexIO();
     QueryableIndex index = indexIO.loadIndex(outputLocation);
     DictionaryEncodedColumn theColumn = (DictionaryEncodedColumn) index.getColumnHolder(column).getColumn();
-    List values = Lists.newArrayList();
+    List values = new ArrayList<>();
     for (int i = 0; i < theColumn.length(); i++) {
       int id = theColumn.getSingleValueRow(i);
       String value = theColumn.lookupName(id);
diff --git a/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/test/TestBroker.java b/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/test/TestBroker.java
index c1a06716a3c..4b8e02c2d73 100644
--- a/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/test/TestBroker.java
+++ b/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/test/TestBroker.java
@@ -20,7 +20,6 @@
 package org.apache.druid.indexing.kafka.test;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Maps;
 import com.google.common.io.Files;
 import kafka.server.KafkaConfig;
 import kafka.server.KafkaServer;
@@ -38,6 +37,7 @@
 import javax.annotation.Nullable;
 import java.io.Closeable;
 import java.io.File;
 import java.io.IOException;
+import java.util.HashMap;
 import java.util.Map;
 import java.util.Properties;
 import java.util.concurrent.ThreadLocalRandom;
@@ -102,7 +102,7 @@ public class TestBroker implements Closeable
   public Map producerProperties()
   {
-    final Map props = Maps.newHashMap();
+    final Map props = new HashMap<>();
     props.put("bootstrap.servers", StringUtils.format("localhost:%d", getPort()));
     props.put("key.serializer", ByteArraySerializer.class.getName());
     props.put("value.serializer", ByteArraySerializer.class.getName());
@@ -112,7 +112,7 @@ public class TestBroker implements Closeable
   public Map consumerProperties()
   {
-    final Map props = Maps.newHashMap();
+    final Map props = new HashMap<>();
     props.put("bootstrap.servers", StringUtils.format("localhost:%d", getPort()));
     props.put("key.deserializer", ByteArrayDeserializer.class.getName());
     props.put("value.deserializer", ByteArrayDeserializer.class.getName());
diff --git a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentFinderTest.java b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentFinderTest.java
index 75cb14b361d..d7f8b6d13e4 100644
--- a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentFinderTest.java
+++ b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentFinderTest.java
@@ -36,7 +36,6 @@
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Iterables;
-import com.google.common.collect.Maps;
 import com.google.common.collect.Ordering;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
@@ -354,7 +353,7 @@ public class S3DataSegmentFinderTest
   private static class MockAmazonS3Client extends ServerSideEncryptingAmazonS3
   {
     private final File baseDir;
-    private final Map> storage = Maps.newHashMap();
+    private final Map> storage = new HashMap<>();
     public MockAmazonS3Client(File baseDir)
     {
diff --git a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentMoverTest.java b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentMoverTest.java
index c1319633e94..c4963f2e5af 100644
--- a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentMoverTest.java
+++ b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentMoverTest.java
@@ -35,8 +35,6 @@
 import com.amazonaws.services.s3.model.S3ObjectSummary;
 import com.amazonaws.services.s3.model.StorageClass;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
 import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.java.util.common.MapUtils;
 import org.apache.druid.segment.loading.SegmentLoadingException;
@@ -46,6 +44,7 @@
 import org.junit.Assert;
 import org.junit.Test;
 import java.io.File;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
@@ -182,7 +181,7 @@ public class S3DataSegmentMoverTest
   private static class MockAmazonS3Client extends ServerSideEncryptingAmazonS3
   {
-    Map> storage = Maps.newHashMap();
+    Map> storage = new HashMap<>();
     boolean copied = false;
     boolean deletedOld = false;
@@ -271,7 +270,7 @@ public class S3DataSegmentMoverTest
     public PutObjectResult putObject(String bucketName, String key, File file)
     {
       if (!storage.containsKey(bucketName)) {
-        storage.put(bucketName, Sets.newHashSet());
+        storage.put(bucketName, new HashSet<>());
       }
       storage.get(bucketName).add(key);
       return new PutObjectResult();
diff --git a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentPusherTest.java b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentPusherTest.java
index 8c38a907822..bc57e2a76cc 100644
--- a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentPusherTest.java
+++ b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentPusherTest.java
@@ -27,8 +27,6 @@
 import com.amazonaws.services.s3.model.Permission;
 import com.amazonaws.services.s3.model.PutObjectRequest;
 import com.amazonaws.services.s3.model.PutObjectResult;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import com.google.common.io.Files;
 import org.apache.commons.io.IOUtils;
 import org.apache.druid.jackson.DefaultObjectMapper;
@@ -45,6 +43,8 @@
 import org.junit.rules.TemporaryFolder;
 import java.io.File;
 import java.io.FileInputStream;
+import java.util.ArrayList;
+import java.util.HashMap;
 /**
  */
@@ -133,9 +133,9 @@ public class S3DataSegmentPusherTest
         "foo",
         Intervals.of("2015/2016"),
         "0",
-        Maps.newHashMap(),
-        Lists.newArrayList(),
-        Lists.newArrayList(),
+        new HashMap<>(),
+        new ArrayList<>(),
+        new ArrayList<>(),
         NoneShardSpec.instance(),
         0,
         size
diff --git a/extensions-core/stats/src/main/java/org/apache/druid/query/aggregation/teststats/PvaluefromZscorePostAggregator.java b/extensions-core/stats/src/main/java/org/apache/druid/query/aggregation/teststats/PvaluefromZscorePostAggregator.java
index a6b0daf3cd5..3199909850a 100644
--- a/extensions-core/stats/src/main/java/org/apache/druid/query/aggregation/teststats/PvaluefromZscorePostAggregator.java
+++ b/extensions-core/stats/src/main/java/org/apache/druid/query/aggregation/teststats/PvaluefromZscorePostAggregator.java
@@ -24,7 +24,6 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonTypeName;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Iterables;
-import com.google.common.collect.Sets;
 import org.apache.commons.math3.distribution.NormalDistribution;
 import org.apache.druid.query.Queries;
 import org.apache.druid.query.aggregation.AggregatorFactory;
@@ -35,6 +34,7 @@
 import org.apache.druid.query.cache.CacheKeyBuilder;
 import java.util.Collections;
 import java.util.Comparator;
+import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
@@ -65,7 +65,7 @@ public class PvaluefromZscorePostAggregator implements PostAggregator
   @Override
   public Set getDependentFields()
   {
-    Set dependentFields = Sets.newHashSet();
+    Set dependentFields = new HashSet<>();
     dependentFields.addAll(zScore.getDependentFields());
diff --git a/extensions-core/stats/src/main/java/org/apache/druid/query/aggregation/teststats/ZtestPostAggregator.java b/extensions-core/stats/src/main/java/org/apache/druid/query/aggregation/teststats/ZtestPostAggregator.java
index 39c74ddd19c..53caa36d04c 100644
--- a/extensions-core/stats/src/main/java/org/apache/druid/query/aggregation/teststats/ZtestPostAggregator.java
+++ b/extensions-core/stats/src/main/java/org/apache/druid/query/aggregation/teststats/ZtestPostAggregator.java
@@ -24,7 +24,6 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonTypeName;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Iterables;
-import com.google.common.collect.Sets;
 import org.apache.druid.query.Queries;
 import org.apache.druid.query.aggregation.AggregatorFactory;
 import org.apache.druid.query.aggregation.PostAggregator;
@@ -34,6 +33,7 @@
 import org.apache.druid.query.cache.CacheKeyBuilder;
 import java.util.Collections;
 import java.util.Comparator;
+import java.util.LinkedHashSet;
 import java.util.Map;
 import java.util.Set;
@@ -81,7 +81,7 @@ public class ZtestPostAggregator implements PostAggregator
   @Override
   public Set getDependentFields()
   {
-    Set dependentFields = Sets.newLinkedHashSet();
+    Set dependentFields = new LinkedHashSet<>();
     dependentFields.addAll(successCount1.getDependentFields());
     dependentFields.addAll(sample1Size.getDependentFields());
     dependentFields.addAll(successCount2.getDependentFields());
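Note that the ZtestPostAggregator hunk chooses LinkedHashSet, not HashSet: Sets.newLinkedHashSet() guaranteed insertion-order iteration, and the JDK constructor preserves that property, keeping getDependentFields() deterministic. A minimal sketch using the field names from the hunk above:

import java.util.LinkedHashSet;
import java.util.Set;

public class DependentFieldsExample
{
  public static void main(String[] args)
  {
    // Before: Set<String> dependentFields = Sets.newLinkedHashSet();
    Set<String> dependentFields = new LinkedHashSet<>(); // iteration follows insertion order
    dependentFields.add("successCount1");
    dependentFields.add("sample1Size");
    dependentFields.add("successCount1"); // duplicate: ignored, first position wins
    System.out.println(dependentFields);  // [successCount1, sample1Size]
  }
}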
diff --git a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceTestHelper.java b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceTestHelper.java
index de786113e86..a46b6c3c9fd 100644
--- a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceTestHelper.java
+++ b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceTestHelper.java
@@ -21,7 +21,6 @@
 package org.apache.druid.query.aggregation.variance;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import org.apache.druid.data.input.MapBasedRow;
 import org.apache.druid.data.input.Row;
 import org.apache.druid.java.util.common.DateTimes;
@@ -31,7 +30,9 @@
 import org.apache.druid.query.aggregation.PostAggregator;
 import org.apache.druid.query.aggregation.stats.DruidStatsModule;
 import org.joda.time.DateTime;
+import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -69,7 +70,7 @@ public class VarianceTestHelper extends QueryRunnerTestHelper
   public static class RowBuilder
   {
     private final String[] names;
-    private final List rows = Lists.newArrayList();
+    private final List rows = new ArrayList<>();
     public RowBuilder(String[] names)
     {
@@ -96,7 +97,7 @@ public class VarianceTestHelper extends QueryRunnerTestHelper
    {
      Preconditions.checkArgument(names.length == values.length);
-     Map theVals = Maps.newHashMap();
+     Map theVals = new HashMap<>();
      for (int i = 0; i < values.length; i++) {
        theVals.put(names[i], values[i]);
      }
diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/DetermineHashedPartitionsJob.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/DetermineHashedPartitionsJob.java
index 12d4c487c1f..c8696b563a7 100644
--- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/DetermineHashedPartitionsJob.java
+++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/DetermineHashedPartitionsJob.java
@@ -24,7 +24,6 @@
 import com.google.common.base.Optional;
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import com.google.common.hash.HashFunction;
 import com.google.common.hash.Hashing;
 import com.google.common.io.Closeables;
@@ -60,9 +59,12 @@
 import javax.annotation.Nullable;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.TreeMap;
 /**
  * Determines appropriate ShardSpecs for a job by determining approximate cardinality of data set using HyperLogLog
@@ -155,7 +157,7 @@ public class DetermineHashedPartitionsJob implements Jobby
         );
         log.info("Determined Intervals for Job [%s].", config.getSegmentGranularIntervals());
       }
-      Map> shardSpecs = Maps.newTreeMap(DateTimeComparator.getInstance());
+      Map> shardSpecs = new TreeMap<>(DateTimeComparator.getInstance());
       int shardCount = 0;
       for (Interval segmentGranularity : config.getSegmentGranularIntervals().get()) {
         DateTime bucket = segmentGranularity.getStart();
@@ -277,7 +279,7 @@ public class DetermineHashedPartitionsJob implements Jobby
         hyperLogLogs = builder.build();
       } else {
         determineIntervals = true;
-        hyperLogLogs = Maps.newHashMap();
+        hyperLogLogs = new HashMap<>();
       }
     }
@@ -341,7 +343,7 @@ public class DetermineHashedPartitionsJob implements Jobby
   public static class DetermineCardinalityReducer
       extends Reducer
   {
-    private final List intervals = Lists.newArrayList();
+    private final List intervals = new ArrayList<>();
     protected HadoopDruidIndexerConfig config = null;
    private boolean determineIntervals;
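The DetermineHashedPartitionsJob hunk is one place where the Guava factory took an argument: Maps.newTreeMap(comparator) maps onto the single-argument java.util.TreeMap constructor. A compilable sketch using Long keys and Comparator.reverseOrder() as stand-ins for the DateTime buckets and the DateTimeComparator.getInstance() seen above:

import java.util.Comparator;
import java.util.Map;
import java.util.TreeMap;

public class OrderedShardSpecsExample
{
  public static void main(String[] args)
  {
    // Before: Map<Long, Integer> shardSpecs = Maps.newTreeMap(comparator);
    Map<Long, Integer> shardSpecs = new TreeMap<>(Comparator.reverseOrder());
    shardSpecs.put(1L, 10);
    shardSpecs.put(3L, 30);
    shardSpecs.put(2L, 20);
    System.out.println(shardSpecs); // {3=30, 2=20, 1=10}, keys ordered by the comparator
  }
}

The same one-argument pattern applies to Sets.newTreeSet(Comparators.intervals()) further below, which becomes new TreeSet<>(Comparators.intervals()).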
diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/DeterminePartitionsJob.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/DeterminePartitionsJob.java
index e0b355ad174..27cc9c3c215 100644
--- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/DeterminePartitionsJob.java
+++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/DeterminePartitionsJob.java
@@ -30,7 +30,6 @@
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Iterators;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import com.google.common.collect.PeekingIterator;
 import com.google.common.io.Closeables;
 import org.apache.druid.collections.CombiningIterable;
@@ -75,10 +74,13 @@
 import javax.annotation.Nullable;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
 import java.util.Comparator;
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.TreeMap;
 /**
  * Determines appropriate ShardSpecs for a job by determining whether or not partitioning is necessary, and if so,
@@ -228,7 +230,7 @@ public class DeterminePartitionsJob implements Jobby
       log.info("Job completed, loading up partitions for intervals[%s].", config.getSegmentGranularIntervals());
       FileSystem fileSystem = null;
-      Map> shardSpecs = Maps.newTreeMap();
+      Map> shardSpecs = new TreeMap<>();
       int shardCount = 0;
       for (Interval segmentGranularity : config.getSegmentGranularIntervals().get()) {
         final Path partitionInfoPath = config.makeSegmentPartitionInfoPath(segmentGranularity);
@@ -395,7 +397,7 @@ public class DeterminePartitionsJob implements Jobby
         Context context
     ) throws IOException, InterruptedException
     {
-      final Map> dims = Maps.newHashMap();
+      final Map> dims = new HashMap<>();
       for (final String dim : inputRow.getDimensions()) {
         dims.put(dim, inputRow.getDimension(dim));
       }
@@ -628,7 +630,7 @@ public class DeterminePartitionsJob implements Jobby
       boolean currentDimSkip = false;
       // We'll store possible partitions in here
-      final Map dimPartitionss = Maps.newHashMap();
+      final Map dimPartitionss = new HashMap<>();
       while (iterator.hasNext()) {
         final DimValueCount dvc = iterator.next();
@@ -861,7 +863,7 @@ public class DeterminePartitionsJob implements Jobby
   private static class DimPartitions
   {
     public final String dim;
-    public final List partitions = Lists.newArrayList();
+    public final List partitions = new ArrayList<>();
     private DimPartitions(String dim)
     {
diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidDetermineConfigurationJob.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidDetermineConfigurationJob.java
index 929c9ecd5a9..294c6767f20 100644
--- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidDetermineConfigurationJob.java
+++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidDetermineConfigurationJob.java
@@ -20,7 +20,6 @@
 package org.apache.druid.indexer;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import com.google.inject.Inject;
 import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.timeline.partition.HashBasedNumberedShardSpec;
@@ -31,6 +30,7 @@
 import org.joda.time.Interval;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
+import java.util.TreeMap;
 /**
  */
@@ -58,7 +58,7 @@ public class HadoopDruidDetermineConfigurationJob implements Jobby
       return JobHelper.runSingleJob(job, config);
     } else {
       int shardsPerInterval = config.getPartitionsSpec().getNumShards();
-      Map> shardSpecs = Maps.newTreeMap();
+      Map> shardSpecs = new TreeMap<>();
       int shardCount = 0;
       for (Interval segmentGranularity : config.getSegmentGranularIntervals().get()) {
         DateTime bucket = segmentGranularity.getStart();
diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java
index 8cf808fe8e9..d52db6eb7bf 100644
--- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java
+++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java
@@ -31,7 +31,6 @@
 import com.google.common.base.Splitter;
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import com.google.inject.Binder;
 import com.google.inject.Injector;
 import com.google.inject.Key;
@@ -75,6 +74,7 @@
 import java.io.Reader;
 import java.nio.charset.Charset;
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -212,8 +212,8 @@ public class HadoopDruidIndexerConfig
   private HadoopIngestionSpec schema;
   private PathSpec pathSpec;
-  private final Map shardSpecLookups = Maps.newHashMap();
-  private final Map> hadoopShardSpecLookup = Maps.newHashMap();
+  private final Map shardSpecLookups = new HashMap<>();
+  private final Map> hadoopShardSpecLookup = new HashMap<>();
   private final Granularity rollupGran;
   private final List allowedHadoopPrefix;
@@ -244,7 +244,7 @@ public class HadoopDruidIndexerConfig
           )
       );
-      Map innerHadoopShardSpecLookup = Maps.newHashMap();
+      Map innerHadoopShardSpecLookup = new HashMap<>();
       for (HadoopyShardSpec hadoopyShardSpec : entry.getValue()) {
         innerHadoopShardSpecLookup.put(hadoopyShardSpec.getActualSpec(), hadoopyShardSpec);
       }
diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerJob.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerJob.java
index 2063222563a..331863abc9d 100644
--- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerJob.java
+++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerJob.java
@@ -20,12 +20,12 @@
 package org.apache.druid.indexer;
 import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
 import com.google.inject.Inject;
 import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.timeline.DataSegment;
 import javax.annotation.Nullable;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -66,7 +66,7 @@ public class HadoopDruidIndexerJob implements Jobby
   @Override
   public boolean run()
   {
-    List jobs = Lists.newArrayList();
+    List jobs = new ArrayList<>();
     JobHelper.ensurePaths(config);
     indexJob = new IndexGeneratorJob(config);
diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopIngestionSpec.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopIngestionSpec.java
index 1f631fd6e17..30dfdb4c65a 100644
--- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopIngestionSpec.java
+++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopIngestionSpec.java
@@ -22,7 +22,6 @@
 package org.apache.druid.indexer;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.Lists;
 import org.apache.druid.common.utils.UUIDUtils;
 import org.apache.druid.indexer.hadoop.DatasourceIngestionSpec;
 import org.apache.druid.indexer.hadoop.WindowedDataSegment;
@@ -196,7 +195,7 @@ public class HadoopIngestionSpec extends IngestionSpec
     timeline = VersionedIntervalTimeline.forSegments(segmentsList);
-    final List windowedSegments = Lists.newArrayList();
+    final List windowedSegments = new ArrayList<>();
     for (Interval interval : ingestionSpecObj.getIntervals()) {
       final List> timeLineSegments = timeline.lookup(interval);
diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/IndexGeneratorJob.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/IndexGeneratorJob.java
index 9aec2d74a3d..9448206e116 100644
--- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/IndexGeneratorJob.java
+++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/IndexGeneratorJob.java
@@ -25,7 +25,6 @@
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
 import com.google.common.hash.HashFunction;
 import com.google.common.hash.Hashing;
 import com.google.common.util.concurrent.Futures;
@@ -80,11 +79,13 @@
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.TreeSet;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
@@ -427,7 +428,7 @@ public class IndexGeneratorJob implements Jobby
       BytesWritable first = iter.next();
       if (iter.hasNext()) {
-        LinkedHashSet dimOrder = Sets.newLinkedHashSet();
+        LinkedHashSet dimOrder = new LinkedHashSet<>();
         SortableBytes keyBytes = SortableBytes.fromBytesWritable(key);
         Bucket bucket = Bucket.fromGroupKey(keyBytes.getGroupKey()).lhs;
         IncrementalIndex index = makeIncrementalIndex(bucket, combiningAggs, config, null, null);
@@ -560,7 +561,7 @@ public class IndexGeneratorJob implements Jobby
   public static class IndexGeneratorReducer extends Reducer
   {
     protected HadoopDruidIndexerConfig config;
-    private List metricNames = Lists.newArrayList();
+    private List metricNames = new ArrayList<>();
     private AggregatorFactory[] aggregators;
     private AggregatorFactory[] combiningAggs;
     private Map typeHelperMap;
@@ -629,7 +630,7 @@ public class IndexGeneratorJob implements Jobby
       final Interval interval = config.getGranularitySpec().bucketInterval(bucket.time).get();
       ListeningExecutorService persistExecutor = null;
-      List> persistFutures = Lists.newArrayList();
+      List> persistFutures = new ArrayList<>();
       IncrementalIndex index = makeIncrementalIndex(
           bucket,
           combiningAggs,
@@ -642,13 +643,13 @@ public class IndexGeneratorJob implements Jobby
         baseFlushFile.delete();
         baseFlushFile.mkdirs();
-        Set toMerge = Sets.newTreeSet();
+        Set toMerge = new TreeSet<>();
         int indexCount = 0;
         int lineCount = 0;
         int runningTotalLineCount = 0;
         long startTime = System.currentTimeMillis();
-        Set allDimensionNames = Sets.newLinkedHashSet();
+        Set allDimensionNames = new LinkedHashSet<>();
         final ProgressIndicator progressIndicator = makeProgressIndicator(context);
         int numBackgroundPersistThreads = config.getSchema().getTuningConfig().getNumBackgroundPersistThreads();
         if (numBackgroundPersistThreads > 0) {
diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/InputRowSerde.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/InputRowSerde.java
index e0d01d08ec4..b9c48f45170 100644
--- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/InputRowSerde.java
+++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/InputRowSerde.java
@@ -21,7 +21,6 @@
 package org.apache.druid.indexer;
 import com.google.common.base.Supplier;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import com.google.common.io.ByteArrayDataInput;
 import com.google.common.io.ByteArrayDataOutput;
 import com.google.common.io.ByteStreams;
@@ -49,6 +48,7 @@
 import javax.annotation.Nullable;
 import java.io.DataInput;
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -74,7 +74,7 @@ public class InputRowSerde
   public static Map getTypeHelperMap(DimensionsSpec dimensionsSpec)
   {
-    Map typeHelperMap = Maps.newHashMap();
+    Map typeHelperMap = new HashMap<>();
     for (DimensionSchema dimensionSchema : dimensionsSpec.getDimensions()) {
       IndexSerdeTypeHelper typeHelper;
       switch (dimensionSchema.getValueType()) {
@@ -416,10 +416,10 @@ public class InputRowSerde
       //Read timestamp
       long timestamp = in.readLong();
-      Map event = Maps.newHashMap();
+      Map event = new HashMap<>();
       //Read dimensions
-      List dimensions = Lists.newArrayList();
+      List dimensions = new ArrayList<>();
       int dimNum = WritableUtils.readVInt(in);
       for (int i = 0; i < dimNum; i++) {
         String dimension = readString(in);
diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/Utils.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/Utils.java
index 9bfdeb15ac3..57883d6f66d 100644
--- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/Utils.java
+++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/Utils.java
@@ -20,7 +20,6 @@
 package org.apache.druid.indexer;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.Maps;
 import org.apache.druid.jackson.DefaultObjectMapper;
 import org.apache.druid.java.util.common.ISE;
 import org.apache.druid.java.util.common.jackson.JacksonUtils;
@@ -38,6 +37,7 @@
 import org.apache.hadoop.util.ReflectionUtils;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.util.HashMap;
 import java.util.Map;
 /**
@@ -126,7 +126,7 @@ public class Utils
   public static String getFailureMessage(Job failedJob, ObjectMapper jsonMapper)
   {
     try {
-      Map taskDiagsMap = Maps.newHashMap();
+      Map taskDiagsMap = new HashMap<>();
       TaskCompletionEvent[] completionEvents = failedJob.getTaskCompletionEvents(0, 100);
       for (TaskCompletionEvent tce : completionEvents) {
        String[] taskDiags = failedJob.getTaskDiagnostics(tce.getTaskAttemptId());
diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/hadoop/DatasourceRecordReader.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/hadoop/DatasourceRecordReader.java
index 7c001683698..f7902a12de5 100644
--- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/hadoop/DatasourceRecordReader.java
+++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/hadoop/DatasourceRecordReader.java
@@ -43,6 +43,7 @@
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import java.io.File;
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.List;
 public class DatasourceRecordReader extends RecordReader
@@ -55,8 +56,8 @@ public class DatasourceRecordReader extends RecordReader
   private long rowNum;
   private Row currRow;
-  private List indexes = Lists.newArrayList();
-  private List tmpSegmentDirs = Lists.newArrayList();
+  private List indexes = new ArrayList<>();
+  private List tmpSegmentDirs = new ArrayList<>();
   private long numRows;
   @Override
diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/path/DatasourcePathSpec.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/path/DatasourcePathSpec.java
index 4a4509d2b78..424d5e7b820 100644
--- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/path/DatasourcePathSpec.java
+++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/path/DatasourcePathSpec.java
@@ -40,6 +40,7 @@
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.input.MultipleInputs;
 import java.io.IOException;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -164,7 +165,7 @@ public class DatasourcePathSpec implements PathSpec
     }
     if (updatedIngestionSpec.getMetrics() == null) {
-      Set metrics = Sets.newHashSet();
+      Set metrics = new HashSet<>();
       final AggregatorFactory[] cols = config.getSchema().getDataSchema().getAggregators();
       if (cols != null) {
         if (useNewAggs) {
diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/path/GranularUnprocessedPathSpec.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/path/GranularUnprocessedPathSpec.java
index f39d288e1c9..3891851d9ed 100644
--- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/path/GranularUnprocessedPathSpec.java
+++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/path/GranularUnprocessedPathSpec.java
@@ -23,7 +23,6 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Ordering;
-import com.google.common.collect.Sets;
 import org.apache.druid.indexer.HadoopDruidIndexerConfig;
 import org.apache.druid.indexer.hadoop.FSSpideringIterator;
 import org.apache.druid.java.util.common.DateTimes;
@@ -42,6 +41,7 @@
 import java.io.IOException;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
+import java.util.TreeSet;
 /**
  */
@@ -87,7 +87,7 @@ public class GranularUnprocessedPathSpec extends GranularityPathSpec
       inputModifiedTimes.put(key.getMillis(), currVal == null ? mTime : Math.max(currVal, mTime));
     }
-    Set bucketsToRun = Sets.newTreeSet(Comparators.intervals());
+    Set bucketsToRun = new TreeSet<>(Comparators.intervals());
     for (Map.Entry entry : inputModifiedTimes.entrySet()) {
       DateTime timeBucket = DateTimes.utc(entry.getKey());
       long mTime = entry.getValue();
diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/path/GranularityPathSpec.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/path/GranularityPathSpec.java
index 3494aba6b07..cd7cd5a6b2c 100644
--- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/path/GranularityPathSpec.java
+++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/path/GranularityPathSpec.java
@@ -20,7 +20,6 @@
 package org.apache.druid.indexer.path;
 import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.collect.Sets;
 import org.apache.druid.indexer.HadoopDruidIndexerConfig;
 import org.apache.druid.indexer.hadoop.FSSpideringIterator;
 import org.apache.druid.java.util.common.granularity.Granularity;
@@ -38,6 +37,7 @@
 import org.joda.time.format.DateTimeFormatter;
 import java.io.IOException;
 import java.util.Set;
+import java.util.TreeSet;
 import java.util.regex.Pattern;
 /**
@@ -110,7 +110,7 @@ public class GranularityPathSpec implements PathSpec
   @Override
   public Job addInputPaths(HadoopDruidIndexerConfig config, Job job) throws IOException
   {
-    final Set intervals = Sets.newTreeSet(Comparators.intervalsByStartThenEnd());
+    final Set intervals = new TreeSet<>(Comparators.intervalsByStartThenEnd());
     for (Interval inputInterval : config.getInputIntervals()) {
       for (Interval interval : dataGranularity.getIterable(inputInterval)) {
         intervals.add(trim(inputInterval, interval));
@@ -119,7 +119,7 @@ public class GranularityPathSpec implements PathSpec
     Path betaInput = new Path(inputPath);
     FileSystem fs = betaInput.getFileSystem(job.getConfiguration());
-    Set paths = Sets.newTreeSet();
+    Set paths = new TreeSet<>();
     Pattern fileMatcher = Pattern.compile(filePattern);
     DateTimeFormatter customFormatter = null;
diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/path/StaticPathSpec.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/path/StaticPathSpec.java
index 811c5f18189..690d20ef4c1 100644
--- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/path/StaticPathSpec.java
+++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/path/StaticPathSpec.java
@@ -23,7 +23,6 @@
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Iterables;
-import com.google.common.collect.Sets;
 import org.apache.druid.indexer.HadoopDruidIndexerConfig;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.java.util.common.logger.Logger;
@@ -35,6 +34,7 @@
 import org.apache.hadoop.mapreduce.lib.input.CombineTextInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.MultipleInputs;
 import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
+import java.util.LinkedHashSet;
 import java.util.Set;
@@ -113,7 +113,7 @@ public class StaticPathSpec implements PathSpec
     // MultipleInputs.addInputPath(job, path, inputFormatClassToUse)
     // but have to handle hadoop glob path ourselves correctly
     // This change and HadoopGlobPathSplitter.java can be removed once the hadoop issue is fixed
-    Set pathStrings = Sets.newLinkedHashSet();
+    Set pathStrings = new LinkedHashSet<>();
     for (String path : paths) {
Iterables.addAll(pathStrings, HadoopGlobPathSplitter.splitGlob(path)); } diff --git a/indexing-hadoop/src/test/java/org/apache/druid/indexer/BatchDeltaIngestionTest.java b/indexing-hadoop/src/test/java/org/apache/druid/indexer/BatchDeltaIngestionTest.java index 72124fa77c2..b6827246047 100644 --- a/indexing-hadoop/src/test/java/org/apache/druid/indexer/BatchDeltaIngestionTest.java +++ b/indexing-hadoop/src/test/java/org/apache/druid/indexer/BatchDeltaIngestionTest.java @@ -26,7 +26,6 @@ import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; import org.apache.commons.io.FileUtils; import org.apache.druid.data.input.Firehose; import org.apache.druid.data.input.InputRow; @@ -64,6 +63,7 @@ import org.junit.rules.TemporaryFolder; import java.io.File; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -419,7 +419,7 @@ public class BatchDeltaIngestionTest null ); - List rows = Lists.newArrayList(); + List rows = new ArrayList<>(); while (firehose.hasMore()) { rows.add(firehose.nextRow()); } diff --git a/indexing-hadoop/src/test/java/org/apache/druid/indexer/HadoopDruidIndexerConfigTest.java b/indexing-hadoop/src/test/java/org/apache/druid/indexer/HadoopDruidIndexerConfigTest.java index aa388261678..10911e70212 100644 --- a/indexing-hadoop/src/test/java/org/apache/druid/indexer/HadoopDruidIndexerConfigTest.java +++ b/indexing-hadoop/src/test/java/org/apache/druid/indexer/HadoopDruidIndexerConfigTest.java @@ -23,7 +23,6 @@ import com.fasterxml.jackson.databind.InjectableValues; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; import org.apache.druid.data.input.MapBasedInputRow; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.DateTimes; @@ -37,6 +36,7 @@ import org.apache.druid.timeline.partition.NoneShardSpec; import org.junit.Assert; import org.junit.Test; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -55,7 +55,7 @@ public class HadoopDruidIndexerConfigTest @Test public void testHashedBucketSelection() { - List specs = Lists.newArrayList(); + List specs = new ArrayList<>(); final int partitionCount = 10; for (int i = 0; i < partitionCount; i++) { specs.add(new HadoopyShardSpec( diff --git a/indexing-hadoop/src/test/java/org/apache/druid/indexer/IndexGeneratorJobTest.java b/indexing-hadoop/src/test/java/org/apache/druid/indexer/IndexGeneratorJobTest.java index 75a9293c40d..d19bcf55fcd 100644 --- a/indexing-hadoop/src/test/java/org/apache/druid/indexer/IndexGeneratorJobTest.java +++ b/indexing-hadoop/src/test/java/org/apache/druid/indexer/IndexGeneratorJobTest.java @@ -24,7 +24,6 @@ import com.fasterxml.jackson.databind.jsontype.NamedType; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import org.apache.commons.io.FileUtils; import org.apache.druid.data.input.impl.CSVParseSpec; import org.apache.druid.data.input.impl.DimensionsSpec; @@ -71,11 +70,13 @@ import org.junit.runners.Parameterized; import java.io.File; import java.io.IOException; import java.nio.ByteBuffer; +import java.util.ArrayList; import 
java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.TreeMap; @RunWith(Parameterized.class) public class IndexGeneratorJobTest @@ -380,7 +381,7 @@ public class IndexGeneratorJobTest ); // Run each baseConstructor with/without forceExtendableShardSpecs. - final List constructors = Lists.newArrayList(); + final List constructors = new ArrayList<>(); for (Object[] baseConstructor : baseConstructors) { for (int forceExtendableShardSpecs = 0; forceExtendableShardSpecs < 2; forceExtendableShardSpecs++) { final Object[] fullConstructor = new Object[baseConstructor.length + 1]; @@ -544,7 +545,7 @@ public class IndexGeneratorJobTest private List constructShardSpecFromShardInfo(String partitionType, Object[][] shardInfoForEachShard) { - List specs = Lists.newArrayList(); + List specs = new ArrayList<>(); if ("hashed".equals(partitionType)) { for (Integer[] shardInfo : (Integer[][]) shardInfoForEachShard) { specs.add(new HashBasedNumberedShardSpec(shardInfo[0], shardInfo[1], null, HadoopDruidIndexerConfig.JSON_MAPPER)); @@ -566,7 +567,7 @@ public class IndexGeneratorJobTest Object[][][] shardInfoForEachShard ) { - Map> shardSpecs = Maps.newTreeMap(DateTimeComparator.getInstance()); + Map> shardSpecs = new TreeMap<>(DateTimeComparator.getInstance()); int shardCount = 0; int segmentNum = 0; for (Interval segmentGranularity : config.getSegmentGranularIntervals().get()) { diff --git a/indexing-hadoop/src/test/java/org/apache/druid/indexer/InputRowSerdeTest.java b/indexing-hadoop/src/test/java/org/apache/druid/indexer/InputRowSerdeTest.java index f6c6d5e6049..ce9b95cd185 100644 --- a/indexing-hadoop/src/test/java/org/apache/druid/indexer/InputRowSerdeTest.java +++ b/indexing-hadoop/src/test/java/org/apache/druid/indexer/InputRowSerdeTest.java @@ -20,7 +20,6 @@ package org.apache.druid.indexer; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Maps; import org.apache.druid.common.config.NullHandling; import org.apache.druid.data.input.InputRow; import org.apache.druid.data.input.MapBasedInputRow; @@ -46,6 +45,7 @@ import org.junit.rules.ExpectedException; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -68,7 +68,7 @@ public class InputRowSerdeTest { this.timestamp = System.currentTimeMillis(); this.dims = ImmutableList.of("dim_non_existing", "d1", "d2", "d3", "d4", "d5"); - this.event = Maps.newHashMap(); + this.event = new HashMap<>(); event.put("d1", "d1v"); event.put("d2", ImmutableList.of("d2v1", "d2v2")); event.put("d3", 200L); diff --git a/indexing-hadoop/src/test/java/org/apache/druid/indexer/hadoop/DatasourceInputFormatTest.java b/indexing-hadoop/src/test/java/org/apache/druid/indexer/hadoop/DatasourceInputFormatTest.java index 71e55446241..2da6b210e87 100644 --- a/indexing-hadoop/src/test/java/org/apache/druid/indexer/hadoop/DatasourceInputFormatTest.java +++ b/indexing-hadoop/src/test/java/org/apache/druid/indexer/hadoop/DatasourceInputFormatTest.java @@ -24,7 +24,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Iterators; -import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.common.io.Files; import org.apache.druid.indexer.JobHelper; @@ -54,6 +53,7 @@ import java.io.File; import java.io.IOException; import 
java.nio.charset.StandardCharsets; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -218,7 +218,7 @@ public class DatasourceInputFormatTest @Override public InputFormat get() { - final Map locationMap = Maps.newHashMap(); + final Map locationMap = new HashMap<>(); for (LocatedFileStatus status : locations) { locationMap.put(status.getPath().getName(), status); } diff --git a/indexing-hadoop/src/test/java/org/apache/druid/indexer/hadoop/DatasourceRecordReaderTest.java b/indexing-hadoop/src/test/java/org/apache/druid/indexer/hadoop/DatasourceRecordReaderTest.java index 466a94f3a1f..77ce11d7a9e 100644 --- a/indexing-hadoop/src/test/java/org/apache/druid/indexer/hadoop/DatasourceRecordReaderTest.java +++ b/indexing-hadoop/src/test/java/org/apache/druid/indexer/hadoop/DatasourceRecordReaderTest.java @@ -21,7 +21,6 @@ package org.apache.druid.indexer.hadoop; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; import org.apache.druid.data.input.InputRow; import org.apache.druid.indexer.HadoopDruidIndexerConfig; import org.apache.druid.java.util.common.DateTimes; @@ -35,6 +34,7 @@ import org.junit.Assert; import org.junit.Test; import java.net.URL; +import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; @@ -86,7 +86,7 @@ public class DatasourceRecordReaderTest Assert.assertEquals(0, rr.getProgress(), 0.0001); - List rows = Lists.newArrayList(); + List rows = new ArrayList<>(); while (rr.nextKeyValue()) { rows.add(rr.getCurrentValue()); } diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/appenderator/ActionBasedUsedSegmentChecker.java b/indexing-service/src/main/java/org/apache/druid/indexing/appenderator/ActionBasedUsedSegmentChecker.java index 7af0aab4195..aba4f66e6ab 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/appenderator/ActionBasedUsedSegmentChecker.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/appenderator/ActionBasedUsedSegmentChecker.java @@ -21,8 +21,6 @@ package org.apache.druid.indexing.appenderator; import com.google.common.base.Function; import com.google.common.collect.Iterables; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; import org.apache.druid.indexing.common.actions.SegmentListUsedAction; import org.apache.druid.indexing.common.actions.TaskActionClient; import org.apache.druid.java.util.common.JodaUtils; @@ -32,9 +30,11 @@ import org.apache.druid.timeline.DataSegment; import org.joda.time.Interval; import java.io.IOException; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.TreeMap; public class ActionBasedUsedSegmentChecker implements UsedSegmentChecker { @@ -49,15 +49,15 @@ public class ActionBasedUsedSegmentChecker implements UsedSegmentChecker public Set findUsedSegments(Set identifiers) throws IOException { // Group by dataSource - final Map> identifiersByDataSource = Maps.newTreeMap(); + final Map> identifiersByDataSource = new TreeMap<>(); for (SegmentIdentifier identifier : identifiers) { if (!identifiersByDataSource.containsKey(identifier.getDataSource())) { - identifiersByDataSource.put(identifier.getDataSource(), Sets.newHashSet()); + identifiersByDataSource.put(identifier.getDataSource(), new HashSet<>()); } identifiersByDataSource.get(identifier.getDataSource()).add(identifier); } - 
final Set retVal = Sets.newHashSet(); + final Set retVal = new HashSet<>(); for (Map.Entry> entry : identifiersByDataSource.entrySet()) { final List intervals = JodaUtils.condenseIntervals( diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/TaskReport.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/TaskReport.java index 6c37cfaf108..3cee6f2aa5c 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/common/TaskReport.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/TaskReport.java @@ -21,8 +21,8 @@ package org.apache.druid.indexing.common; import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonTypeInfo; -import com.google.common.collect.Maps; +import java.util.HashMap; import java.util.Map; /** @@ -46,7 +46,7 @@ public interface TaskReport static Map buildTaskReports(TaskReport... taskReports) { - Map taskReportMap = Maps.newHashMap(); + Map taskReportMap = new HashMap<>(); for (TaskReport taskReport : taskReports) { taskReportMap.put(taskReport.getReportKey(), taskReport); } diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/index/YeOldePlumberSchool.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/index/YeOldePlumberSchool.java index ec2029ee2ba..29948d100e6 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/common/index/YeOldePlumberSchool.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/index/YeOldePlumberSchool.java @@ -28,8 +28,6 @@ import com.google.common.base.Supplier; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; import org.apache.commons.io.FileUtils; import org.apache.druid.data.input.Committer; import org.apache.druid.data.input.InputRow; @@ -57,6 +55,8 @@ import org.joda.time.Interval; import java.io.File; import java.io.IOException; +import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Set; @@ -116,7 +116,7 @@ public class YeOldePlumberSchool implements PlumberSchool final File persistDir = new File(tmpSegmentDir, theSink.getSegment().getIdentifier()); // Set of spilled segments. Will be merged at the end. 
- final Set spilled = Sets.newHashSet(); + final Set spilled = new HashSet<>(); return new Plumber() { @@ -180,7 +180,7 @@ public class YeOldePlumberSchool implements PlumberSchool } else if (spilled.size() == 1) { fileToUpload = Iterables.getOnlyElement(spilled); } else { - List indexes = Lists.newArrayList(); + List indexes = new ArrayList<>(); for (final File oneSpill : spilled) { indexes.add(indexIO.loadIndex(oneSpill)); } diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/stats/DropwizardRowIngestionMeters.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/stats/DropwizardRowIngestionMeters.java index ca541cce4dd..f47a8f40c78 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/common/stats/DropwizardRowIngestionMeters.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/stats/DropwizardRowIngestionMeters.java @@ -21,8 +21,8 @@ package org.apache.druid.indexing.common.stats; import com.codahale.metrics.Meter; import com.codahale.metrics.MetricRegistry; -import com.google.common.collect.Maps; +import java.util.HashMap; import java.util.Map; public class DropwizardRowIngestionMeters implements RowIngestionMeters @@ -108,21 +108,21 @@ public class DropwizardRowIngestionMeters implements RowIngestionMeters @Override public Map getMovingAverages() { - Map movingAverages = Maps.newHashMap(); + Map movingAverages = new HashMap<>(); - Map oneMinute = Maps.newHashMap(); + Map oneMinute = new HashMap<>(); oneMinute.put(PROCESSED, processed.getOneMinuteRate()); oneMinute.put(PROCESSED_WITH_ERROR, processedWithError.getOneMinuteRate()); oneMinute.put(UNPARSEABLE, unparseable.getOneMinuteRate()); oneMinute.put(THROWN_AWAY, thrownAway.getOneMinuteRate()); - Map fiveMinute = Maps.newHashMap(); + Map fiveMinute = new HashMap<>(); fiveMinute.put(PROCESSED, processed.getFiveMinuteRate()); fiveMinute.put(PROCESSED_WITH_ERROR, processedWithError.getFiveMinuteRate()); fiveMinute.put(UNPARSEABLE, unparseable.getFiveMinuteRate()); fiveMinute.put(THROWN_AWAY, thrownAway.getFiveMinuteRate()); - Map fifteenMinute = Maps.newHashMap(); + Map fifteenMinute = new HashMap<>(); fifteenMinute.put(PROCESSED, processed.getFifteenMinuteRate()); fifteenMinute.put(PROCESSED_WITH_ERROR, processedWithError.getFifteenMinuteRate()); fifteenMinute.put(UNPARSEABLE, unparseable.getFifteenMinuteRate()); diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/AppendTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/AppendTask.java index a243d5c9798..a80c51956e6 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/AppendTask.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/AppendTask.java @@ -24,7 +24,6 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Function; import com.google.common.base.Preconditions; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; import org.apache.druid.indexing.common.TaskToolbox; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.query.aggregation.AggregatorFactory; @@ -43,6 +42,7 @@ import org.joda.time.Interval; import javax.annotation.Nullable; import java.io.File; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -109,7 +109,7 @@ public class AppendTask extends MergeTaskBase ) ); - List adapters = Lists.newArrayList(); + List adapters = new ArrayList<>(); for (final 
SegmentToMergeHolder holder : segmentsToMerge) { adapters.add( new RowFilteringIndexAdapter( diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTask.java index 069a55e566c..1dcf92b4b85 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTask.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTask.java @@ -28,7 +28,6 @@ import com.google.common.base.Optional; import com.google.common.base.Supplier; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Maps; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import org.apache.commons.io.FileUtils; @@ -95,6 +94,7 @@ import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import java.io.File; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Queue; @@ -469,9 +469,9 @@ public class AppenderatorDriverRealtimeIndexTask extends AbstractTask implements ) { IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper); - Map returnMap = Maps.newHashMap(); - Map totalsMap = Maps.newHashMap(); - Map averagesMap = Maps.newHashMap(); + Map returnMap = new HashMap<>(); + Map totalsMap = new HashMap<>(); + Map averagesMap = new HashMap<>(); totalsMap.put( RowIngestionMeters.BUILD_SEGMENTS, @@ -533,7 +533,7 @@ public class AppenderatorDriverRealtimeIndexTask extends AbstractTask implements private Map getTaskCompletionUnparseableEvents() { - Map unparseableEventsMap = Maps.newHashMap(); + Map unparseableEventsMap = new HashMap<>(); List buildSegmentsParseExceptionMessages = IndexTaskUtils.getMessagesFromSavedParseExceptions( savedParseExceptions); if (buildSegmentsParseExceptionMessages != null) { @@ -544,7 +544,7 @@ public class AppenderatorDriverRealtimeIndexTask extends AbstractTask implements private Map getTaskCompletionRowStats() { - Map metricsMap = Maps.newHashMap(); + Map metricsMap = new HashMap<>(); metricsMap.put( RowIngestionMeters.BUILD_SEGMENTS, rowIngestionMeters.getTotals() diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/HadoopIndexTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/HadoopIndexTask.java index 96861a94c2a..40cd7d519e9 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/HadoopIndexTask.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/HadoopIndexTask.java @@ -30,7 +30,6 @@ import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; -import com.google.common.collect.Maps; import org.apache.druid.indexer.HadoopDruidDetermineConfigurationJob; import org.apache.druid.indexer.HadoopDruidIndexerConfig; import org.apache.druid.indexer.HadoopDruidIndexerJob; @@ -73,6 +72,7 @@ import javax.ws.rs.core.Response; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.Arrays; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.SortedSet; @@ -421,8 +421,8 @@ public class HadoopIndexTask 
extends HadoopTask implements ChatHandler ) { IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper); - Map returnMap = Maps.newHashMap(); - Map totalsMap = Maps.newHashMap(); + Map returnMap = new HashMap<>(); + Map totalsMap = new HashMap<>(); if (determinePartitionsStatsGetter != null) { totalsMap.put(RowIngestionMeters.DETERMINE_PARTITIONS, determinePartitionsStatsGetter.getTotalMetrics()); @@ -453,7 +453,7 @@ public class HadoopIndexTask extends HadoopTask implements ChatHandler private Map getTaskCompletionRowStats() { - Map metrics = Maps.newHashMap(); + Map metrics = new HashMap<>(); if (determineConfigStatus != null) { metrics.put( RowIngestionMeters.DETERMINE_PARTITIONS, diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/HadoopTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/HadoopTask.java index 3e9de7ff2bc..2031077f9f4 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/HadoopTask.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/HadoopTask.java @@ -143,7 +143,7 @@ public abstract class HadoopTask extends AbstractTask Arrays.asList(((URLClassLoader) HadoopIndexTask.class.getClassLoader()).getURLs()) ); - final List extensionURLs = Lists.newArrayList(); + final List extensionURLs = new ArrayList<>(); for (final File extension : Initialization.getExtensionFilesToLoad(extensionsConfig)) { final ClassLoader extensionLoader = Initialization.getClassLoaderForExtension(extension, false); extensionURLs.addAll(Arrays.asList(((URLClassLoader) extensionLoader).getURLs())); diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/IndexTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/IndexTask.java index f9fb0aa922c..c1e1397d1b0 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/IndexTask.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/IndexTask.java @@ -31,7 +31,6 @@ import com.google.common.base.Preconditions; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; -import com.google.common.collect.Maps; import com.google.common.hash.HashFunction; import com.google.common.hash.Hashing; import com.google.common.util.concurrent.ListenableFuture; @@ -298,7 +297,7 @@ public class IndexTask extends AbstractTask implements ChatHandler ) { IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper); - Map> events = Maps.newHashMap(); + Map> events = new HashMap<>(); boolean needsDeterminePartitions = false; boolean needsBuildSegments = false; @@ -346,9 +345,9 @@ public class IndexTask extends AbstractTask implements ChatHandler ) { IndexTaskUtils.datasourceAuthorizationCheck(req, Action.READ, getDataSource(), authorizerMapper); - Map returnMap = Maps.newHashMap(); - Map totalsMap = Maps.newHashMap(); - Map averagesMap = Maps.newHashMap(); + Map returnMap = new HashMap<>(); + Map totalsMap = new HashMap<>(); + Map averagesMap = new HashMap<>(); boolean needsDeterminePartitions = false; boolean needsBuildSegments = false; @@ -527,7 +526,7 @@ public class IndexTask extends AbstractTask implements ChatHandler private Map getTaskCompletionUnparseableEvents() { - Map unparseableEventsMap = Maps.newHashMap(); + Map unparseableEventsMap = new HashMap<>(); List determinePartitionsParseExceptionMessages = 
IndexTaskUtils.getMessagesFromSavedParseExceptions( determinePartitionsSavedParseExceptions); List buildSegmentsParseExceptionMessages = IndexTaskUtils.getMessagesFromSavedParseExceptions( @@ -543,7 +542,7 @@ public class IndexTask extends AbstractTask implements ChatHandler private Map getTaskCompletionRowStats() { - Map metrics = Maps.newHashMap(); + Map metrics = new HashMap<>(); metrics.put( RowIngestionMeters.DETERMINE_PARTITIONS, determinePartitionsMeters.getTotals() diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/MergeTaskBase.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/MergeTaskBase.java index be15b4d42aa..3d940f2de3a 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/MergeTaskBase.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/MergeTaskBase.java @@ -57,6 +57,7 @@ import java.nio.charset.StandardCharsets; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.TreeSet; /** */ @@ -335,8 +336,8 @@ public abstract class MergeTaskBase extends AbstractFixedIntervalTask ) { final Interval mergedInterval = computeMergedInterval(segments); - final Set mergedDimensions = Sets.newTreeSet(String.CASE_INSENSITIVE_ORDER); - final Set mergedMetrics = Sets.newTreeSet(String.CASE_INSENSITIVE_ORDER); + final Set mergedDimensions = new TreeSet<>(String.CASE_INSENSITIVE_ORDER); + final Set mergedMetrics = new TreeSet<>(String.CASE_INSENSITIVE_ORDER); for (DataSegment segment : segments) { mergedDimensions.addAll(segment.getDimensions()); diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/ForkingTaskRunner.java b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/ForkingTaskRunner.java index 15fadda42a7..03af8570060 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/ForkingTaskRunner.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/ForkingTaskRunner.java @@ -75,6 +75,7 @@ import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; @@ -151,7 +152,7 @@ public class ForkingTaskRunner implements TaskRunner, TaskLogStreamer return ImmutableList.of(); } - final List>> retVal = Lists.newArrayList(); + final List>> retVal = new ArrayList<>(); for (final String taskId : taskRestoreInfo.getRunningTasks()) { try { final File taskFile = new File(taskConfig.getTaskDir(taskId), "task.json"); @@ -275,7 +276,7 @@ public class ForkingTaskRunner implements TaskRunner, TaskLogStreamer throw new ISE("TaskInfo already has processHolder for task[%s]!", task.getId()); } - final List command = Lists.newArrayList(); + final List command = new ArrayList<>(); final String taskClasspath; if (task.getClasspathPrefix() != null && !task.getClasspathPrefix().isEmpty()) { taskClasspath = Joiner.on(File.pathSeparator).join( @@ -608,7 +609,7 @@ public class ForkingTaskRunner implements TaskRunner, TaskLogStreamer public Collection getRunningTasks() { synchronized (tasks) { - final List ret = Lists.newArrayList(); + final List ret = new ArrayList<>(); for (final ForkingTaskRunnerWorkItem taskWorkItem : tasks.values()) { if (taskWorkItem.processHolder != null) { ret.add(taskWorkItem); @@ -622,7 +623,7 @@ public class ForkingTaskRunner implements TaskRunner, TaskLogStreamer public Collection getPendingTasks() { synchronized (tasks) { 
- final List ret = Lists.newArrayList(); + final List ret = new ArrayList<>(); for (final ForkingTaskRunnerWorkItem taskWorkItem : tasks.values()) { if (taskWorkItem.processHolder == null) { ret.add(taskWorkItem); @@ -702,7 +703,7 @@ public class ForkingTaskRunner implements TaskRunner, TaskLogStreamer private void saveRunningTasks() { final File restoreFile = getRestoreFile(); - final List theTasks = Lists.newArrayList(); + final List theTasks = new ArrayList<>(); for (ForkingTaskRunnerWorkItem forkingTaskRunnerWorkItem : tasks.values()) { theTasks.add(forkingTaskRunnerWorkItem.getTaskId()); } diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/HeapMemoryTaskStorage.java b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/HeapMemoryTaskStorage.java index dc4c32d46f4..006286ac841 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/HeapMemoryTaskStorage.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/HeapMemoryTaskStorage.java @@ -24,7 +24,6 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.HashMultimap; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Maps; import com.google.common.collect.Multimap; import com.google.common.collect.Ordering; import com.google.inject.Inject; @@ -41,6 +40,7 @@ import org.joda.time.DateTime; import org.joda.time.Duration; import javax.annotation.Nullable; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.locks.ReentrantLock; @@ -55,7 +55,7 @@ public class HeapMemoryTaskStorage implements TaskStorage private final TaskStorageConfig config; private final ReentrantLock giant = new ReentrantLock(); - private final Map tasks = Maps.newHashMap(); + private final Map tasks = new HashMap<>(); private final Multimap taskLocks = HashMultimap.create(); private final Multimap taskActions = ArrayListMultimap.create(); diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/PortFinder.java b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/PortFinder.java index 8ec7d467ff8..1a66d1fb398 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/PortFinder.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/PortFinder.java @@ -20,18 +20,18 @@ package org.apache.druid.indexing.overlord; import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.Sets; import org.apache.druid.java.util.common.ISE; import java.io.IOException; import java.net.ServerSocket; import java.net.SocketException; +import java.util.HashSet; import java.util.List; import java.util.Set; public class PortFinder { - private final Set usedPorts = Sets.newHashSet(); + private final Set usedPorts = new HashSet<>(); private final int startPort; private final int endPort; private final List candidatePorts; diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/TaskLockbox.java b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/TaskLockbox.java index 04ee705628a..96451b7c570 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/TaskLockbox.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/TaskLockbox.java @@ -27,9 +27,7 @@ import com.google.common.collect.ComparisonChain; import com.google.common.collect.ImmutableList; import 
com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.collect.Ordering; -import com.google.common.collect.Sets; import com.google.inject.Inject; import org.apache.druid.indexing.common.TaskLock; import org.apache.druid.indexing.common.TaskLockType; @@ -46,6 +44,7 @@ import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -71,7 +70,7 @@ public class TaskLockbox // Multiple shared locks can be acquired for the same dataSource and interval. // Note that revoked locks are also maintained in this map to notify that those locks are revoked to the callers when // they acquire the same locks again. - private final Map>> running = Maps.newHashMap(); + private final Map>> running = new HashMap<>(); private final TaskStorage taskStorage; private final ReentrantLock giant = new ReentrantLock(true); private final Condition lockReleaseCondition = giant.newCondition(); @@ -80,7 +79,7 @@ public class TaskLockbox // Stores List of Active Tasks. TaskLockbox will only grant locks to active activeTasks. // this set should be accessed under the giant lock. - private final Set activeTasks = Sets.newHashSet(); + private final Set activeTasks = new HashSet<>(); @Inject public TaskLockbox( @@ -99,8 +98,8 @@ public class TaskLockbox try { // Load stuff from taskStorage first. If this fails, we don't want to lose all our locks. - final Set storedActiveTasks = Sets.newHashSet(); - final List> storedLocks = Lists.newArrayList(); + final Set storedActiveTasks = new HashSet<>(); + final List> storedLocks = new ArrayList<>(); for (final Task task : taskStorage.getActiveTasks()) { storedActiveTasks.add(task.getId()); for (final TaskLock taskLock : taskStorage.getLocks(task.getId())) { diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/TaskQueue.java b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/TaskQueue.java index 253894b84bd..082e1572872 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/TaskQueue.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/TaskQueue.java @@ -26,7 +26,6 @@ import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.common.util.concurrent.FutureCallback; import com.google.common.util.concurrent.Futures; @@ -47,7 +46,9 @@ import org.apache.druid.java.util.emitter.service.ServiceEmitter; import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; import org.apache.druid.metadata.EntryExistsException; +import java.util.ArrayList; import java.util.Collection; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -72,8 +73,8 @@ public class TaskQueue { private final long MANAGEMENT_WAIT_TIMEOUT_NANOS = TimeUnit.SECONDS.toNanos(60); - private final List tasks = Lists.newArrayList(); - private final Map> taskFutures = Maps.newHashMap(); + private final List tasks = new ArrayList<>(); + private final Map> taskFutures = new HashMap<>(); private final TaskQueueConfig config; private final TaskStorage taskStorage; @@ -232,7 +233,7 @@ public class TaskQueue try { // Task futures available from 
the taskRunner - final Map> runnerTaskFutures = Maps.newHashMap(); + final Map> runnerTaskFutures = new HashMap<>(); for (final TaskRunnerWorkItem workItem : taskRunner.getKnownTasks()) { runnerTaskFutures.put(workItem.getTaskId(), workItem.getResult()); } @@ -574,7 +575,7 @@ public class TaskQueue private static Map toTaskIDMap(List taskList) { - Map rv = Maps.newHashMap(); + Map rv = new HashMap<>(); for (Task task : taskList) { rv.put(task.getId(), task); } diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/TaskStorageQueryAdapter.java b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/TaskStorageQueryAdapter.java index 869ee4770de..fd61752a7f2 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/TaskStorageQueryAdapter.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/TaskStorageQueryAdapter.java @@ -20,7 +20,6 @@ package org.apache.druid.indexing.overlord; import com.google.common.base.Optional; -import com.google.common.collect.Sets; import com.google.inject.Inject; import org.apache.druid.indexer.TaskInfo; import org.apache.druid.indexer.TaskStatus; @@ -32,6 +31,7 @@ import org.apache.druid.timeline.DataSegment; import org.joda.time.Duration; import javax.annotation.Nullable; +import java.util.HashSet; import java.util.List; import java.util.Set; @@ -96,7 +96,7 @@ public class TaskStorageQueryAdapter @Deprecated public Set getInsertedSegments(final String taskid) { - final Set segments = Sets.newHashSet(); + final Set segments = new HashSet<>(); for (final TaskAction action : storage.getAuditLogs(taskid)) { if (action instanceof SegmentInsertAction) { segments.addAll(((SegmentInsertAction) action).getSegments()); diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/ZkWorker.java b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/ZkWorker.java index 2d55e592508..d9c5c956ccb 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/ZkWorker.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/ZkWorker.java @@ -25,8 +25,6 @@ import com.google.common.base.Function; import com.google.common.base.Preconditions; import com.google.common.base.Throwables; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; import org.apache.curator.framework.recipes.cache.ChildData; import org.apache.curator.framework.recipes.cache.PathChildrenCache; import org.apache.curator.framework.recipes.cache.PathChildrenCacheListener; @@ -39,6 +37,8 @@ import org.joda.time.DateTime; import java.io.Closeable; import java.io.IOException; import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; @@ -100,7 +100,7 @@ public class ZkWorker implements Closeable public Map getRunningTasks() { - Map retVal = Maps.newHashMap(); + Map retVal = new HashMap<>(); for (TaskAnnouncement taskAnnouncement : Lists.transform( statusCache.getCurrentData(), cacheConverter @@ -124,7 +124,7 @@ public class ZkWorker implements Closeable @JsonProperty("availabilityGroups") public Set getAvailabilityGroups() { - Set retVal = Sets.newHashSet(); + Set retVal = new HashSet<>(); for (TaskAnnouncement taskAnnouncement : getRunningTasks().values()) { retVal.add(taskAnnouncement.getTaskResource().getAvailabilityGroup()); } diff --git 
a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/autoscaling/PendingTaskBasedWorkerProvisioningStrategy.java b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/autoscaling/PendingTaskBasedWorkerProvisioningStrategy.java index 79bf9e99d74..c908d282a30 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/autoscaling/PendingTaskBasedWorkerProvisioningStrategy.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/autoscaling/PendingTaskBasedWorkerProvisioningStrategy.java @@ -27,7 +27,6 @@ import com.google.common.collect.Collections2; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.inject.Inject; import org.apache.druid.indexing.common.task.Task; @@ -47,6 +46,8 @@ import org.joda.time.Duration; import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -136,8 +137,8 @@ public class PendingTaskBasedWorkerProvisioningStrategy extends AbstractWorkerPr private final WorkerTaskRunner runner; private final ScalingStats scalingStats = new ScalingStats(config.getNumEventsToTrack()); - private final Set currentlyProvisioning = Sets.newHashSet(); - private final Set currentlyTerminating = Sets.newHashSet(); + private final Set currentlyProvisioning = new HashSet<>(); + private final Set currentlyTerminating = new HashSet<>(); private DateTime lastProvisionTime = DateTimes.nowUtc(); private DateTime lastTerminateTime = lastProvisionTime; @@ -287,7 +288,7 @@ public class PendingTaskBasedWorkerProvisioningStrategy extends AbstractWorkerPr ); log.debug("Valid workers: %d %s", validWorkers.size(), validWorkers); - Map workersMap = Maps.newHashMap(); + Map workersMap = new HashMap<>(); for (ImmutableWorkerInfo worker : validWorkers) { workersMap.put(worker.getWorker().getHost(), worker); } @@ -478,8 +479,8 @@ public class PendingTaskBasedWorkerProvisioningStrategy extends AbstractWorkerPr return new ImmutableWorkerInfo( new Worker(scheme, host, "-2", capacity, version), 0, - Sets.newHashSet(), - Sets.newHashSet(), + new HashSet<>(), + new HashSet<>(), DateTimes.nowUtc() ); } diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/autoscaling/SimpleWorkerProvisioningStrategy.java b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/autoscaling/SimpleWorkerProvisioningStrategy.java index 46fc55b5110..a17014c29c5 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/autoscaling/SimpleWorkerProvisioningStrategy.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/autoscaling/SimpleWorkerProvisioningStrategy.java @@ -42,6 +42,7 @@ import org.joda.time.DateTime; import org.joda.time.Duration; import java.util.Collection; +import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.ScheduledExecutorService; @@ -101,8 +102,8 @@ public class SimpleWorkerProvisioningStrategy extends AbstractWorkerProvisioning private final WorkerTaskRunner runner; private final ScalingStats scalingStats = new ScalingStats(config.getNumEventsToTrack()); - private final Set currentlyProvisioning = Sets.newHashSet(); - private final Set currentlyTerminating = Sets.newHashSet(); + private final Set 
currentlyProvisioning = new HashSet<>(); + private final Set currentlyTerminating = new HashSet<>(); private int targetWorkerCount = -1; private DateTime lastProvisionTime = DateTimes.nowUtc(); diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/autoscaling/ec2/EC2AutoScaler.java b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/autoscaling/ec2/EC2AutoScaler.java index 175c8705dc3..30402377006 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/autoscaling/ec2/EC2AutoScaler.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/autoscaling/ec2/EC2AutoScaler.java @@ -41,6 +41,7 @@ import org.apache.druid.indexing.overlord.autoscaling.AutoScalingData; import org.apache.druid.indexing.overlord.autoscaling.SimpleWorkerProvisioningConfig; import org.apache.druid.java.util.emitter.EmittingLogger; +import java.util.ArrayList; import java.util.List; /** @@ -186,7 +187,7 @@ public class EC2AutoScaler implements AutoScaler public AutoScalingData terminate(List ips) { if (ips.isEmpty()) { - return new AutoScalingData(Lists.newArrayList()); + return new AutoScalingData(new ArrayList<>()); } DescribeInstancesResult result = amazonEC2Client.describeInstances( @@ -196,7 +197,7 @@ public class EC2AutoScaler implements AutoScaler ) ); - List instances = Lists.newArrayList(); + List instances = new ArrayList<>(); for (Reservation reservation : result.getReservations()) { instances.addAll(reservation.getInstances()); } @@ -227,7 +228,7 @@ public class EC2AutoScaler implements AutoScaler public AutoScalingData terminateWithIds(List ids) { if (ids.isEmpty()) { - return new AutoScalingData(Lists.newArrayList()); + return new AutoScalingData(new ArrayList<>()); } try { diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/hrtr/WorkerHolder.java b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/hrtr/WorkerHolder.java index 48c087c25f1..8c208c22471 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/hrtr/WorkerHolder.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/hrtr/WorkerHolder.java @@ -22,7 +22,6 @@ package org.apache.druid.indexing.overlord.hrtr; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.jaxrs.smile.SmileMediaTypes; -import com.google.common.collect.Sets; import org.apache.druid.indexer.TaskStatus; import org.apache.druid.indexing.common.task.Task; import org.apache.druid.indexing.overlord.ImmutableWorkerInfo; @@ -48,6 +47,7 @@ import org.joda.time.DateTime; import java.net.URL; import java.nio.charset.StandardCharsets; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -145,7 +145,7 @@ public class WorkerHolder private Set getAvailabilityGroups() { - Set retVal = Sets.newHashSet(); + Set retVal = new HashSet<>(); for (TaskAnnouncement taskAnnouncement : getRunningTasks().values()) { retVal.add(taskAnnouncement.getTaskResource().getAvailabilityGroup()); } diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/http/OverlordResource.java b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/http/OverlordResource.java index ad5b232d426..e26653b1568 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/overlord/http/OverlordResource.java +++ 
b/indexing-service/src/main/java/org/apache/druid/indexing/overlord/http/OverlordResource.java @@ -24,7 +24,6 @@ import com.google.common.base.Optional; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.io.ByteSource; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.SettableFuture; @@ -476,7 +475,7 @@ public class OverlordResource try { final Object ret = taskActionClient.submit(holder.getAction()); - retMap = Maps.newHashMap(); + retMap = new HashMap<>(); retMap.put("result", ret); } catch (IOException e) { @@ -707,7 +706,7 @@ public class OverlordResource } final List> allActiveTaskInfo; - final List allActiveTasks = Lists.newArrayList(); + final List allActiveTasks = new ArrayList<>(); if (state == null || !"complete".equals(StringUtils.toLowerCase(state))) { allActiveTaskInfo = taskStorageQueryAdapter.getActiveTaskInfo(dataSource); for (final TaskInfo task : allActiveTaskInfo) { @@ -796,7 +795,7 @@ public class OverlordResource .stream() .map(TaskRunnerWorkItem::getTaskId) .collect(Collectors.toSet()); - final List waitingTasks = Lists.newArrayList(); + final List waitingTasks = new ArrayList<>(); for (TaskRunnerWorkItem task : allTasks) { if (!runnerKnownTaskIds.contains(task.getTaskId())) { waitingTasks.add(((AnyTask) task).withTaskState( @@ -824,7 +823,7 @@ public class OverlordResource java.util.function.Function.identity(), (previousWorkItem, newWorkItem) -> newWorkItem )); - final List pendingTasks = Lists.newArrayList(); + final List pendingTasks = new ArrayList<>(); for (TaskRunnerWorkItem task : allTasks) { if (pendingTaskIds.contains(task.getTaskId())) { pendingTasks.add(((AnyTask) task).withTaskState( @@ -852,7 +851,7 @@ public class OverlordResource java.util.function.Function.identity(), (previousWorkItem, newWorkItem) -> newWorkItem )); - final List runningTasks = Lists.newArrayList(); + final List runningTasks = new ArrayList<>(); for (TaskRunnerWorkItem task : allTasks) { if (runningTaskIds.contains(task.getTaskId())) { runningTasks.add(((AnyTask) task).withTaskState( diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/worker/WorkerCuratorCoordinator.java b/indexing-service/src/main/java/org/apache/druid/indexing/worker/WorkerCuratorCoordinator.java index ff5d4d4f3cc..53d14951b5d 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/worker/WorkerCuratorCoordinator.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/worker/WorkerCuratorCoordinator.java @@ -22,7 +22,6 @@ package org.apache.druid.indexing.worker; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.Inject; import org.apache.curator.framework.CuratorFramework; @@ -38,6 +37,7 @@ import org.apache.druid.server.initialization.IndexerZkConfig; import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.KeeperException; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -198,7 +198,7 @@ public class WorkerCuratorCoordinator public List getAnnouncements() throws Exception { - final List announcements = Lists.newArrayList(); + final List announcements = new ArrayList<>(); for (String id : 
curatorFramework.getChildren().forPath(getStatusPathForWorker())) { announcements.add( diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java index 5e92b2af360..5c282421060 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java @@ -25,7 +25,6 @@ import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; @@ -151,6 +150,7 @@ import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.util.ArrayDeque; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -1591,7 +1591,7 @@ public class AppenderatorDriverRealtimeIndexTaskTest @Override public List getLocations() { - return Lists.newArrayList(); + return new ArrayList<>(); } }; diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/IndexTaskTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/IndexTaskTest.java index 40a3d4fa77d..a9616bf930f 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/IndexTaskTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/IndexTaskTest.java @@ -24,7 +24,6 @@ import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.common.io.Files; import org.apache.druid.data.input.impl.CSVParseSpec; @@ -137,8 +136,8 @@ public class IndexTaskTest ), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(Arrays.asList("ts", "dim")), - Lists.newArrayList(), - Lists.newArrayList() + new ArrayList<>(), + new ArrayList<>() ), null, Arrays.asList("ts", "dim", "val"), @@ -672,8 +671,8 @@ public class IndexTaskTest ), new DimensionsSpec( null, - Lists.newArrayList(), - Lists.newArrayList() + new ArrayList<>(), + new ArrayList<>() ), null, null, @@ -724,8 +723,8 @@ public class IndexTaskTest ), new DimensionsSpec( null, - Lists.newArrayList(), - Lists.newArrayList() + new ArrayList<>(), + new ArrayList<>() ), null, Arrays.asList("time", "dim", "val"), @@ -933,8 +932,8 @@ public class IndexTaskTest ), new DimensionsSpec( null, - Lists.newArrayList(), - Lists.newArrayList() + new ArrayList<>(), + new ArrayList<>() ), null, Arrays.asList("time", "dim", "val"), @@ -986,8 +985,8 @@ public class IndexTaskTest ), new DimensionsSpec( null, - Lists.newArrayList(), - Lists.newArrayList() + new ArrayList<>(), + new ArrayList<>() ), null, Arrays.asList("time", "dim", "val"), @@ -1078,8 +1077,8 @@ public class IndexTaskTest new LongDimensionSchema("dimLong"), new FloatDimensionSchema("dimFloat") ), - Lists.newArrayList(), - Lists.newArrayList() + new ArrayList<>(), + new ArrayList<>() ), null, null @@ 
-1200,8 +1199,8 @@ public class IndexTaskTest new LongDimensionSchema("dimLong"), new FloatDimensionSchema("dimFloat") ), - Lists.newArrayList(), - Lists.newArrayList() + new ArrayList<>(), + new ArrayList<>() ), null, Arrays.asList("time", "dim", "dimLong", "dimFloat", "val"), @@ -1315,8 +1314,8 @@ public class IndexTaskTest new LongDimensionSchema("dimLong"), new FloatDimensionSchema("dimFloat") ), - Lists.newArrayList(), - Lists.newArrayList() + new ArrayList<>(), + new ArrayList<>() ), null, Arrays.asList("time", "dim", "dimLong", "dimFloat", "val"), @@ -1484,8 +1483,8 @@ public class IndexTaskTest ), new DimensionsSpec( null, - Lists.newArrayList(), - Lists.newArrayList() + new ArrayList<>(), + new ArrayList<>() ), null, Arrays.asList("time", "", ""), diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/RealtimeIndexTaskTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/RealtimeIndexTaskTest.java index bb6867755b7..73b5a2ce707 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/RealtimeIndexTaskTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/RealtimeIndexTaskTest.java @@ -25,8 +25,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; @@ -140,8 +138,10 @@ import javax.annotation.Nullable; import java.io.File; import java.nio.file.Files; import java.util.ArrayDeque; +import java.util.ArrayList; import java.util.Arrays; import java.util.Deque; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; @@ -607,7 +607,7 @@ public class RealtimeIndexTaskTest Assert.assertEquals(TaskState.SUCCESS, taskStatus.getStatusCode()); // Nothing should be published. - Assert.assertEquals(Sets.newHashSet(), mdc.getPublished()); + Assert.assertEquals(new HashSet<>(), mdc.getPublished()); } // Second run: @@ -795,7 +795,7 @@ public class RealtimeIndexTaskTest Assert.assertEquals(TaskState.SUCCESS, taskStatus.getStatusCode()); // Nothing should be published. 
- Assert.assertEquals(Sets.newHashSet(), mdc.getPublished()); + Assert.assertEquals(new HashSet<>(), mdc.getPublished()); } // Corrupt the data: @@ -1061,7 +1061,7 @@ public class RealtimeIndexTaskTest @Override public List getLocations() { - return Lists.newArrayList(); + return new ArrayList<>(); } }; final TaskToolboxFactory toolboxFactory = new TaskToolboxFactory( diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/SameIntervalMergeTaskTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/SameIntervalMergeTaskTest.java index d8ab412ec3a..6284503d901 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/SameIntervalMergeTaskTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/SameIntervalMergeTaskTest.java @@ -21,7 +21,6 @@ package org.apache.druid.indexing.common.task; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; import org.apache.druid.indexing.common.TaskLock; import org.apache.druid.indexing.common.TaskLockType; import org.apache.druid.indexing.common.TaskToolbox; @@ -53,6 +52,7 @@ import org.junit.rules.TemporaryFolder; import java.io.File; import java.net.URI; +import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; @@ -138,7 +138,7 @@ public class SameIntervalMergeTaskTest }); // ensure LockTryAcquireAction is submitted Assert.assertTrue(isReady); - final List segments = Lists.newArrayList(); + final List segments = new ArrayList<>(); mergeTask.run( new TaskToolbox( diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/batch/parallel/AbstractParallelIndexSupervisorTaskTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/batch/parallel/AbstractParallelIndexSupervisorTaskTest.java index 024a5b7e222..6c932da85da 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/batch/parallel/AbstractParallelIndexSupervisorTaskTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/batch/parallel/AbstractParallelIndexSupervisorTaskTest.java @@ -21,7 +21,6 @@ package org.apache.druid.indexing.common.task.batch.parallel; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import org.apache.druid.client.indexing.IndexingServiceClient; @@ -67,6 +66,7 @@ import org.junit.rules.TemporaryFolder; import javax.annotation.Nullable; import java.io.File; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; @@ -86,8 +86,8 @@ public class AbstractParallelIndexSupervisorTaskTest extends IngestionTestBase ), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(Arrays.asList("ts", "dim")), - Lists.newArrayList(), - Lists.newArrayList() + new ArrayList<>(), + new ArrayList<>() ), null, Arrays.asList("ts", "dim", "val"), diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/batch/parallel/ParallelIndexSupervisorTaskSerdeTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/batch/parallel/ParallelIndexSupervisorTaskSerdeTest.java index 3cbf152be2a..dd9096310d8 100644 --- 
a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/batch/parallel/ParallelIndexSupervisorTaskSerdeTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/batch/parallel/ParallelIndexSupervisorTaskSerdeTest.java @@ -21,7 +21,6 @@ package org.apache.druid.indexing.common.task.batch.parallel; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.jsontype.NamedType; -import com.google.common.collect.Lists; import org.apache.druid.client.indexing.NoopIndexingServiceClient; import org.apache.druid.data.input.impl.CSVParseSpec; import org.apache.druid.data.input.impl.DimensionsSpec; @@ -49,6 +48,7 @@ import org.junit.rules.TemporaryFolder; import java.io.File; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; @@ -67,8 +67,8 @@ public class ParallelIndexSupervisorTaskSerdeTest ), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(Arrays.asList("ts", "dim")), - Lists.newArrayList(), - Lists.newArrayList() + new ArrayList<>(), + new ArrayList<>() ), null, Arrays.asList("ts", "dim", "val"), diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java index 97ed98b7a71..7fc311a3f6d 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java @@ -26,8 +26,6 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; import com.google.common.io.Files; import com.google.inject.Binder; import com.google.inject.Module; @@ -111,6 +109,7 @@ import java.nio.file.Paths; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -174,8 +173,8 @@ public class IngestSegmentFirehoseFactoryTest final IndexerSQLMetadataStorageCoordinator mdc = new IndexerSQLMetadataStorageCoordinator(null, null, null) { - private final Set published = Sets.newHashSet(); - private final Set nuked = Sets.newHashSet(); + private final Set published = new HashSet<>(); + private final Set nuked = new HashSet<>(); @Override public List getUsedSegmentsForInterval(String dataSource, Interval interval) @@ -198,7 +197,7 @@ public class IngestSegmentFirehoseFactoryTest @Override public Set announceHistoricalSegments(Set segments) { - Set added = Sets.newHashSet(); + Set added = new HashSet<>(); for (final DataSegment segment : segments) { if (published.add(segment)) { added.add(segment); @@ -234,7 +233,7 @@ public class IngestSegmentFirehoseFactoryTest @Override public List getLocations() { - return Lists.newArrayList(); + return new ArrayList<>(); } }; final TaskToolboxFactory taskToolboxFactory = new TaskToolboxFactory( diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java index 10731211978..3343151c110 100644 --- 
a/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTimelineTest.java @@ -24,8 +24,6 @@ import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; import com.google.common.io.Files; import org.apache.commons.io.FileUtils; import org.apache.druid.data.input.Firehose; @@ -79,8 +77,10 @@ import org.junit.runners.Parameterized; import java.io.File; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -172,7 +172,7 @@ public class IngestSegmentFirehoseFactoryTimelineTest ) { final File tmpDir = Files.createTempDir(); - final Set segments = Sets.newHashSet(); + final Set segments = new HashSet<>(); for (DataSegmentMaker segmentMaker : segmentMakers) { segments.add(segmentMaker.make(tmpDir)); } @@ -279,7 +279,7 @@ public class IngestSegmentFirehoseFactoryTimelineTest ) /* Segment intersecting desired interval */ ); - final List constructors = Lists.newArrayList(); + final List constructors = new ArrayList<>(); for (final TestCase testCase : testCases) { final TaskActionClient taskActionClient = new TaskActionClient() @@ -309,7 +309,7 @@ public class IngestSegmentFirehoseFactoryTimelineTest @Override public List getLocations() { - return Lists.newArrayList(); + return new ArrayList<>(); } }; final TaskToolboxFactory taskToolboxFactory = new TaskToolboxFactory( @@ -398,7 +398,7 @@ public class IngestSegmentFirehoseFactoryTimelineTest @Override public String toString() { - final List segmentIdentifiers = Lists.newArrayList(); + final List segmentIdentifiers = new ArrayList<>(); for (DataSegment segment : segments) { segmentIdentifiers.add(segment.getIdentifier()); } diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/TaskLifecycleTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/TaskLifecycleTest.java index 602107aa635..0f1a2f60cfc 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/TaskLifecycleTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/TaskLifecycleTest.java @@ -140,6 +140,7 @@ import javax.annotation.Nullable; import java.io.File; import java.io.IOException; import java.net.URI; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Iterator; @@ -550,7 +551,7 @@ public class TaskLifecycleTest @Override public List getLocations() { - return Lists.newArrayList(); + return new ArrayList<>(); } }; return new TaskToolboxFactory( @@ -854,7 +855,7 @@ public class TaskLifecycleTest mdc.setUnusedSegments(expectedUnusedSegments); // manually create local segments files - List segmentFiles = Lists.newArrayList(); + List segmentFiles = new ArrayList<>(); for (DataSegment segment : mdc.getUnusedSegmentsForInterval("test_kill_task", Intervals.of("2011-04-01/P4D"))) { File file = new File((String) segment.getLoadSpec().get("path")); file.mkdirs(); diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/autoscaling/EC2AutoScalerTest.java 
b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/autoscaling/EC2AutoScalerTest.java index e81804c4afe..efdc794ae04 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/autoscaling/EC2AutoScalerTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/autoscaling/EC2AutoScalerTest.java @@ -44,6 +44,7 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; @@ -57,7 +58,7 @@ public class EC2AutoScalerTest private static final String INSTANCE_ID = "theInstance"; public static final EC2EnvironmentConfig ENV_CONFIG = new EC2EnvironmentConfig( "us-east-1a", - new EC2NodeData(AMI_ID, INSTANCE_ID, 1, 1, Lists.newArrayList(), "foo", "mySubnet", null, null), + new EC2NodeData(AMI_ID, INSTANCE_ID, 1, 1, new ArrayList<>(), "foo", "mySubnet", null, null), new GalaxyEC2UserData(new DefaultObjectMapper(), "env", "version", "type") ); private static final String IP = "dummyIP"; diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/autoscaling/PendingTaskBasedProvisioningStrategyTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/autoscaling/PendingTaskBasedProvisioningStrategyTest.java index 1476016a064..2fb0949c96f 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/autoscaling/PendingTaskBasedProvisioningStrategyTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/autoscaling/PendingTaskBasedProvisioningStrategyTest.java @@ -21,8 +21,6 @@ package org.apache.druid.indexing.overlord.autoscaling; import com.google.common.base.Supplier; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import org.apache.druid.common.guava.DSuppliers; import org.apache.druid.indexer.TaskLocation; import org.apache.druid.indexer.TaskStatus; @@ -51,8 +49,10 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.Map; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.atomic.AtomicReference; @@ -111,11 +111,11 @@ public class PendingTaskBasedProvisioningStrategyTest EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(3); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(5); EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) - .andReturn(Lists.newArrayList()); + .andReturn(new ArrayList<>()); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); // No pending tasks EasyMock.expect(runner.getPendingTaskPayloads()).andReturn( - Lists.newArrayList() + new ArrayList<>() ); EasyMock.expect(runner.getWorkers()).andReturn( Collections.emptyList() ); @@ -142,11 +142,11 @@ public class PendingTaskBasedProvisioningStrategyTest EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(3); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(5); EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) - .andReturn(Lists.newArrayList()); + .andReturn(new ArrayList<>()); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); // No pending tasks EasyMock.expect(runner.getPendingTaskPayloads()).andReturn( - Lists.newArrayList() + new ArrayList<>() ); // 1 node already running, only provision 2 more.
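In these EasyMock expectations the new collection sits in argument position rather than assignment position, so the type argument must come from the enclosing call. Under the Java 8 language level this patch targets, the diamond still infers correctly. A sketch of why, using a hypothetical stand-in interface rather than the real AutoScaler:

    import java.util.ArrayList;
    import java.util.List;
    import org.easymock.EasyMock;

    class StubSketch
    {
      // Hypothetical interface that mirrors only the shape of the mocked call.
      interface IdLookup
      {
        List<String> ipToIdLookup(List<String> ips);
      }

      void stub()
      {
        IdLookup lookup = EasyMock.createMock(IdLookup.class);
        // andReturn(T) is pinned to T = List<String> by the expect() call, so
        // new ArrayList<>() is inferred as ArrayList<String>; spelling out
        // new ArrayList<String>() is equivalent and avoids raw-type warnings.
        EasyMock.expect(lookup.ipToIdLookup(EasyMock.anyObject()))
                .andReturn(new ArrayList<>());
        EasyMock.replay(lookup);
      }
    }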
EasyMock.expect(runner.getWorkers()).andReturn( @@ -176,11 +176,11 @@ public class PendingTaskBasedProvisioningStrategyTest EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(3); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(5); EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) - .andReturn(Lists.newArrayList()); + .andReturn(new ArrayList<>()); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); // No pending tasks EasyMock.expect(runner.getPendingTaskPayloads()).andReturn( - Lists.newArrayList() + new ArrayList<>() ); // 1 node already running, only provision 2 more. EasyMock.expect(runner.getWorkers()).andReturn( @@ -211,7 +211,7 @@ public class PendingTaskBasedProvisioningStrategyTest EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0).times(1); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(2).times(1); EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) - .andReturn(Lists.newArrayList()).times(2); + .andReturn(new ArrayList<>()).times(2); EasyMock.expect(autoScaler.provision()).andReturn( new AutoScalingData(Collections.singletonList("fake")) ); @@ -268,7 +268,7 @@ public class PendingTaskBasedProvisioningStrategyTest EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0).times(1); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(2).times(1); EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) - .andReturn(Lists.newArrayList()).times(2); + .andReturn(new ArrayList<>()).times(2); EasyMock.expect(autoScaler.terminateWithIds(EasyMock.anyObject())) .andReturn(null); EasyMock.expect(autoScaler.provision()).andReturn( @@ -324,9 +324,9 @@ public class PendingTaskBasedProvisioningStrategyTest { EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0); EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) - .andReturn(Lists.newArrayList()); + .andReturn(new ArrayList<>()); EasyMock.expect(autoScaler.terminate(EasyMock.anyObject())).andReturn( - new AutoScalingData(Lists.newArrayList()) + new AutoScalingData(new ArrayList<>()) ); EasyMock.replay(autoScaler); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); @@ -348,7 +348,7 @@ public class PendingTaskBasedProvisioningStrategyTest ).times(2); EasyMock.expect(runner.markWorkersLazy(EasyMock.anyObject(), EasyMock.anyInt())) .andReturn(Collections.singletonList(new TestZkWorker(testTask).getWorker())); - EasyMock.expect(runner.getLazyWorkers()).andReturn(Lists.newArrayList()); + EasyMock.expect(runner.getLazyWorkers()).andReturn(new ArrayList<>()); EasyMock.replay(runner); Provisioner provisioner = strategy.makeProvisioner(runner); @@ -380,7 +380,7 @@ public class PendingTaskBasedProvisioningStrategyTest new TestZkWorker(testTask).toImmutable() ) ).times(2); - EasyMock.expect(runner.getLazyWorkers()).andReturn(Lists.newArrayList()).times(2); + EasyMock.expect(runner.getLazyWorkers()).andReturn(new ArrayList<>()).times(2); EasyMock.expect(runner.markWorkersLazy(EasyMock.anyObject(), EasyMock.anyInt())) .andReturn(Collections.singletonList(new TestZkWorker(testTask).toImmutable().getWorker())); EasyMock.replay(runner); @@ -429,7 +429,7 @@ public class PendingTaskBasedProvisioningStrategyTest ).times(2); EasyMock.expect(runner.getConfig()).andReturn(new RemoteTaskRunnerConfig()); - EasyMock.expect(runner.getLazyWorkers()).andReturn(Lists.newArrayList()); + EasyMock.expect(runner.getLazyWorkers()).andReturn(new ArrayList<>()); EasyMock.expect(runner.markWorkersLazy(EasyMock.anyObject(), EasyMock.anyInt()))
.andReturn(Collections.emptyList()); EasyMock.replay(runner); @@ -474,7 +474,7 @@ public class PendingTaskBasedProvisioningStrategyTest ).times(3); EasyMock.expect(runner.getConfig()).andReturn(new RemoteTaskRunnerConfig()).times(2); - EasyMock.expect(runner.getLazyWorkers()).andReturn(Lists.newArrayList()); + EasyMock.expect(runner.getLazyWorkers()).andReturn(new ArrayList<>()); EasyMock.expect(runner.markWorkersLazy(EasyMock.anyObject(), EasyMock.anyInt())) .andReturn(Collections.emptyList()); EasyMock.replay(runner); @@ -586,7 +586,7 @@ public class PendingTaskBasedProvisioningStrategyTest public Map getRunningTasks() { if (testTask == null) { - return Maps.newHashMap(); + return new HashMap<>(); } return ImmutableMap.of( testTask.getId(), diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/autoscaling/SimpleProvisioningStrategyTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/autoscaling/SimpleProvisioningStrategyTest.java index 7dbd5fe6f90..7796dfcb417 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/autoscaling/SimpleProvisioningStrategyTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/autoscaling/SimpleProvisioningStrategyTest.java @@ -21,8 +21,6 @@ package org.apache.druid.indexing.overlord.autoscaling; import com.google.common.base.Supplier; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import org.apache.druid.common.guava.DSuppliers; import org.apache.druid.indexer.TaskLocation; import org.apache.druid.indexer.TaskStatus; @@ -50,8 +48,10 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.Map; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.atomic.AtomicReference; @@ -115,7 +115,7 @@ public class SimpleProvisioningStrategyTest EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(2); EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) - .andReturn(Lists.newArrayList()); + .andReturn(new ArrayList<>()); EasyMock.expect(autoScaler.provision()).andReturn( new AutoScalingData(Collections.singletonList("aNode")) ); @@ -153,7 +153,7 @@ public class SimpleProvisioningStrategyTest EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0).times(2); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(2).times(2); EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) - .andReturn(Lists.newArrayList()).times(2); + .andReturn(new ArrayList<>()).times(2); EasyMock.expect(autoScaler.provision()).andReturn( new AutoScalingData(Collections.singletonList("fake")) ); @@ -209,7 +209,7 @@ public class SimpleProvisioningStrategyTest EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0).times(2); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(2).times(2); EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) - .andReturn(Lists.newArrayList()).times(2); + .andReturn(new ArrayList<>()).times(2); EasyMock.expect(autoScaler.terminateWithIds(EasyMock.anyObject())) .andReturn(null); EasyMock.expect(autoScaler.provision()).andReturn( @@ -264,9 +264,9 @@ public class SimpleProvisioningStrategyTest EasyMock.expect(autoScaler.getMinNumWorkers()).andReturn(0); EasyMock.expect(autoScaler.getMaxNumWorkers()).andReturn(1);
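The getRunningTasks hunks show the same swap in return position, where the method's declared return type drives the inference. A compact sketch under an illustrative signature (the test helper's real key and value types differ):

    import com.google.common.collect.ImmutableMap;
    import java.util.HashMap;
    import java.util.Map;

    class ReturnPositionSketch
    {
      // Illustrative signature only; String stands in for the real value type.
      static Map<String, String> getRunningTasks(String taskId)
      {
        if (taskId == null) {
          // The declared return type fixes the type arguments here, so
          // Maps.newHashMap() has no advantage over the plain constructor.
          return new HashMap<>();
        }
        return ImmutableMap.of(taskId, "RUNNING");
      }
    }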
EasyMock.expect(autoScaler.ipToIdLookup(EasyMock.anyObject())) - .andReturn(Lists.newArrayList()); + .andReturn(new ArrayList<>()); EasyMock.expect(autoScaler.terminate(EasyMock.anyObject())).andReturn( - new AutoScalingData(Lists.newArrayList()) + new AutoScalingData(new ArrayList<>()) ); EasyMock.replay(autoScaler); RemoteTaskRunner runner = EasyMock.createMock(RemoteTaskRunner.class); @@ -283,7 +283,7 @@ public class SimpleProvisioningStrategyTest ).times(2); EasyMock.expect(runner.markWorkersLazy(EasyMock.anyObject(), EasyMock.anyInt())) .andReturn(Collections.singletonList(new TestZkWorker(testTask).getWorker())); - EasyMock.expect(runner.getLazyWorkers()).andReturn(Lists.newArrayList()); + EasyMock.expect(runner.getLazyWorkers()).andReturn(new ArrayList<>()); EasyMock.replay(runner); Provisioner provisioner = strategy.makeProvisioner(runner); @@ -322,7 +322,7 @@ public class SimpleProvisioningStrategyTest new TestZkWorker(testTask).toImmutable() ) ).times(2); - EasyMock.expect(runner.getLazyWorkers()).andReturn(Lists.newArrayList()).times(2); + EasyMock.expect(runner.getLazyWorkers()).andReturn(new ArrayList<>()).times(2); EasyMock.expect(runner.markWorkersLazy(EasyMock.anyObject(), EasyMock.anyInt())) .andReturn(Collections.singletonList(new TestZkWorker(testTask).getWorker())); EasyMock.replay(runner); @@ -371,7 +371,7 @@ public class SimpleProvisioningStrategyTest new TestZkWorker(NoopTask.create()).toImmutable() ) ).times(2); - EasyMock.expect(runner.getLazyWorkers()).andReturn(Lists.newArrayList()); + EasyMock.expect(runner.getLazyWorkers()).andReturn(new ArrayList<>()); EasyMock.expect(runner.markWorkersLazy(EasyMock.anyObject(), EasyMock.anyInt())) .andReturn(Collections.emptyList()); EasyMock.replay(runner); @@ -415,7 +415,7 @@ public class SimpleProvisioningStrategyTest new TestZkWorker(NoopTask.create(), "http", "h1", "i1", "0").toImmutable() ) ).times(3); - EasyMock.expect(runner.getLazyWorkers()).andReturn(Lists.newArrayList()); + EasyMock.expect(runner.getLazyWorkers()).andReturn(new ArrayList<>()); EasyMock.expect(runner.markWorkersLazy(EasyMock.anyObject(), EasyMock.anyInt())) .andReturn(Collections.emptyList()); EasyMock.replay(runner); @@ -516,7 +516,7 @@ public class SimpleProvisioningStrategyTest public Map getRunningTasks() { if (testTask == null) { - return Maps.newHashMap(); + return new HashMap<>(); } return ImmutableMap.of( testTask.getId(), diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategyTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategyTest.java index 7652d1f807e..29294b8ca20 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategyTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/setup/EqualDistributionWithAffinityWorkerSelectStrategyTest.java @@ -22,7 +22,6 @@ package org.apache.druid.indexing.overlord.setup; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Sets; import org.apache.druid.indexing.common.task.NoopTask; import org.apache.druid.indexing.overlord.ImmutableWorkerInfo; import org.apache.druid.indexing.overlord.config.RemoteTaskRunnerConfig; @@ -32,6 +31,8 @@ import org.apache.druid.segment.TestHelper; import org.junit.Assert; import
org.junit.Test; +import java.util.HashSet; + public class EqualDistributionWithAffinityWorkerSelectStrategyTest { @Test @@ -55,29 +56,29 @@ public class EqualDistributionWithAffinityWorkerSelectStrategyTest "localhost0", new ImmutableWorkerInfo( new Worker("http", "localhost0", "localhost0", 2, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), + new HashSet<>(), + new HashSet<>(), DateTimes.nowUtc() ), "localhost1", new ImmutableWorkerInfo( new Worker("http", "localhost1", "localhost1", 2, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), + new HashSet<>(), + new HashSet<>(), DateTimes.nowUtc() ), "localhost2", new ImmutableWorkerInfo( new Worker("http", "localhost2", "localhost2", 2, "v1"), 1, - Sets.newHashSet(), - Sets.newHashSet(), + new HashSet<>(), + new HashSet<>(), DateTimes.nowUtc() ), "localhost3", new ImmutableWorkerInfo( new Worker("http", "localhost3", "localhost3", 2, "v1"), 1, - Sets.newHashSet(), - Sets.newHashSet(), + new HashSet<>(), + new HashSet<>(), DateTimes.nowUtc() ) ), @@ -99,15 +100,15 @@ public class EqualDistributionWithAffinityWorkerSelectStrategyTest "lhost", new ImmutableWorkerInfo( new Worker("http", "lhost", "lhost", 1, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), + new HashSet<>(), + new HashSet<>(), DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "localhost", "localhost", 1, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), + new HashSet<>(), + new HashSet<>(), DateTimes.nowUtc() ) ), @@ -129,8 +130,8 @@ public class EqualDistributionWithAffinityWorkerSelectStrategyTest "localhost", new ImmutableWorkerInfo( new Worker("http", "localhost", "localhost", 1, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), + new HashSet<>(), + new HashSet<>(), DateTimes.nowUtc() ) ), diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategyTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategyTest.java index 0c1f8b3034c..e68d0a9312a 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategyTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/setup/EqualDistributionWorkerSelectStrategyTest.java @@ -21,7 +21,6 @@ package org.apache.druid.indexing.overlord.setup; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Sets; import org.apache.druid.indexing.common.task.NoopTask; import org.apache.druid.indexing.overlord.ImmutableWorkerInfo; import org.apache.druid.indexing.overlord.config.RemoteTaskRunnerConfig; @@ -30,6 +29,8 @@ import org.apache.druid.java.util.common.DateTimes; import org.junit.Assert; import org.junit.Test; +import java.util.HashSet; + public class EqualDistributionWorkerSelectStrategyTest { private static final ImmutableMap WORKERS_FOR_AFFINITY_TESTS = @@ -37,29 +38,29 @@ public class EqualDistributionWorkerSelectStrategyTest "localhost0", new ImmutableWorkerInfo( new Worker("http", "localhost0", "localhost0", 2, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), + new HashSet<>(), + new HashSet<>(), DateTimes.nowUtc() ), "localhost1", new ImmutableWorkerInfo( new Worker("http", "localhost1", "localhost1", 2, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), + new HashSet<>(), + new HashSet<>(), DateTimes.nowUtc() ), "localhost2", new ImmutableWorkerInfo( new Worker("http", "localhost2", "localhost2", 2, "v1"), 1, - 
Sets.newHashSet(), - Sets.newHashSet(), + new HashSet<>(), + new HashSet<>(), DateTimes.nowUtc() ), "localhost3", new ImmutableWorkerInfo( new Worker("http", "localhost3", "localhost3", 2, "v1"), 1, - Sets.newHashSet(), - Sets.newHashSet(), + new HashSet<>(), + new HashSet<>(), DateTimes.nowUtc() ) ); @@ -75,15 +76,15 @@ public class EqualDistributionWorkerSelectStrategyTest "lhost", new ImmutableWorkerInfo( new Worker("http", "lhost", "lhost", 1, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), + new HashSet<>(), + new HashSet<>(), DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "localhost", "localhost", 1, "v1"), 1, - Sets.newHashSet(), - Sets.newHashSet(), + new HashSet<>(), + new HashSet<>(), DateTimes.nowUtc() ) ), @@ -110,15 +111,15 @@ public class EqualDistributionWorkerSelectStrategyTest "lhost", new ImmutableWorkerInfo( new Worker("http", "lhost", "lhost", 5, "v1"), 5, - Sets.newHashSet(), - Sets.newHashSet(), + new HashSet<>(), + new HashSet<>(), DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "localhost", "localhost", 10, "v1"), 5, - Sets.newHashSet(), - Sets.newHashSet(), + new HashSet<>(), + new HashSet<>(), DateTimes.nowUtc() ) ), @@ -146,15 +147,15 @@ public class EqualDistributionWorkerSelectStrategyTest "lhost", new ImmutableWorkerInfo( new Worker("http", "disableHost", "disableHost", 10, DISABLED_VERSION), 2, - Sets.newHashSet(), - Sets.newHashSet(), + new HashSet<>(), + new HashSet<>(), DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "enableHost", "enableHost", 10, "v1"), 5, - Sets.newHashSet(), - Sets.newHashSet(), + new HashSet<>(), + new HashSet<>(), DateTimes.nowUtc() ) ), @@ -182,15 +183,15 @@ public class EqualDistributionWorkerSelectStrategyTest "lhost", new ImmutableWorkerInfo( new Worker("http", "disableHost", "disableHost", 10, DISABLED_VERSION), 5, - Sets.newHashSet(), - Sets.newHashSet(), + new HashSet<>(), + new HashSet<>(), DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "enableHost", "enableHost", 10, "v1"), 5, - Sets.newHashSet(), - Sets.newHashSet(), + new HashSet<>(), + new HashSet<>(), DateTimes.nowUtc() ) ), diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategyTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategyTest.java index a4abdfcddac..73599f502ed 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategyTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/setup/FillCapacityWithAffinityWorkerSelectStrategyTest.java @@ -21,7 +21,6 @@ package org.apache.druid.indexing.overlord.setup; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Sets; import org.apache.druid.indexing.common.task.NoopTask; import org.apache.druid.indexing.overlord.ImmutableWorkerInfo; import org.apache.druid.indexing.overlord.config.RemoteTaskRunnerConfig; @@ -30,6 +29,8 @@ import org.apache.druid.java.util.common.DateTimes; import org.junit.Assert; import org.junit.Test; +import java.util.HashSet; + public class FillCapacityWithAffinityWorkerSelectStrategyTest { @Test @@ -45,15 +46,15 @@ public class FillCapacityWithAffinityWorkerSelectStrategyTest "lhost", new ImmutableWorkerInfo( new Worker("http", "lhost", "lhost", 1, "v1"), 0, 
- Sets.newHashSet(), - Sets.newHashSet(), + new HashSet<>(), + new HashSet<>(), DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "localhost", "localhost", 1, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), + new HashSet<>(), + new HashSet<>(), DateTimes.nowUtc() ) ), @@ -82,15 +83,15 @@ public class FillCapacityWithAffinityWorkerSelectStrategyTest "lhost", new ImmutableWorkerInfo( new Worker("http", "lhost", "lhost", 1, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), + new HashSet<>(), + new HashSet<>(), DateTimes.nowUtc() ), "localhost", new ImmutableWorkerInfo( new Worker("http", "localhost", "localhost", 1, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), + new HashSet<>(), + new HashSet<>(), DateTimes.nowUtc() ) ), @@ -112,8 +113,8 @@ public class FillCapacityWithAffinityWorkerSelectStrategyTest "localhost", new ImmutableWorkerInfo( new Worker("http", "localhost", "localhost", 1, "v1"), 0, - Sets.newHashSet(), - Sets.newHashSet(), + new HashSet<>(), + new HashSet<>(), DateTimes.nowUtc() ) ), diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/supervisor/SupervisorResourceTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/supervisor/SupervisorResourceTest.java index d893898c5a1..522307edd4d 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/supervisor/SupervisorResourceTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/supervisor/SupervisorResourceTest.java @@ -24,7 +24,6 @@ import com.google.common.base.Optional; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Maps; import org.apache.druid.indexing.overlord.DataSourceMetadata; import org.apache.druid.indexing.overlord.TaskMaster; import org.apache.druid.java.util.common.DateTimes; @@ -46,6 +45,7 @@ import org.junit.runner.RunWith; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.core.Response; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -491,7 +491,7 @@ public class SupervisorResourceTest extends EasyMockSupport "v3" ) ); - Map> history = Maps.newHashMap(); + Map> history = new HashMap<>(); history.put("id1", versions1); history.put("id2", versions2); history.put("id3", versions3); @@ -603,7 +603,7 @@ public class SupervisorResourceTest extends EasyMockSupport ) ); - Map> history = Maps.newHashMap(); + Map> history = new HashMap<>(); history.put("id1", versions1); history.put("id2", versions2); history.put("id3", versions3); @@ -626,7 +626,7 @@ public class SupervisorResourceTest extends EasyMockSupport Response response = supervisorResource.specGetAllHistory(request); - Map> filteredHistory = Maps.newHashMap(); + Map> filteredHistory = new HashMap<>(); filteredHistory.put("id1", versions1); filteredHistory.put( "id3", @@ -700,7 +700,7 @@ public class SupervisorResourceTest extends EasyMockSupport "v2" ) ); - Map> history = Maps.newHashMap(); + Map> history = new HashMap<>(); history.put("id1", versions1); history.put("id2", versions2); @@ -797,7 +797,7 @@ public class SupervisorResourceTest extends EasyMockSupport "tombstone" ) ); - Map> history = Maps.newHashMap(); + Map> history = new HashMap<>(); history.put("id1", versions1); history.put("id2", versions2); history.put("id3", versions3); diff --git 
a/indexing-service/src/test/java/org/apache/druid/indexing/test/TestIndexerMetadataStorageCoordinator.java b/indexing-service/src/test/java/org/apache/druid/indexing/test/TestIndexerMetadataStorageCoordinator.java index 07afca7dc8b..47370fdcef7 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/test/TestIndexerMetadataStorageCoordinator.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/test/TestIndexerMetadataStorageCoordinator.java @@ -21,7 +21,6 @@ package org.apache.druid.indexing.test; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Lists; import com.google.common.collect.Sets; import org.apache.druid.indexing.overlord.DataSourceMetadata; import org.apache.druid.indexing.overlord.IndexerMetadataStorageCoordinator; @@ -31,6 +30,8 @@ import org.apache.druid.segment.realtime.appenderator.SegmentIdentifier; import org.apache.druid.timeline.DataSegment; import org.joda.time.Interval; +import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Set; @@ -42,7 +43,7 @@ public class TestIndexerMetadataStorageCoordinator implements IndexerMetadataSto public TestIndexerMetadataStorageCoordinator() { - unusedSegments = Lists.newArrayList(); + unusedSegments = new ArrayList<>(); } @Override @@ -98,7 +99,7 @@ public class TestIndexerMetadataStorageCoordinator implements IndexerMetadataSto @Override public Set announceHistoricalSegments(Set segments) { - Set added = Sets.newHashSet(); + Set added = new HashSet<>(); for (final DataSegment segment : segments) { if (published.add(segment)) { added.add(segment); diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/worker/WorkerTaskMonitorTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/worker/WorkerTaskMonitorTest.java index 8303f5d3173..20eb5fc3d67 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/worker/WorkerTaskMonitorTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/worker/WorkerTaskMonitorTest.java @@ -22,7 +22,6 @@ package org.apache.druid.indexing.worker; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.jsontype.NamedType; import com.google.common.base.Joiner; -import com.google.common.collect.Lists; import com.google.common.io.Files; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFrameworkFactory; @@ -63,6 +62,7 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; +import java.util.ArrayList; import java.util.List; /** @@ -177,7 +177,7 @@ public class WorkerTaskMonitorTest @Override public List getLocations() { - return Lists.newArrayList(); + return new ArrayList<>(); } }, jsonMapper diff --git a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITUnionQueryTest.java b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITUnionQueryTest.java index dc5017f48db..5935efb9e16 100644 --- a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITUnionQueryTest.java +++ b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITUnionQueryTest.java @@ -19,7 +19,6 @@ package org.apache.druid.tests.indexer; -import com.beust.jcommander.internal.Lists; import com.google.inject.Inject; import org.apache.druid.curator.discovery.ServerDiscoveryFactory; import org.apache.druid.curator.discovery.ServerDiscoverySelector; @@ -46,6 +45,7 @@ import org.testng.annotations.Test; import 
java.io.IOException; import java.net.URL; import java.nio.charset.StandardCharsets; +import java.util.ArrayList; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.TimeUnit; @@ -84,7 +84,7 @@ public class ITUnionQueryTest extends AbstractIndexerTest getTaskAsString(UNION_TASK_RESOURCE), DateTimes.utc(System.currentTimeMillis() + TimeUnit.MINUTES.toMillis(3)) ); - List taskIDs = Lists.newArrayList(); + List taskIDs = new ArrayList<>(); for (int i = 0; i < numTasks; i++) { taskIDs.add( indexer.submitTask( diff --git a/processing/src/main/java/org/apache/druid/collections/spatial/Point.java b/processing/src/main/java/org/apache/druid/collections/spatial/Point.java index 6e2bf0ec79a..0e95dd1bd05 100644 --- a/processing/src/main/java/org/apache/druid/collections/spatial/Point.java +++ b/processing/src/main/java/org/apache/druid/collections/spatial/Point.java @@ -19,10 +19,10 @@ package org.apache.druid.collections.spatial; -import com.google.common.collect.Lists; import org.apache.druid.collections.bitmap.BitmapFactory; import org.apache.druid.collections.bitmap.MutableBitmap; +import java.util.ArrayList; import java.util.List; /** @@ -83,7 +83,7 @@ public class Point extends Node @Override public List getChildren() { - return Lists.newArrayList(); + return new ArrayList<>(); } @Override diff --git a/processing/src/main/java/org/apache/druid/guice/GuiceInjectors.java b/processing/src/main/java/org/apache/druid/guice/GuiceInjectors.java index 46f61f65614..c40498e6b3f 100644 --- a/processing/src/main/java/org/apache/druid/guice/GuiceInjectors.java +++ b/processing/src/main/java/org/apache/druid/guice/GuiceInjectors.java @@ -20,12 +20,12 @@ package org.apache.druid.guice; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; import com.google.inject.Guice; import com.google.inject.Injector; import com.google.inject.Module; import org.apache.druid.jackson.JacksonModule; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; @@ -57,7 +57,7 @@ public class GuiceInjectors public static Injector makeStartupInjectorWithModules(Iterable modules) { - List theModules = Lists.newArrayList(); + List theModules = new ArrayList<>(); theModules.addAll(makeDefaultStartupModules()); for (Module theModule : modules) { theModules.add(theModule); diff --git a/processing/src/main/java/org/apache/druid/query/BaseQuery.java b/processing/src/main/java/org/apache/druid/query/BaseQuery.java index bc90e4e96af..41264dbbccc 100644 --- a/processing/src/main/java/org/apache/druid/query/BaseQuery.java +++ b/processing/src/main/java/org/apache/druid/query/BaseQuery.java @@ -23,7 +23,6 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import org.apache.druid.guice.annotations.ExtensionPoint; import org.apache.druid.java.util.common.granularity.Granularities; @@ -37,6 +36,7 @@ import org.joda.time.Interval; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.TreeMap; /** */ @@ -205,7 +205,7 @@ public abstract class BaseQuery> implements Query final Map overrides ) { - Map overridden = Maps.newTreeMap(); + Map overridden = new TreeMap<>(); if (context != null) { overridden.putAll(context); } diff --git 
a/processing/src/main/java/org/apache/druid/query/Druids.java b/processing/src/main/java/org/apache/druid/query/Druids.java index 5070d8a2a96..2c2a6537b85 100644 --- a/processing/src/main/java/org/apache/druid/query/Druids.java +++ b/processing/src/main/java/org/apache/druid/query/Druids.java @@ -53,6 +53,7 @@ import org.apache.druid.segment.VirtualColumns; import org.joda.time.Interval; import javax.annotation.Nullable; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.EnumSet; @@ -116,8 +117,8 @@ public class Druids virtualColumns = null; dimFilter = null; granularity = Granularities.ALL; - aggregatorSpecs = Lists.newArrayList(); - postAggregatorSpecs = Lists.newArrayList(); + aggregatorSpecs = new ArrayList<>(); + postAggregatorSpecs = new ArrayList<>(); limit = 0; context = null; } @@ -764,8 +765,8 @@ public class Druids context = null; dimFilter = null; granularity = Granularities.ALL; - dimensions = Lists.newArrayList(); - metrics = Lists.newArrayList(); + dimensions = new ArrayList<>(); + metrics = new ArrayList<>(); virtualColumns = null; pagingSpec = null; } diff --git a/processing/src/main/java/org/apache/druid/query/ExecutorServiceMonitor.java b/processing/src/main/java/org/apache/druid/query/ExecutorServiceMonitor.java index 4a205826837..ad2f672b53d 100644 --- a/processing/src/main/java/org/apache/druid/query/ExecutorServiceMonitor.java +++ b/processing/src/main/java/org/apache/druid/query/ExecutorServiceMonitor.java @@ -19,12 +19,12 @@ package org.apache.druid.query; -import com.google.common.collect.Lists; import com.google.inject.Inject; import org.apache.druid.java.util.emitter.service.ServiceEmitter; import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; import org.apache.druid.java.util.metrics.AbstractMonitor; +import java.util.ArrayList; import java.util.List; public class ExecutorServiceMonitor extends AbstractMonitor @@ -36,7 +36,7 @@ public class ExecutorServiceMonitor extends AbstractMonitor @Inject public ExecutorServiceMonitor() { - this.metricEmitters = Lists.newArrayList(); + this.metricEmitters = new ArrayList<>(); this.metricBuilder = new ServiceMetricEvent.Builder(); } diff --git a/processing/src/main/java/org/apache/druid/query/IntervalChunkingQueryRunner.java b/processing/src/main/java/org/apache/druid/query/IntervalChunkingQueryRunner.java index 30e77325aec..4cf31a04b10 100644 --- a/processing/src/main/java/org/apache/druid/query/IntervalChunkingQueryRunner.java +++ b/processing/src/main/java/org/apache/druid/query/IntervalChunkingQueryRunner.java @@ -32,6 +32,7 @@ import org.joda.time.DateTime; import org.joda.time.Interval; import org.joda.time.Period; +import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; @@ -132,7 +133,7 @@ public class IntervalChunkingQueryRunner implements QueryRunner return Collections.singletonList(interval); } - List intervals = Lists.newArrayList(); + List intervals = new ArrayList<>(); Iterator timestamps = new PeriodGranularity(period, null, null).getIterable(interval).iterator(); DateTime start = DateTimes.max(timestamps.next().getStart(), interval.getStart()); diff --git a/processing/src/main/java/org/apache/druid/query/ReportTimelineMissingSegmentQueryRunner.java b/processing/src/main/java/org/apache/druid/query/ReportTimelineMissingSegmentQueryRunner.java index f1b03a7cbdd..471b4cdeaf6 100644 --- a/processing/src/main/java/org/apache/druid/query/ReportTimelineMissingSegmentQueryRunner.java +++ 
b/processing/src/main/java/org/apache/druid/query/ReportTimelineMissingSegmentQueryRunner.java @@ -19,10 +19,10 @@ package org.apache.druid.query; -import com.google.common.collect.Lists; import org.apache.druid.java.util.common.guava.Sequence; import org.apache.druid.java.util.common.guava.Sequences; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -43,7 +43,7 @@ public class ReportTimelineMissingSegmentQueryRunner implements QueryRunner missingSegments = (List) responseContext.get(Result.MISSING_SEGMENTS_KEY); if (missingSegments == null) { - missingSegments = Lists.newArrayList(); + missingSegments = new ArrayList<>(); responseContext.put(Result.MISSING_SEGMENTS_KEY, missingSegments); } missingSegments.add(descriptor); diff --git a/processing/src/main/java/org/apache/druid/query/RetryQueryRunner.java b/processing/src/main/java/org/apache/druid/query/RetryQueryRunner.java index 1d440469dc1..5410603295b 100644 --- a/processing/src/main/java/org/apache/druid/query/RetryQueryRunner.java +++ b/processing/src/main/java/org/apache/druid/query/RetryQueryRunner.java @@ -22,7 +22,6 @@ package org.apache.druid.query; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; import org.apache.druid.java.util.common.guava.MergeSequence; import org.apache.druid.java.util.common.guava.Sequence; import org.apache.druid.java.util.common.guava.Sequences; @@ -33,6 +32,7 @@ import org.apache.druid.java.util.emitter.EmittingLogger; import org.apache.druid.query.spec.MultipleSpecificSegmentSpec; import org.apache.druid.segment.SegmentMissingException; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -58,7 +58,7 @@ public class RetryQueryRunner implements QueryRunner @Override public Sequence run(final QueryPlus queryPlus, final Map context) { - final List> listOfSequences = Lists.newArrayList(); + final List> listOfSequences = new ArrayList<>(); listOfSequences.add(baseRunner.run(queryPlus, context)); return new YieldingSequenceBase() @@ -72,7 +72,7 @@ public class RetryQueryRunner implements QueryRunner for (int i = 0; i < config.getNumTries(); i++) { log.info("[%,d] missing segments found. 
Retry attempt [%,d]", missingSegments.size(), i); - context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList()); + context.put(Result.MISSING_SEGMENTS_KEY, new ArrayList<>()); final QueryPlus retryQueryPlus = queryPlus.withQuerySegmentSpec( new MultipleSpecificSegmentSpec( missingSegments @@ -104,7 +104,7 @@ public class RetryQueryRunner implements QueryRunner { final Object maybeMissingSegments = context.get(Result.MISSING_SEGMENTS_KEY); if (maybeMissingSegments == null) { - return Lists.newArrayList(); + return new ArrayList<>(); } return jsonMapper.convertValue( diff --git a/processing/src/main/java/org/apache/druid/query/aggregation/AggregatorUtil.java b/processing/src/main/java/org/apache/druid/query/aggregation/AggregatorUtil.java index 33a8296ad12..1d291b7360d 100644 --- a/processing/src/main/java/org/apache/druid/query/aggregation/AggregatorUtil.java +++ b/processing/src/main/java/org/apache/druid/query/aggregation/AggregatorUtil.java @@ -154,7 +154,7 @@ public class AggregatorUtil dependencySet.addAll(postAggregator.getDependentFields()); } - List condensedAggs = Lists.newArrayList(); + List condensedAggs = new ArrayList<>(); for (AggregatorFactory aggregatorSpec : aggList) { if (dependencySet.contains(aggregatorSpec.getName())) { condensedAggs.add(aggregatorSpec); diff --git a/processing/src/main/java/org/apache/druid/query/aggregation/HistogramAggregatorFactory.java b/processing/src/main/java/org/apache/druid/query/aggregation/HistogramAggregatorFactory.java index 8838442b231..5a1e6dd4c44 100644 --- a/processing/src/main/java/org/apache/druid/query/aggregation/HistogramAggregatorFactory.java +++ b/processing/src/main/java/org/apache/druid/query/aggregation/HistogramAggregatorFactory.java @@ -22,7 +22,6 @@ package org.apache.druid.query.aggregation; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; import org.apache.commons.codec.binary.Base64; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.segment.ColumnSelectorFactory; @@ -30,6 +29,7 @@ import org.apache.druid.segment.ColumnValueSelector; import javax.annotation.Nullable; import java.nio.ByteBuffer; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; @@ -55,7 +55,7 @@ public class HistogramAggregatorFactory extends AggregatorFactory this.name = name; this.fieldName = fieldName; - this.breaksList = (breaksList == null) ? Lists.newArrayList() : breaksList; + this.breaksList = (breaksList == null) ? 
new ArrayList<>() : breaksList; this.breaks = new float[this.breaksList.size()]; for (int i = 0; i < this.breaksList.size(); ++i) { this.breaks[i] = this.breaksList.get(i); diff --git a/processing/src/main/java/org/apache/druid/query/aggregation/post/ArithmeticPostAggregator.java b/processing/src/main/java/org/apache/druid/query/aggregation/post/ArithmeticPostAggregator.java index 2ca65470e07..556d39fe89e 100644 --- a/processing/src/main/java/org/apache/druid/query/aggregation/post/ArithmeticPostAggregator.java +++ b/processing/src/main/java/org/apache/druid/query/aggregation/post/ArithmeticPostAggregator.java @@ -22,8 +22,6 @@ package org.apache.druid.query.aggregation.post; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; import org.apache.druid.common.config.NullHandling; import org.apache.druid.java.util.common.IAE; import org.apache.druid.query.Queries; @@ -32,6 +30,8 @@ import org.apache.druid.query.aggregation.PostAggregator; import org.apache.druid.query.cache.CacheKeyBuilder; import java.util.Comparator; +import java.util.HashMap; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -93,7 +93,7 @@ public class ArithmeticPostAggregator implements PostAggregator @Override public Set getDependentFields() { - Set dependentFields = Sets.newHashSet(); + Set dependentFields = new HashSet<>(); for (PostAggregator field : fields) { dependentFields.addAll(field.getDependentFields()); } @@ -241,7 +241,7 @@ public class ArithmeticPostAggregator implements PostAggregator } }; - private static final Map lookupMap = Maps.newHashMap(); + private static final Map lookupMap = new HashMap<>(); static { for (Ops op : Ops.values()) { diff --git a/processing/src/main/java/org/apache/druid/query/aggregation/post/ConstantPostAggregator.java b/processing/src/main/java/org/apache/druid/query/aggregation/post/ConstantPostAggregator.java index ac0c56da57b..af8cd002e1e 100644 --- a/processing/src/main/java/org/apache/druid/query/aggregation/post/ConstantPostAggregator.java +++ b/processing/src/main/java/org/apache/druid/query/aggregation/post/ConstantPostAggregator.java @@ -22,13 +22,13 @@ package org.apache.druid.query.aggregation.post; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; -import com.google.common.collect.Sets; import org.apache.druid.java.util.common.guava.Comparators; import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.query.aggregation.PostAggregator; import org.apache.druid.query.cache.CacheKeyBuilder; import java.util.Comparator; +import java.util.HashSet; import java.util.Map; import java.util.Set; @@ -52,7 +52,7 @@ public class ConstantPostAggregator implements PostAggregator @Override public Set getDependentFields() { - return Sets.newHashSet(); + return new HashSet<>(); } @Override diff --git a/processing/src/main/java/org/apache/druid/query/aggregation/post/DoubleGreatestPostAggregator.java b/processing/src/main/java/org/apache/druid/query/aggregation/post/DoubleGreatestPostAggregator.java index 8bf77d36663..1e017b13c08 100644 --- a/processing/src/main/java/org/apache/druid/query/aggregation/post/DoubleGreatestPostAggregator.java +++ 
b/processing/src/main/java/org/apache/druid/query/aggregation/post/DoubleGreatestPostAggregator.java @@ -22,7 +22,6 @@ package org.apache.druid.query.aggregation.post; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; -import com.google.common.collect.Sets; import org.apache.druid.common.config.NullHandling; import org.apache.druid.query.Queries; import org.apache.druid.query.aggregation.AggregatorFactory; @@ -30,6 +29,7 @@ import org.apache.druid.query.aggregation.PostAggregator; import org.apache.druid.query.cache.CacheKeyBuilder; import java.util.Comparator; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -60,7 +60,7 @@ public class DoubleGreatestPostAggregator implements PostAggregator @Override public Set getDependentFields() { - Set dependentFields = Sets.newHashSet(); + Set dependentFields = new HashSet<>(); for (PostAggregator field : fields) { dependentFields.addAll(field.getDependentFields()); } diff --git a/processing/src/main/java/org/apache/druid/query/aggregation/post/DoubleLeastPostAggregator.java b/processing/src/main/java/org/apache/druid/query/aggregation/post/DoubleLeastPostAggregator.java index 51da6588a70..1a143faed40 100644 --- a/processing/src/main/java/org/apache/druid/query/aggregation/post/DoubleLeastPostAggregator.java +++ b/processing/src/main/java/org/apache/druid/query/aggregation/post/DoubleLeastPostAggregator.java @@ -22,7 +22,6 @@ package org.apache.druid.query.aggregation.post; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; -import com.google.common.collect.Sets; import org.apache.druid.common.config.NullHandling; import org.apache.druid.query.Queries; import org.apache.druid.query.aggregation.AggregatorFactory; @@ -30,6 +29,7 @@ import org.apache.druid.query.aggregation.PostAggregator; import org.apache.druid.query.cache.CacheKeyBuilder; import java.util.Comparator; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -60,7 +60,7 @@ public class DoubleLeastPostAggregator implements PostAggregator @Override public Set getDependentFields() { - Set dependentFields = Sets.newHashSet(); + Set dependentFields = new HashSet<>(); for (PostAggregator field : fields) { dependentFields.addAll(field.getDependentFields()); } diff --git a/processing/src/main/java/org/apache/druid/query/aggregation/post/LongGreatestPostAggregator.java b/processing/src/main/java/org/apache/druid/query/aggregation/post/LongGreatestPostAggregator.java index ad4af00b05c..89ca17103ce 100644 --- a/processing/src/main/java/org/apache/druid/query/aggregation/post/LongGreatestPostAggregator.java +++ b/processing/src/main/java/org/apache/druid/query/aggregation/post/LongGreatestPostAggregator.java @@ -22,7 +22,6 @@ package org.apache.druid.query.aggregation.post; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; -import com.google.common.collect.Sets; import org.apache.druid.common.config.NullHandling; import org.apache.druid.query.Queries; import org.apache.druid.query.aggregation.AggregatorFactory; @@ -30,6 +29,7 @@ import org.apache.druid.query.aggregation.PostAggregator; import org.apache.druid.query.cache.CacheKeyBuilder; import java.util.Comparator; +import java.util.HashSet; import 
java.util.Iterator; import java.util.List; import java.util.Map; @@ -60,7 +60,7 @@ public class LongGreatestPostAggregator implements PostAggregator @Override public Set getDependentFields() { - Set dependentFields = Sets.newHashSet(); + Set dependentFields = new HashSet<>(); for (PostAggregator field : fields) { dependentFields.addAll(field.getDependentFields()); } diff --git a/processing/src/main/java/org/apache/druid/query/aggregation/post/LongLeastPostAggregator.java b/processing/src/main/java/org/apache/druid/query/aggregation/post/LongLeastPostAggregator.java index c5fe66ce73f..3ff47e92303 100644 --- a/processing/src/main/java/org/apache/druid/query/aggregation/post/LongLeastPostAggregator.java +++ b/processing/src/main/java/org/apache/druid/query/aggregation/post/LongLeastPostAggregator.java @@ -22,7 +22,6 @@ package org.apache.druid.query.aggregation.post; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; -import com.google.common.collect.Sets; import org.apache.druid.common.config.NullHandling; import org.apache.druid.query.Queries; import org.apache.druid.query.aggregation.AggregatorFactory; @@ -30,6 +29,7 @@ import org.apache.druid.query.aggregation.PostAggregator; import org.apache.druid.query.cache.CacheKeyBuilder; import java.util.Comparator; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -60,7 +60,7 @@ public class LongLeastPostAggregator implements PostAggregator @Override public Set getDependentFields() { - Set dependentFields = Sets.newHashSet(); + Set dependentFields = new HashSet<>(); for (PostAggregator field : fields) { dependentFields.addAll(field.getDependentFields()); } diff --git a/processing/src/main/java/org/apache/druid/query/cache/CacheKeyBuilder.java b/processing/src/main/java/org/apache/druid/query/cache/CacheKeyBuilder.java index 97f0f14816b..03122e63335 100644 --- a/processing/src/main/java/org/apache/druid/query/cache/CacheKeyBuilder.java +++ b/processing/src/main/java/org/apache/druid/query/cache/CacheKeyBuilder.java @@ -30,6 +30,7 @@ import org.apache.druid.java.util.common.StringUtils; import javax.annotation.Nullable; import java.nio.ByteBuffer; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -173,7 +174,7 @@ public class CacheKeyBuilder } } - private final List items = Lists.newArrayList(); + private final List items = new ArrayList<>(); private final byte id; private int size; diff --git a/processing/src/main/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQuery.java b/processing/src/main/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQuery.java index 96662ee32ed..37e6a48ea48 100644 --- a/processing/src/main/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQuery.java +++ b/processing/src/main/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQuery.java @@ -21,7 +21,6 @@ package org.apache.druid.query.datasourcemetadata; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.collect.Lists; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.query.BaseQuery; @@ -34,6 +33,7 @@ import org.apache.druid.query.spec.MultipleIntervalSegmentSpec; import org.apache.druid.query.spec.QuerySegmentSpec; import 
diff --git a/processing/src/main/java/org/apache/druid/query/cache/CacheKeyBuilder.java b/processing/src/main/java/org/apache/druid/query/cache/CacheKeyBuilder.java
index 97f0f14816b..03122e63335 100644
--- a/processing/src/main/java/org/apache/druid/query/cache/CacheKeyBuilder.java
+++ b/processing/src/main/java/org/apache/druid/query/cache/CacheKeyBuilder.java
@@ -30,6 +30,7 @@ import org.apache.druid.java.util.common.StringUtils;
 import javax.annotation.Nullable;
 
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
@@ -173,7 +174,7 @@ public class CacheKeyBuilder
     }
   }
 
-  private final List<Item> items = Lists.newArrayList();
+  private final List<Item> items = new ArrayList<>();
   private final byte id;
   private int size;
diff --git a/processing/src/main/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQuery.java b/processing/src/main/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQuery.java
index 96662ee32ed..37e6a48ea48 100644
--- a/processing/src/main/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQuery.java
+++ b/processing/src/main/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQuery.java
@@ -21,7 +21,6 @@ package org.apache.druid.query.datasourcemetadata;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.collect.Lists;
 import org.apache.druid.java.util.common.DateTimes;
 import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.query.BaseQuery;
@@ -34,6 +33,7 @@ import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
 import org.apache.druid.query.spec.QuerySegmentSpec;
 import org.joda.time.DateTime;
 
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -101,7 +101,7 @@ public class DataSourceMetadataQuery extends BaseQuery<Result<DataSourceMetadataResultValue>>
   public Iterable<Result<DataSourceMetadataResultValue>> mergeResults(List<Result<DataSourceMetadataResultValue>> results)
   {
     if (results == null || results.isEmpty()) {
-      return Lists.newArrayList();
+      return new ArrayList<>();
     }
 
     DateTime max = DateTimes.MIN;
diff --git a/processing/src/main/java/org/apache/druid/query/filter/AndDimFilter.java b/processing/src/main/java/org/apache/druid/query/filter/AndDimFilter.java
index 8161241457a..9537ff89070 100644
--- a/processing/src/main/java/org/apache/druid/query/filter/AndDimFilter.java
+++ b/processing/src/main/java/org/apache/druid/query/filter/AndDimFilter.java
@@ -24,7 +24,6 @@ import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.RangeSet;
-import com.google.common.collect.Sets;
 import com.google.common.collect.TreeRangeSet;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.segment.filter.AndFilter;
@@ -102,7 +101,7 @@ public class AndDimFilter implements DimFilter
   @Override
   public HashSet<String> getRequiredColumns()
   {
-    HashSet<String> requiredColumns = Sets.newHashSet();
+    HashSet<String> requiredColumns = new HashSet<>();
     fields.stream()
           .forEach(field -> requiredColumns.addAll(field.getRequiredColumns()));
     return requiredColumns;
diff --git a/processing/src/main/java/org/apache/druid/query/filter/DimFilterUtils.java b/processing/src/main/java/org/apache/druid/query/filter/DimFilterUtils.java
index 5c9795f0eae..03966e94444 100644
--- a/processing/src/main/java/org/apache/druid/query/filter/DimFilterUtils.java
+++ b/processing/src/main/java/org/apache/druid/query/filter/DimFilterUtils.java
@@ -21,13 +21,12 @@ package org.apache.druid.query.filter;
 
 import com.google.common.base.Function;
 import com.google.common.base.Optional;
-import com.google.common.collect.Maps;
 import com.google.common.collect.RangeSet;
-import com.google.common.collect.Sets;
 import org.apache.druid.timeline.partition.ShardSpec;
 
 import java.nio.ByteBuffer;
 import java.util.HashMap;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -117,14 +116,14 @@ public class DimFilterUtils
   public static <T> Set<T> filterShards(DimFilter dimFilter, Iterable<T> input, Function<T, ShardSpec> converter,
                                         Map<String, Optional<RangeSet<String>>> dimensionRangeCache)
   {
-    Set<T> retSet = Sets.newLinkedHashSet();
+    Set<T> retSet = new LinkedHashSet<>();
 
     for (T obj : input) {
       ShardSpec shard = converter.apply(obj);
       boolean include = true;
 
       if (dimFilter != null && shard != null) {
-        Map<String, RangeSet<String>> filterDomain = Maps.newHashMap();
+        Map<String, RangeSet<String>> filterDomain = new HashMap<>();
         List<String> dimensions = shard.getDomainDimensions();
         for (String dimension : dimensions) {
           Optional<RangeSet<String>> optFilterRangeSet = dimensionRangeCache
diff --git a/processing/src/main/java/org/apache/druid/query/filter/OrDimFilter.java b/processing/src/main/java/org/apache/druid/query/filter/OrDimFilter.java
index 337c9e7e35d..ce8943139a4 100644
--- a/processing/src/main/java/org/apache/druid/query/filter/OrDimFilter.java
+++ b/processing/src/main/java/org/apache/druid/query/filter/OrDimFilter.java
@@ -24,7 +24,6 @@ import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.RangeSet;
-import com.google.common.collect.Sets;
 import com.google.common.collect.TreeRangeSet;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.segment.filter.Filters;
@@ -110,7 +109,7 @@ public class OrDimFilter implements DimFilter
   @Override
   public HashSet<String> getRequiredColumns()
   {
-    HashSet<String> requiredColumns = Sets.newHashSet();
+    HashSet<String> requiredColumns = new HashSet<>();
     fields.stream()
           .forEach(field -> requiredColumns.addAll(field.getRequiredColumns()));
     return requiredColumns;
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java b/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java
index abd5c9a5d6f..9975c817025 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/GroupByQuery.java
@@ -29,7 +29,6 @@ import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Ordering;
-import com.google.common.collect.Sets;
 import com.google.common.primitives.Longs;
 import org.apache.druid.data.input.Row;
 import org.apache.druid.java.util.common.IAE;
@@ -701,7 +700,7 @@ public class GroupByQuery extends BaseQuery<Row>
       List<PostAggregator> postAggregators
   )
   {
-    final Set<String> outputNames = Sets.newHashSet();
+    final Set<String> outputNames = new HashSet<>();
     for (DimensionSpec dimension : dimensions) {
       if (!outputNames.add(dimension.getOutputName())) {
         throw new IAE("Duplicate output name[%s]", dimension.getOutputName());
@@ -752,7 +751,7 @@ public class GroupByQuery extends BaseQuery<Row>
     private List<List<String>> subtotalsSpec = null;
     private LimitSpec limitSpec = null;
     private Function<Sequence<Row>, Sequence<Row>> postProcessingFn;
-    private List<OrderByColumnSpec> orderByColumnSpecs = Lists.newArrayList();
+    private List<OrderByColumnSpec> orderByColumnSpecs = new ArrayList<>();
     private int limit = Integer.MAX_VALUE;
 
     public Builder()
@@ -930,7 +929,7 @@ public class GroupByQuery extends BaseQuery<Row>
     public Builder addDimension(DimensionSpec dimension)
     {
       if (dimensions == null) {
-        dimensions = Lists.newArrayList();
+        dimensions = new ArrayList<>();
       }
 
       dimensions.add(dimension);
@@ -955,7 +954,7 @@ public class GroupByQuery extends BaseQuery<Row>
     public Builder addAggregator(AggregatorFactory aggregator)
     {
       if (aggregatorSpecs == null) {
-        aggregatorSpecs = Lists.newArrayList();
+        aggregatorSpecs = new ArrayList<>();
       }
 
       aggregatorSpecs.add(aggregator);
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/GroupByQueryEngine.java b/processing/src/main/java/org/apache/druid/query/groupby/GroupByQueryEngine.java
index 74effcbdf3a..c8dcef3857c 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/GroupByQueryEngine.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/GroupByQueryEngine.java
@@ -57,10 +57,12 @@ import java.io.Closeable;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.NoSuchElementException;
+import java.util.TreeMap;
 
 /**
  */
@@ -153,9 +155,9 @@
     private final BufferAggregator[] aggregators;
     private final PositionMaintainer positionMaintainer;
 
-    private final Map<ByteBuffer, Integer> positions = Maps.newTreeMap();
+    private final Map<ByteBuffer, Integer> positions = new TreeMap<>();
     // GroupBy queries tend to do a lot of reads from this. We co-store a hash map to make those reads go faster.
-    private final Map<ByteBuffer, Integer> positionsHash = Maps.newHashMap();
+    private final Map<ByteBuffer, Integer> positionsHash = new HashMap<>();
 
     public RowUpdater(
         ByteBuffer metricValues,
@@ -198,7 +200,7 @@
           List<ByteBuffer> unaggregatedBuffers = updateValues(newKey, dims.subList(1, dims.size()));
           if (unaggregatedBuffers != null) {
             if (retVal == null) {
-              retVal = Lists.newArrayList();
+              retVal = new ArrayList<>();
             }
             retVal.addAll(unaggregatedBuffers);
           }
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/GroupByQueryHelper.java b/processing/src/main/java/org/apache/druid/query/groupby/GroupByQueryHelper.java
index 4a7c50c863c..10f582c5508 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/GroupByQueryHelper.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/GroupByQueryHelper.java
@@ -48,6 +48,7 @@ import org.apache.druid.segment.incremental.IndexSizeExceededException;
 import org.joda.time.DateTime;
 
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.Queue;
@@ -106,7 +107,7 @@ public class GroupByQueryHelper
     final boolean sortResults = query.getContextValue(CTX_KEY_SORT_RESULTS, true);
 
     // All groupBy dimensions are strings, for now.
-    final List<DimensionSchema> dimensionSchemas = Lists.newArrayList();
+    final List<DimensionSchema> dimensionSchemas = new ArrayList<>();
     for (DimensionSpec dimension : query.getDimensions()) {
       dimensionSchemas.add(new StringDimensionSchema(dimension.getOutputName()));
     }
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/GroupByQueryQueryToolChest.java b/processing/src/main/java/org/apache/druid/query/groupby/GroupByQueryQueryToolChest.java
index 4a9a298becd..e47f64df6ef 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/GroupByQueryQueryToolChest.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/GroupByQueryQueryToolChest.java
@@ -69,6 +69,7 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.TreeMap;
 
 /**
  */
@@ -173,7 +174,7 @@ public class GroupByQueryQueryToolChest extends QueryToolChest<Row, GroupByQuery>
-    final Map<String, Object> subqueryContext = Maps.newTreeMap();
+    final Map<String, Object> subqueryContext = new TreeMap<>();
     if (query.getContext() != null) {
       for (Map.Entry<String, Object> entry : query.getContext().entrySet()) {
         if (entry.getValue() != null) {
@@ -325,7 +326,7 @@ public class GroupByQueryQueryToolChest extends QueryToolChest<Row, GroupByQuery>
-        final Map<String, Object> values = Maps.newHashMap(inputRow.getEvent());
+        final Map<String, Object> values = new HashMap<>(inputRow.getEvent());
         for (AggregatorFactory agg : query.getAggregatorSpecs()) {
           values.put(agg.getName(), fn.manipulate(agg, inputRow.getEvent().get(agg.getName())));
         }
@@ -379,7 +380,7 @@ public class GroupByQueryQueryToolChest extends QueryToolChest<Row, GroupByQuery>
-            Map<String, Object> event = Maps.newHashMap(preMapRow.getEvent());
+            Map<String, Object> event = new HashMap<>(preMapRow.getEvent());
             for (String dim : optimizedDims) {
               final Object eventVal = event.get(dim);
               event.put(dim, extractionFnMap.get(dim).apply(eventVal));
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/GroupByMergingQueryRunnerV2.java b/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/GroupByMergingQueryRunnerV2.java
index f1eafde7bd2..d13096cb5e6 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/GroupByMergingQueryRunnerV2.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/GroupByMergingQueryRunnerV2.java
@@ -60,6 +60,7 @@ import org.apache.druid.query.groupby.epinephelinae.RowBasedGrouperHelper.RowBasedKey;
 import java.io.Closeable;
 import java.io.File;
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.UUID;
@@ -159,7 +160,7 @@ public class GroupByMergingQueryRunnerV2 implements QueryRunner<Row>
       @Override
       public CloseableGrouperIterator<RowBasedKey, Row> make()
       {
-        final List<ReferenceCountingResourceHolder> resources = Lists.newArrayList();
+        final List<ReferenceCountingResourceHolder> resources = new ArrayList<>();
 
         try {
           final LimitedTemporaryStorage temporaryStorage = new LimitedTemporaryStorage(
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/LimitedTemporaryStorage.java b/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/LimitedTemporaryStorage.java
index 07f3785545d..d9f5d1d21b6 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/LimitedTemporaryStorage.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/LimitedTemporaryStorage.java
@@ -20,7 +20,6 @@ package org.apache.druid.query.groupby.epinephelinae;
 
 import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Sets;
 import org.apache.commons.io.FileUtils;
 import org.apache.druid.java.util.common.ISE;
 import org.apache.druid.java.util.common.StringUtils;
@@ -36,6 +35,7 @@ import java.nio.file.Files;
 import java.nio.file.StandardOpenOption;
 import java.util.EnumSet;
 import java.util.Set;
+import java.util.TreeSet;
 import java.util.concurrent.atomic.AtomicLong;
 
 /**
@@ -50,7 +50,7 @@ public class LimitedTemporaryStorage implements Closeable
   private final long maxBytesUsed;
   private final AtomicLong bytesUsed = new AtomicLong();
 
-  private final Set<File> files = Sets.newTreeSet();
+  private final Set<File> files = new TreeSet<>();
 
   private volatile boolean closed = false;
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java b/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java
index 0fda3e95dd1..0b82472f351 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/RowBasedGrouperHelper.java
@@ -24,7 +24,6 @@ import com.fasterxml.jackson.annotation.JsonValue;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Supplier;
-import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.primitives.Ints;
 import com.google.common.primitives.Longs;
@@ -749,11 +748,11 @@ public class RowBasedGrouperHelper
   private Comparator<Grouper.Entry<RowBasedKey>> objectComparatorWithAggs()
   {
     // use the actual sort order from the limitspec if pushing down to merge partial results correctly
-    final List<Boolean> needsReverses = Lists.newArrayList();
-    final List<Boolean> aggFlags = Lists.newArrayList();
-    final List<Boolean> isNumericField = Lists.newArrayList();
-    final List<StringComparator> comparators = Lists.newArrayList();
-    final List<Integer> fieldIndices = Lists.newArrayList();
+    final List<Boolean> needsReverses = new ArrayList<>();
+    final List<Boolean> aggFlags = new ArrayList<>();
+    final List<Boolean> isNumericField = new ArrayList<>();
+    final List<StringComparator> comparators = new ArrayList<>();
+    final List<Integer> fieldIndices = new ArrayList<>();
     final Set<Integer> orderByIndices = new HashSet<>();
 
     for (OrderByColumnSpec orderSpec : limitSpec.getColumns()) {
@@ -1175,7 +1174,7 @@ public class RowBasedGrouperHelper
     )
     {
       final List<RowBasedKeySerdeHelper> adjustedSerdeHelpers;
-      final List<Boolean> needsReverses = Lists.newArrayList();
+      final List<Boolean> needsReverses = new ArrayList<>();
       List<RowBasedKeySerdeHelper> orderByHelpers = new ArrayList<>();
       List<RowBasedKeySerdeHelper> otherDimHelpers = new ArrayList<>();
       Set<Integer> orderByIndices = new HashSet<>();
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/SpillingGrouper.java b/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/SpillingGrouper.java
index 61b7c09f8af..a24798342e0 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/SpillingGrouper.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/SpillingGrouper.java
@@ -26,7 +26,6 @@ import com.google.common.base.Function;
 import com.google.common.base.Supplier;
 import com.google.common.base.Throwables;
 import com.google.common.collect.Iterators;
-import com.google.common.collect.Lists;
 import net.jpountz.lz4.LZ4BlockInputStream;
 import net.jpountz.lz4.LZ4BlockOutputStream;
 import org.apache.druid.java.util.common.CloseableIterators;
@@ -69,8 +68,8 @@ public class SpillingGrouper<KeyType> implements Grouper<KeyType>
   private final Comparator<Grouper.Entry<KeyType>> keyObjComparator;
   private final Comparator<Grouper.Entry<KeyType>> defaultOrderKeyObjComparator;
 
-  private final List<File> files = Lists.newArrayList();
-  private final List<File> dictionaryFiles = Lists.newArrayList();
+  private final List<File> files = new ArrayList<>();
+  private final List<File> dictionaryFiles = new ArrayList<>();
   private final boolean sortHasNonGroupingFields;
 
   private boolean spillingAllowed = false;
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/column/DictionaryBuildingStringGroupByColumnSelectorStrategy.java b/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/column/DictionaryBuildingStringGroupByColumnSelectorStrategy.java
index f2f6914c019..c44605e8a0a 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/column/DictionaryBuildingStringGroupByColumnSelectorStrategy.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/epinephelinae/column/DictionaryBuildingStringGroupByColumnSelectorStrategy.java
@@ -20,7 +20,6 @@
 package org.apache.druid.query.groupby.epinephelinae.column;
 
 import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
 import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap;
 import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.segment.ColumnValueSelector;
@@ -29,6 +28,7 @@ import org.apache.druid.segment.data.ArrayBasedIndexedInts;
 import org.apache.druid.segment.data.IndexedInts;
 
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -41,7 +41,7 @@ public class DictionaryBuildingStringGroupByColumnSelectorStrategy extends StringGroupByColumnSelectorStrategy
   private static final int GROUP_BY_MISSING_VALUE = -1;
 
   private int nextId = 0;
-  private final List<String> dictionary = Lists.newArrayList();
+  private final List<String> dictionary = new ArrayList<>();
   private final Object2IntOpenHashMap<String> reverseDictionary = new Object2IntOpenHashMap<>();
   {
     reverseDictionary.defaultReturnValue(-1);
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/orderby/DefaultLimitSpec.java b/processing/src/main/java/org/apache/druid/query/groupby/orderby/DefaultLimitSpec.java
index b74f93eadd2..7a4c5f3a66a 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/orderby/DefaultLimitSpec.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/orderby/DefaultLimitSpec.java
@@ -25,9 +25,7 @@ import com.google.common.base.Function;
 import com.google.common.base.Functions;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Maps;
 import com.google.common.collect.Ordering;
-import com.google.common.collect.Sets;
 import com.google.common.primitives.Ints;
 import com.google.common.primitives.Longs;
 import org.apache.druid.common.config.NullHandling;
@@ -47,6 +45,8 @@ import org.apache.druid.segment.column.ValueType;
 import javax.annotation.Nullable;
 import java.nio.ByteBuffer;
 import java.util.Comparator;
+import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -131,7 +131,7 @@ public class DefaultLimitSpec implements LimitSpec
     boolean sortingNeeded = dimensions.size() < columns.size();
 
-    final Set<String> aggAndPostAggNames = Sets.newHashSet();
+    final Set<String> aggAndPostAggNames = new HashSet<>();
     for (AggregatorFactory agg : aggs) {
       aggAndPostAggNames.add(agg.getName());
     }
@@ -220,17 +220,17 @@ public class DefaultLimitSpec implements LimitSpec
       }
     };
 
-    Map<String, DimensionSpec> dimensionsMap = Maps.newHashMap();
+    Map<String, DimensionSpec> dimensionsMap = new HashMap<>();
     for (DimensionSpec spec : dimensions) {
       dimensionsMap.put(spec.getOutputName(), spec);
     }
 
-    Map<String, AggregatorFactory> aggregatorsMap = Maps.newHashMap();
+    Map<String, AggregatorFactory> aggregatorsMap = new HashMap<>();
     for (final AggregatorFactory agg : aggs) {
       aggregatorsMap.put(agg.getName(), agg);
     }
 
-    Map<String, PostAggregator> postAggregatorsMap = Maps.newHashMap();
+    Map<String, PostAggregator> postAggregatorsMap = new HashMap<>();
     for (PostAggregator postAgg : postAggs) {
       postAggregatorsMap.put(postAgg.getName(), postAgg);
     }
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/strategy/GroupByStrategyV1.java b/processing/src/main/java/org/apache/druid/query/groupby/strategy/GroupByStrategyV1.java
index d111650d1dd..ee6f47341e1 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/strategy/GroupByStrategyV1.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/strategy/GroupByStrategyV1.java
@@ -25,8 +25,6 @@ import com.google.common.base.Supplier;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
 import com.google.common.util.concurrent.ListeningExecutorService;
 import com.google.inject.Inject;
 import org.apache.druid.collections.NonBlockingPool;
@@ -56,6 +54,8 @@ import org.apache.druid.segment.incremental.IncrementalIndexStorageAdapter;
 import org.joda.time.Interval;
 
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
@@ -164,7 +164,7 @@ public class GroupByStrategyV1 implements GroupByStrategy
       boolean wasQueryPushedDown
   )
   {
-    final Set<AggregatorFactory> aggs = Sets.newHashSet();
+    final Set<AggregatorFactory> aggs = new HashSet<>();
 
     // Nested group-bys work by first running the inner query and then materializing the results in an incremental
     // index which the outer query is then run against. To build the incremental index, we use the fieldNames from
@@ -173,7 +173,7 @@ public class GroupByStrategyV1 implements GroupByStrategy
     // multiple columns of the same name using different aggregator types and will fail. Here, we permit multiple
     // aggregators of the same type referencing the same fieldName (and skip creating identical columns for the
     // subsequent ones) and return an error if the aggregator types are different.
-    final Set<String> dimensionNames = Sets.newHashSet();
+    final Set<String> dimensionNames = new HashSet<>();
     for (DimensionSpec dimension : subquery.getDimensions()) {
       dimensionNames.add(dimension.getOutputName());
     }
@@ -208,7 +208,7 @@ public class GroupByStrategyV1 implements GroupByStrategy
     final GroupByQuery innerQuery = new GroupByQuery.Builder(subquery)
         .setAggregatorSpecs(ImmutableList.copyOf(aggs))
         .setInterval(subquery.getIntervals())
-        .setPostAggregatorSpecs(Lists.newArrayList())
+        .setPostAggregatorSpecs(new ArrayList<>())
         .build();
 
     final GroupByQuery outerQuery = new GroupByQuery.Builder(query)
diff --git a/processing/src/main/java/org/apache/druid/query/groupby/strategy/GroupByStrategyV2.java b/processing/src/main/java/org/apache/druid/query/groupby/strategy/GroupByStrategyV2.java
index b3f3919b509..434a828bafe 100644
--- a/processing/src/main/java/org/apache/druid/query/groupby/strategy/GroupByStrategyV2.java
+++ b/processing/src/main/java/org/apache/druid/query/groupby/strategy/GroupByStrategyV2.java
@@ -337,7 +337,7 @@ public class GroupByStrategyV2 implements GroupByStrategy
   {
     // This contains all closeable objects which are closed when the returned iterator iterates all the elements,
     // or an exceptions is thrown. The objects are closed in their reverse order.
-    final List<Closeable> closeOnExit = Lists.newArrayList();
+    final List<Closeable> closeOnExit = new ArrayList<>();
 
     try {
       Supplier grouperSupplier = Suppliers.memoize(
@@ -386,7 +386,7 @@ public class GroupByStrategyV2 implements GroupByStrategy
   {
     // This contains all closeable objects which are closed when the returned iterator iterates all the elements,
     // or an exceptions is thrown. The objects are closed in their reverse order.
-    final List<Closeable> closeOnExit = Lists.newArrayList();
+    final List<Closeable> closeOnExit = new ArrayList<>();
 
     try {
       GroupByQuery queryWithoutSubtotalsSpec = query.withSubtotalsSpec(null);
diff --git a/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java b/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java
index 1fe4a49dca5..8db528560e4 100644
--- a/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java
+++ b/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java
@@ -22,8 +22,6 @@ package org.apache.druid.query.metadata;
 import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Iterables;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
 import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.java.util.common.granularity.Granularities;
@@ -53,8 +51,10 @@ import org.joda.time.Interval;
 import javax.annotation.Nullable;
 
 import java.util.EnumSet;
+import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
+import java.util.TreeMap;
 
 public class SegmentAnalyzer
 {
@@ -92,11 +92,11 @@ public class SegmentAnalyzer
     // get length and column names from storageAdapter
     final int length = storageAdapter.getNumRows();
 
-    final Set<String> columnNames = Sets.newHashSet();
+    final Set<String> columnNames = new HashSet<>();
     Iterables.addAll(columnNames, storageAdapter.getAvailableDimensions());
     Iterables.addAll(columnNames, storageAdapter.getAvailableMetrics());
 
-    Map<String, ColumnAnalysis> columns = Maps.newTreeMap();
+    Map<String, ColumnAnalysis> columns = new TreeMap<>();
 
     for (String columnName : columnNames) {
       final ColumnHolder columnHolder = index == null ? null : index.getColumnHolder(columnName);
diff --git a/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java b/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java
index a08f3788c97..4dc50f56887 100644
--- a/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java
+++ b/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java
@@ -27,7 +27,6 @@ import com.google.common.base.Predicate;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import com.google.common.collect.Ordering;
 import com.google.common.collect.Sets;
 import com.google.inject.Inject;
@@ -60,10 +59,13 @@ import org.joda.time.Interval;
 import javax.annotation.Nullable;
 
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.TreeMap;
 
 public class SegmentMetadataQueryQueryToolChest extends QueryToolChest<SegmentAnalysis, SegmentMetadataQuery>
 {
@@ -273,19 +275,19 @@ public class SegmentMetadataQueryQueryToolChest extends QueryToolChest<SegmentAnalysis, SegmentMetadataQuery>
     List<Interval> newIntervals = null;
     if (arg1.getIntervals() != null) {
-      newIntervals = Lists.newArrayList();
+      newIntervals = new ArrayList<>();
       newIntervals.addAll(arg1.getIntervals());
     }
     if (arg2.getIntervals() != null) {
       if (newIntervals == null) {
-        newIntervals = Lists.newArrayList();
+        newIntervals = new ArrayList<>();
       }
       newIntervals.addAll(arg2.getIntervals());
     }
 
     final Map<String, ColumnAnalysis> leftColumns = arg1.getColumns();
     final Map<String, ColumnAnalysis> rightColumns = arg2.getColumns();
-    Map<String, ColumnAnalysis> columns = Maps.newTreeMap();
+    Map<String, ColumnAnalysis> columns = new TreeMap<>();
 
     Set<String> rightColumnNames = Sets.newHashSet(rightColumns.keySet());
     for (Map.Entry<String, ColumnAnalysis> entry : leftColumns.entrySet()) {
@@ -298,7 +300,7 @@ public class SegmentMetadataQueryQueryToolChest extends QueryToolChest<SegmentAnalysis, SegmentMetadataQuery>
     }
 
-    final Map<String, AggregatorFactory> aggregators = Maps.newHashMap();
+    final Map<String, AggregatorFactory> aggregators = new HashMap<>();
 
     if (lenientAggregatorMerge) {
       // Merge each aggregator individually, ignoring nulls
diff --git a/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java b/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java
index da385178e35..04270fffb47 100644
--- a/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java
+++ b/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java
@@ -21,7 +21,6 @@ package org.apache.druid.query.metadata;
 
 import com.google.common.base.Function;
 import com.google.common.base.Throwables;
-import com.google.common.collect.Maps;
 import com.google.common.util.concurrent.ListenableFuture;
 import com.google.common.util.concurrent.ListeningExecutorService;
 import com.google.common.util.concurrent.MoreExecutors;
@@ -51,8 +50,10 @@ import org.apache.druid.segment.Segment;
 import org.joda.time.Interval;
 
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.TreeMap;
 import java.util.concurrent.CancellationException;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
@@ -97,7 +98,7 @@ public class SegmentMetadataQueryRunnerFactory implements QueryRunnerFactory<SegmentAnalysis, SegmentMetadataQuery>
-        Map<String, ColumnAnalysis> columns = Maps.newTreeMap();
+        Map<String, ColumnAnalysis> columns = new TreeMap<>();
         ColumnIncluderator includerator = updatedQuery.getToInclude();
         for (Map.Entry<String, ColumnAnalysis> entry : analyzedColumns.entrySet()) {
           final String columnName = entry.getKey();
@@ -118,7 +119,7 @@ public class SegmentMetadataQueryRunnerFactory implements QueryRunnerFactory<SegmentAnalysis, SegmentMetadataQuery>
-          aggregators = Maps.newHashMap();
+          aggregators = new HashMap<>();
           for (AggregatorFactory aggregator : metadata.getAggregators()) {
             aggregators.put(aggregator.getName(), aggregator);
           }
diff --git a/processing/src/main/java/org/apache/druid/query/metadata/metadata/ListColumnIncluderator.java b/processing/src/main/java/org/apache/druid/query/metadata/metadata/ListColumnIncluderator.java
index dd3473a8ebb..b1a336c3a40 100644
--- a/processing/src/main/java/org/apache/druid/query/metadata/metadata/ListColumnIncluderator.java
+++ b/processing/src/main/java/org/apache/druid/query/metadata/metadata/ListColumnIncluderator.java
@@ -22,13 +22,13 @@ package org.apache.druid.query.metadata.metadata;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
 import org.apache.druid.java.util.common.StringUtils;
 
 import java.nio.ByteBuffer;
 import java.util.Collections;
 import java.util.List;
 import java.util.Set;
+import java.util.TreeSet;
 
 /**
  */
@@ -41,7 +41,7 @@ public class ListColumnIncluderator implements ColumnIncluderator
       @JsonProperty("columns") List<String> columns
   )
   {
-    this.columns = Sets.newTreeSet(String.CASE_INSENSITIVE_ORDER);
+    this.columns = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
    this.columns.addAll(columns);
  }
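[Editor's note, not part of the patch: the substitution also covers the Guava factory overloads that take arguments, as in ListColumnIncluderator above and the select/topN classes further down. A sketch of the equivalents, with hypothetical variable names:

    import java.util.ArrayDeque;
    import java.util.HashMap;
    import java.util.Map;
    import java.util.Queue;
    import java.util.TreeSet;

    // Sets.newTreeSet(comparator) maps to the TreeSet constructor taking a Comparator
    TreeSet<String> columns = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
    // Maps.newHashMap(other) maps to the HashMap copy constructor
    Map<String, Object> other = new HashMap<>();
    Map<String, Object> copy = new HashMap<>(other);
    // Queues.newArrayDeque() maps to the ArrayDeque constructor
    Queue<Integer> queue = new ArrayDeque<>();

Each JDK constructor has the same semantics as the Guava factory it replaces, so these rewrites are also behavior-preserving.]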
diff --git a/processing/src/main/java/org/apache/druid/query/scan/ScanQuery.java b/processing/src/main/java/org/apache/druid/query/scan/ScanQuery.java
index b93731942e4..3bce2011cf6 100644
--- a/processing/src/main/java/org/apache/druid/query/scan/ScanQuery.java
+++ b/processing/src/main/java/org/apache/druid/query/scan/ScanQuery.java
@@ -21,7 +21,6 @@ package org.apache.druid.query.scan;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
 import org.apache.druid.query.BaseQuery;
 import org.apache.druid.query.DataSource;
 import org.apache.druid.query.Query;
@@ -31,6 +30,7 @@ import org.apache.druid.query.spec.QuerySegmentSpec;
 import org.apache.druid.segment.VirtualColumn;
 import org.apache.druid.segment.VirtualColumns;
 
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
@@ -244,7 +244,7 @@ public class ScanQuery extends BaseQuery<ScanResultValue>
       batchSize = 0;
       limit = 0;
       dimFilter = null;
-      columns = Lists.newArrayList();
+      columns = new ArrayList<>();
       legacy = null;
     }
diff --git a/processing/src/main/java/org/apache/druid/query/search/SearchQueryQueryToolChest.java b/processing/src/main/java/org/apache/druid/query/search/SearchQueryQueryToolChest.java
index 94d5aefa98f..8f11e4b5919 100644
--- a/processing/src/main/java/org/apache/druid/query/search/SearchQueryQueryToolChest.java
+++ b/processing/src/main/java/org/apache/druid/query/search/SearchQueryQueryToolChest.java
@@ -26,7 +26,6 @@ import com.google.common.base.Functions;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import com.google.common.collect.Ordering;
 import com.google.common.primitives.Ints;
 import com.google.inject.Inject;
@@ -53,6 +52,7 @@ import org.apache.druid.query.filter.DimFilter;
 import javax.annotation.Nullable;
 
 import java.nio.ByteBuffer;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -232,7 +232,7 @@ public class SearchQueryQueryToolChest extends QueryToolChest<Result<SearchResultValue>, SearchQuery>
         List result = (List) input;
         boolean needsRename = false;
-        final Map<String, String> outputNameMap = Maps.newHashMap();
+        final Map<String, String> outputNameMap = new HashMap<>();
         if (hasOutputName(result)) {
           List cachedOutputNames = (List) result.get(2);
           Preconditions.checkArgument(cachedOutputNames.size() == dimOutputNames.size(),
diff --git a/processing/src/main/java/org/apache/druid/query/search/UseIndexesStrategy.java b/processing/src/main/java/org/apache/druid/query/search/UseIndexesStrategy.java
index 43cc6f2308a..6bb7a097d04 100644
--- a/processing/src/main/java/org/apache/druid/query/search/UseIndexesStrategy.java
+++ b/processing/src/main/java/org/apache/druid/query/search/UseIndexesStrategy.java
@@ -21,7 +21,6 @@ package org.apache.druid.query.search;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
 import it.unimi.dsi.fastutil.objects.Object2IntRBTreeMap;
 import org.apache.druid.collections.bitmap.BitmapFactory;
 import org.apache.druid.collections.bitmap.ImmutableBitmap;
@@ -44,6 +43,7 @@ import org.apache.druid.segment.column.ColumnHolder;
 import org.apache.druid.segment.column.NumericColumn;
 import org.joda.time.Interval;
 
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -114,8 +114,8 @@ public class UseIndexesStrategy extends SearchStrategy
       List<DimensionSpec> dimensions
   )
   {
-    final List<DimensionSpec> bitmapDims = Lists.newArrayList();
-    final List<DimensionSpec> nonBitmapDims = Lists.newArrayList();
+    final List<DimensionSpec> bitmapDims = new ArrayList<>();
+    final List<DimensionSpec> nonBitmapDims = new ArrayList<>();
     final List<DimensionSpec> dimsToSearch = getDimsToSearch(
         adapter.getAvailableDimensions(),
         dimensions
diff --git a/processing/src/main/java/org/apache/druid/query/select/PagingSpec.java b/processing/src/main/java/org/apache/druid/query/select/PagingSpec.java
index c77c16c2448..58c8628e05a 100644
--- a/processing/src/main/java/org/apache/druid/query/select/PagingSpec.java
+++ b/processing/src/main/java/org/apache/druid/query/select/PagingSpec.java
@@ -22,10 +22,10 @@ package org.apache.druid.query.select;
 import com.fasterxml.jackson.annotation.JacksonInject;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.collect.Maps;
 import org.apache.druid.java.util.common.StringUtils;
 
 import java.nio.ByteBuffer;
+import java.util.HashMap;
 import java.util.Map;
 
 /**
@@ -39,7 +39,7 @@ public class PagingSpec
   public static Map<String, Integer> merge(Iterable<Map<String, Integer>> cursors)
   {
-    Map<String, Integer> next = Maps.newHashMap();
+    Map<String, Integer> next = new HashMap<>();
     for (Map<String, Integer> cursor : cursors) {
       for (Map.Entry<String, Integer> entry : cursor.entrySet()) {
         next.put(entry.getKey(), entry.getValue());
@@ -68,7 +68,7 @@ public class PagingSpec
       @JacksonInject SelectQueryConfig config
   )
   {
-    this.pagingIdentifiers = pagingIdentifiers == null ? Maps.newHashMap() : pagingIdentifiers;
+    this.pagingIdentifiers = pagingIdentifiers == null ? new HashMap<>() : pagingIdentifiers;
     this.threshold = threshold;
 
     boolean defaultFromNext = config.getEnableFromNextDefault();
diff --git a/processing/src/main/java/org/apache/druid/query/select/SelectQueryEngine.java b/processing/src/main/java/org/apache/druid/query/select/SelectQueryEngine.java
index cc1bbaa2472..5df45e1bb99 100644
--- a/processing/src/main/java/org/apache/druid/query/select/SelectQueryEngine.java
+++ b/processing/src/main/java/org/apache/druid/query/select/SelectQueryEngine.java
@@ -56,6 +56,7 @@ import org.joda.time.Interval;
 
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -250,7 +251,7 @@ public class SelectQueryEngine
           builder.addDimension(dimSpec.getOutputName());
         }
 
-        final Map<String, BaseObjectColumnValueSelector<?>> metSelectors = Maps.newHashMap();
+        final Map<String, BaseObjectColumnValueSelector<?>> metSelectors = new HashMap<>();
         for (String metric : metrics) {
           final BaseObjectColumnValueSelector metricSelector =
               cursor.getColumnSelectorFactory().makeColumnValueSelector(metric);
diff --git a/processing/src/main/java/org/apache/druid/query/select/SelectQueryQueryToolChest.java b/processing/src/main/java/org/apache/druid/query/select/SelectQueryQueryToolChest.java
index cb331059db3..3d342e3100e 100644
--- a/processing/src/main/java/org/apache/druid/query/select/SelectQueryQueryToolChest.java
+++ b/processing/src/main/java/org/apache/druid/query/select/SelectQueryQueryToolChest.java
@@ -28,9 +28,7 @@ import com.google.common.base.Predicate;
 import com.google.common.base.Supplier;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import com.google.common.collect.Ordering;
-import com.google.common.collect.Sets;
 import com.google.inject.Inject;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.java.util.common.granularity.Granularity;
@@ -63,6 +61,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
+import java.util.TreeSet;
 
 /**
  */
@@ -192,7 +191,7 @@ public class SelectQueryQueryToolChest extends QueryToolChest<Result<SelectResultValue>, SelectQuery>
-        final Set<String> metrics = Sets.newTreeSet();
+        final Set<String> metrics = new TreeSet<>();
         if (query.getMetrics() != null) {
           metrics.addAll(query.getMetrics());
         }
@@ -394,7 +393,7 @@ public class SelectQueryQueryToolChest extends QueryToolChest<Result<SelectResultValue>, SelectQuery>
-      TreeMap<Long, Long> granularThresholds = Maps.newTreeMap();
+      TreeMap<Long, Long> granularThresholds = new TreeMap<>();
       for (Interval interval : intervals) {
         if (query.isDescending()) {
           long granularEnd = granularity.bucketStart(interval.getEnd()).getMillis();
diff --git a/processing/src/main/java/org/apache/druid/query/select/SelectResultValueBuilder.java b/processing/src/main/java/org/apache/druid/query/select/SelectResultValueBuilder.java
index a4ded3feee6..8f98cfd3cc4 100644
--- a/processing/src/main/java/org/apache/druid/query/select/SelectResultValueBuilder.java
+++ b/processing/src/main/java/org/apache/druid/query/select/SelectResultValueBuilder.java
@@ -22,14 +22,14 @@ package org.apache.druid.query.select;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.MinMaxPriorityQueue;
-import com.google.common.collect.Queues;
-import com.google.common.collect.Sets;
 import com.google.common.primitives.Longs;
 import org.apache.druid.java.util.common.guava.Comparators;
 import org.apache.druid.query.Result;
 import org.joda.time.DateTime;
 
+import java.util.ArrayDeque;
 import java.util.Comparator;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Queue;
@@ -72,8 +72,8 @@ public class SelectResultValueBuilder
     this.timestamp = timestamp;
     this.pagingSpec = pagingSpec;
     this.descending = descending;
-    this.dimensions = Sets.newHashSet();
-    this.metrics = Sets.newHashSet();
+    this.dimensions = new HashSet<>();
+    this.metrics = new HashSet<>();
     this.pagingIdentifiers = Maps.newLinkedHashMap();
     this.pQueue = instantiatePQueue();
   }
@@ -123,7 +123,7 @@ public class SelectResultValueBuilder
 
   protected Queue<EventHolder> instantiatePQueue()
   {
-    return Queues.newArrayDeque();
+    return new ArrayDeque<>();
   }
 
   public static class MergeBuilder extends SelectResultValueBuilder
diff --git a/processing/src/main/java/org/apache/druid/query/spec/SpecificSegmentQueryRunner.java b/processing/src/main/java/org/apache/druid/query/spec/SpecificSegmentQueryRunner.java
index c7bfad1b31f..a2673646621 100644
--- a/processing/src/main/java/org/apache/druid/query/spec/SpecificSegmentQueryRunner.java
+++ b/processing/src/main/java/org/apache/druid/query/spec/SpecificSegmentQueryRunner.java
@@ -20,7 +20,6 @@ package org.apache.druid.query.spec;
 
 import com.google.common.base.Supplier;
-import com.google.common.collect.Lists;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.java.util.common.guava.Accumulator;
 import org.apache.druid.java.util.common.guava.Sequence;
@@ -37,6 +36,7 @@ import org.apache.druid.query.SegmentDescriptor;
 import org.apache.druid.segment.SegmentMissingException;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -155,7 +155,7 @@ public class SpecificSegmentQueryRunner<T> implements QueryRunner<T>
   {
     List<SegmentDescriptor> missingSegments = (List<SegmentDescriptor>) responseContext.get(Result.MISSING_SEGMENTS_KEY);
     if (missingSegments == null) {
-      missingSegments = Lists.newArrayList();
+      missingSegments = new ArrayList<>();
       responseContext.put(Result.MISSING_SEGMENTS_KEY, missingSegments);
     }
     missingSegments.add(specificSpec.getDescriptor());
diff --git a/processing/src/main/java/org/apache/druid/query/timeboundary/TimeBoundaryQuery.java b/processing/src/main/java/org/apache/druid/query/timeboundary/TimeBoundaryQuery.java
index 57de16af7b1..8c328ca29ad 100644
--- a/processing/src/main/java/org/apache/druid/query/timeboundary/TimeBoundaryQuery.java
+++ b/processing/src/main/java/org/apache/druid/query/timeboundary/TimeBoundaryQuery.java
@@ -21,8 +21,6 @@ package org.apache.druid.query.timeboundary;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import it.unimi.dsi.fastutil.bytes.ByteArrays;
 import org.apache.druid.java.util.common.DateTimes;
 import org.apache.druid.java.util.common.Intervals;
@@ -38,6 +36,8 @@ import org.apache.druid.query.spec.QuerySegmentSpec;
 import org.joda.time.DateTime;
 
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -128,8 +128,8 @@ public class TimeBoundaryQuery extends BaseQuery<Result<TimeBoundaryResultValue>>
 
   public Iterable<Result<TimeBoundaryResultValue>> buildResult(DateTime timestamp, DateTime min, DateTime max)
   {
-    List<Result<TimeBoundaryResultValue>> results = Lists.newArrayList();
-    Map<String, Object> result = Maps.newHashMap();
+    List<Result<TimeBoundaryResultValue>> results = new ArrayList<>();
+    Map<String, Object> result = new HashMap<>();
 
     if (min != null) {
       result.put(MIN_TIME, min);
@@ -147,7 +147,7 @@ public class TimeBoundaryQuery extends BaseQuery<Result<TimeBoundaryResultValue>>
   public Iterable<Result<TimeBoundaryResultValue>> mergeResults(List<Result<TimeBoundaryResultValue>> results)
   {
     if (results == null || results.isEmpty()) {
-      return Lists.newArrayList();
+      return new ArrayList<>();
     }
 
     DateTime min = DateTimes.MAX;
diff --git a/processing/src/main/java/org/apache/druid/query/timeseries/TimeseriesQueryQueryToolChest.java b/processing/src/main/java/org/apache/druid/query/timeseries/TimeseriesQueryQueryToolChest.java
index 1cb75c5ed99..39b462c02fa 100644
--- a/processing/src/main/java/org/apache/druid/query/timeseries/TimeseriesQueryQueryToolChest.java
+++ b/processing/src/main/java/org/apache/druid/query/timeseries/TimeseriesQueryQueryToolChest.java
@@ -378,7 +378,7 @@ public class TimeseriesQueryQueryToolChest extends QueryToolChest<Result<TimeseriesResultValue>, TimeseriesQuery>
       {
         final TimeseriesResultValue holder = result.getValue();
-        final Map<String, Object> values = Maps.newHashMap(holder.getBaseObject());
+        final Map<String, Object> values = new HashMap<>(holder.getBaseObject());
         if (calculatePostAggs && !query.getPostAggregatorSpecs().isEmpty()) {
           // put non finalized aggregators for calculating dependent post Aggregators
           for (AggregatorFactory agg : query.getAggregatorSpecs()) {
diff --git a/processing/src/main/java/org/apache/druid/query/topn/TimeExtractionTopNAlgorithm.java b/processing/src/main/java/org/apache/druid/query/topn/TimeExtractionTopNAlgorithm.java
index 57d1685f223..3fd8795303c 100644
--- a/processing/src/main/java/org/apache/druid/query/topn/TimeExtractionTopNAlgorithm.java
+++ b/processing/src/main/java/org/apache/druid/query/topn/TimeExtractionTopNAlgorithm.java
@@ -19,13 +19,13 @@
 
 package org.apache.druid.query.topn;
 
-import com.google.common.collect.Maps;
 import org.apache.druid.query.ColumnSelectorPlus;
 import org.apache.druid.query.aggregation.Aggregator;
 import org.apache.druid.segment.Cursor;
 import org.apache.druid.segment.DimensionSelector;
 import org.apache.druid.segment.StorageAdapter;
 
+import java.util.HashMap;
 import java.util.Map;
 
 public class TimeExtractionTopNAlgorithm extends BaseTopNAlgorithm<int[], Map<String, Aggregator[]>, TopNParams>
@@ -65,7 +65,7 @@ public class TimeExtractionTopNAlgorithm extends BaseTopNAlgorithm<int[], Map<String, Aggregator[]>, TopNParams>
   @Override
   protected Map<String, Aggregator[]> makeDimValAggregateStore(TopNParams params)
   {
-    return Maps.newHashMap();
+    return new HashMap<>();
   }
 
   @Override
diff --git a/processing/src/main/java/org/apache/druid/query/topn/TopNQueryBuilder.java b/processing/src/main/java/org/apache/druid/query/topn/TopNQueryBuilder.java
index 5fd3b00ab0a..e140a20453e 100644
--- a/processing/src/main/java/org/apache/druid/query/topn/TopNQueryBuilder.java
+++ b/processing/src/main/java/org/apache/druid/query/topn/TopNQueryBuilder.java
@@ -88,8 +88,8 @@ public class TopNQueryBuilder
     querySegmentSpec = null;
     dimFilter = null;
     granularity = Granularities.ALL;
-    aggregatorSpecs = Lists.newArrayList();
-    postAggregatorSpecs = Lists.newArrayList();
+    aggregatorSpecs = new ArrayList<>();
+    postAggregatorSpecs = new ArrayList<>();
     context = null;
   }
diff --git a/processing/src/main/java/org/apache/druid/query/topn/TopNResultValue.java b/processing/src/main/java/org/apache/druid/query/topn/TopNResultValue.java
index 84d97efa2f5..01b069c8959 100644
--- a/processing/src/main/java/org/apache/druid/query/topn/TopNResultValue.java
+++ b/processing/src/main/java/org/apache/druid/query/topn/TopNResultValue.java
@@ -26,6 +26,7 @@ import com.google.common.collect.Lists;
 import org.apache.druid.java.util.common.IAE;
 
 import javax.annotation.Nullable;
+import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -41,7 +42,7 @@ public class TopNResultValue implements Iterable<DimensionAndMetricValueExtractor>
       List value
   )
   {
-    this.value = (value == null) ? Lists.newArrayList() : Lists.transform(
+    this.value = (value == null) ? new ArrayList<>() : Lists.transform(
         value,
         new Function<Object, DimensionAndMetricValueExtractor>()
         {
diff --git a/processing/src/main/java/org/apache/druid/segment/IndexIO.java b/processing/src/main/java/org/apache/druid/segment/IndexIO.java
index 3c5a06fe311..1ee8b7c21e9 100644
--- a/processing/src/main/java/org/apache/druid/segment/IndexIO.java
+++ b/processing/src/main/java/org/apache/druid/segment/IndexIO.java
@@ -73,10 +73,12 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
+import java.util.TreeSet;
 
 public class IndexIO
 {
@@ -347,9 +349,9 @@ public class IndexIO
         metrics.put(metric, holder);
       }
 
-      Map<String, GenericIndexed<String>> dimValueLookups = Maps.newHashMap();
-      Map<String, VSizeColumnarMultiInts> dimColumns = Maps.newHashMap();
-      Map<String, GenericIndexed<ImmutableBitmap>> bitmaps = Maps.newHashMap();
+      Map<String, GenericIndexed<String>> dimValueLookups = new HashMap<>();
+      Map<String, VSizeColumnarMultiInts> dimColumns = new HashMap<>();
+      Map<String, GenericIndexed<ImmutableBitmap>> bitmaps = new HashMap<>();
 
       for (String dimension : IndexedIterable.create(availableDimensions)) {
         ByteBuffer dimBuffer = smooshedFiles.mapFile(makeDimFile(inDir, dimension).getName());
@@ -373,7 +375,7 @@ public class IndexIO
         );
       }
 
-      Map<String, ImmutableRTree> spatialIndexed = Maps.newHashMap();
+      Map<String, ImmutableRTree> spatialIndexed = new HashMap<>();
       ByteBuffer spatialBuffer = smooshedFiles.mapFile("spatial.drd");
       while (spatialBuffer != null && spatialBuffer.hasRemaining()) {
         spatialIndexed.put(
@@ -424,7 +426,7 @@ public class IndexIO
     {
       MMappedIndex index = legacyHandler.mapDir(inDir);
 
-      Map<String, ColumnHolder> columns = Maps.newHashMap();
+      Map<String, ColumnHolder> columns = new HashMap<>();
 
       for (String dimension : index.getAvailableDimensions()) {
         ColumnBuilder builder = new ColumnBuilder()
@@ -482,7 +484,7 @@ public class IndexIO
         }
       }
 
-      Set<String> colSet = Sets.newTreeSet();
+      Set<String> colSet = new TreeSet<>();
       for (String dimension : index.getAvailableDimensions()) {
        colSet.add(dimension);
      }
@@ -583,7 +585,7 @@ public class IndexIO
       }
     }
 
-    Map<String, ColumnHolder> columns = Maps.newHashMap();
+    Map<String, ColumnHolder> columns = new HashMap<>();
 
     for (String columnName : cols) {
       if (Strings.isNullOrEmpty(columnName)) {
diff --git a/processing/src/main/java/org/apache/druid/segment/IndexMerger.java b/processing/src/main/java/org/apache/druid/segment/IndexMerger.java
index 0f29cb150c1..4661b652eed 100644
--- a/processing/src/main/java/org/apache/druid/segment/IndexMerger.java
+++ b/processing/src/main/java/org/apache/druid/segment/IndexMerger.java
@@ -25,7 +25,6 @@ import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterators;
 import com.google.common.collect.Lists;
 import com.google.common.collect.PeekingIterator;
-import com.google.common.collect.Sets;
 import com.google.inject.ImplementedBy;
 import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.common.utils.SerializerUtils;
@@ -54,6 +53,7 @@ import java.util.NoSuchElementException;
 import java.util.Objects;
 import java.util.PriorityQueue;
 import java.util.Set;
+import java.util.TreeSet;
 import java.util.stream.Collectors;
 
 @ImplementedBy(IndexMergerV9.class)
@@ -142,7 +142,7 @@ public interface IndexMerger
   static <T extends Comparable<? super T>> ArrayList<T> mergeIndexed(List<Iterable<T>> indexedLists)
   {
-    Set<T> retVal = Sets.newTreeSet(Comparators.naturalNullsFirst());
+    Set<T> retVal = new TreeSet<>(Comparators.naturalNullsFirst());
 
     for (Iterable<T> indexedList : indexedLists) {
       for (T val : indexedList) {
@@ -388,7 +388,7 @@ public interface IndexMerger
     };
 
     protected final IntBuffer[] conversions;
-    protected final List<Pair<ByteBuffer, Integer>> directBufferAllocations = Lists.newArrayList();
+    protected final List<Pair<ByteBuffer, Integer>> directBufferAllocations = new ArrayList<>();
     protected final PriorityQueue<Pair<Integer, PeekingIterator<String>>> pQueue;
 
     protected int counter;
diff --git a/processing/src/main/java/org/apache/druid/segment/IndexMergerV9.java b/processing/src/main/java/org/apache/druid/segment/IndexMergerV9.java
index d39994e42a8..135f8cb1266 100644
--- a/processing/src/main/java/org/apache/druid/segment/IndexMergerV9.java
+++ b/processing/src/main/java/org/apache/druid/segment/IndexMergerV9.java
@@ -24,8 +24,6 @@ import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
 import com.google.common.io.Files;
 import com.google.common.primitives.Ints;
 import com.google.inject.Inject;
@@ -75,10 +73,13 @@ import java.nio.IntBuffer;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.TreeMap;
 import java.util.function.Function;
 import java.util.stream.Collectors;
@@ -156,8 +157,8 @@ public class IndexMergerV9 implements IndexMerger
     log.info("Completed factory.json in %,d millis", System.currentTimeMillis() - startTime);
 
     progress.progress();
-    final Map<String, ValueType> metricsValueTypes = Maps.newTreeMap(Comparators.naturalNullsFirst());
-    final Map<String, String> metricTypeNames = Maps.newTreeMap(Comparators.naturalNullsFirst());
+    final Map<String, ValueType> metricsValueTypes = new TreeMap<>(Comparators.naturalNullsFirst());
+    final Map<String, String> metricTypeNames = new TreeMap<>(Comparators.naturalNullsFirst());
     final List<ColumnCapabilitiesImpl> dimCapabilities = Lists.newArrayListWithCapacity(mergedDimensions.size());
     mergeCapabilities(adapters, mergedDimensions, metricsValueTypes, metricTypeNames, dimCapabilities);
@@ -270,8 +271,8 @@ public class IndexMergerV9 implements IndexMerger
     progress.startSection(section);
     long startTime = System.currentTimeMillis();
 
-    final Set<String> finalDimensions = Sets.newLinkedHashSet();
-    final Set<String> finalColumns = Sets.newLinkedHashSet();
+    final Set<String> finalDimensions = new LinkedHashSet<>();
+    final Set<String> finalColumns = new LinkedHashSet<>();
     finalColumns.addAll(mergedMetrics);
     for (int i = 0; i < mergedDimensions.size(); ++i) {
       if (mergers.get(i).canSkip()) {
@@ -707,7 +708,7 @@ public class IndexMergerV9 implements IndexMerger
       final List<ColumnCapabilitiesImpl> dimCapabilities
   )
  {
-    final Map<String, ColumnCapabilitiesImpl> capabilitiesMap = Maps.newHashMap();
+    final Map<String, ColumnCapabilitiesImpl> capabilitiesMap = new HashMap<>();
     for (IndexableAdapter adapter : adapters) {
       for (String dimension : adapter.getDimensionNames()) {
         ColumnCapabilities capabilities = adapter.getCapabilities(dimension);
diff --git a/processing/src/main/java/org/apache/druid/segment/StringDimensionIndexer.java b/processing/src/main/java/org/apache/druid/segment/StringDimensionIndexer.java
index 3b4954600b6..cf201e4f9b2 100644
--- a/processing/src/main/java/org/apache/druid/segment/StringDimensionIndexer.java
+++ b/processing/src/main/java/org/apache/druid/segment/StringDimensionIndexer.java
@@ -50,6 +50,7 @@ import org.apache.druid.segment.incremental.IncrementalIndexRow;
 import org.apache.druid.segment.incremental.IncrementalIndexRowHolder;
 
 import javax.annotation.Nullable;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.BitSet;
 import java.util.Iterator;
@@ -77,7 +78,7 @@ public class StringDimensionIndexer implements DimensionIndexer<Integer, int[], String>
     private final Object2IntMap<String> valueToId = new Object2IntOpenHashMap<>();
-    private final List<String> idToValue = Lists.newArrayList();
+    private final List<String> idToValue = new ArrayList<>();
     private final ReentrantReadWriteLock lock;
 
     public DimensionDictionary()
diff --git a/processing/src/main/java/org/apache/druid/segment/VirtualColumns.java b/processing/src/main/java/org/apache/druid/segment/VirtualColumns.java
index 5e4e6eb5ff2..ce4dcaf60bf 100644
--- a/processing/src/main/java/org/apache/druid/segment/VirtualColumns.java
+++ b/processing/src/main/java/org/apache/druid/segment/VirtualColumns.java
@@ -25,7 +25,6 @@ import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 import org.apache.druid.java.util.common.Cacheable;
 import org.apache.druid.java.util.common.IAE;
@@ -37,6 +36,7 @@ import org.apache.druid.segment.column.ColumnHolder;
 import org.apache.druid.segment.virtual.VirtualizedColumnSelectorFactory;
 
 import javax.annotation.Nullable;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -76,8 +76,8 @@ public class VirtualColumns implements Cacheable
     if (virtualColumns == null || virtualColumns.isEmpty()) {
       return EMPTY;
     }
-    Map<String, VirtualColumn> withDotSupport = Maps.newHashMap();
-    Map<String, VirtualColumn> withoutDotSupport = Maps.newHashMap();
+    Map<String, VirtualColumn> withDotSupport = new HashMap<>();
+    Map<String, VirtualColumn> withoutDotSupport = new HashMap<>();
     for (VirtualColumn vc : virtualColumns) {
       if (Strings.isNullOrEmpty(vc.getOutputName())) {
         throw new IAE("Empty or null virtualColumn name");
diff --git a/processing/src/main/java/org/apache/druid/segment/column/ColumnDescriptor.java b/processing/src/main/java/org/apache/druid/segment/column/ColumnDescriptor.java
index dd341eb686c..1307640fcf9 100644
--- a/processing/src/main/java/org/apache/druid/segment/column/ColumnDescriptor.java
+++ b/processing/src/main/java/org/apache/druid/segment/column/ColumnDescriptor.java
@@ -22,7 +22,6 @@ package org.apache.druid.segment.column;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
 import org.apache.druid.java.util.common.IAE;
 import org.apache.druid.java.util.common.io.smoosh.FileSmoosher;
 import org.apache.druid.java.util.common.io.smoosh.SmooshedFileMapper;
@@ -32,6 +31,7 @@ import org.apache.druid.segment.serde.Serializer;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.nio.channels.WritableByteChannel;
+import java.util.ArrayList;
 import java.util.List;
 
 /**
@@ -114,7 +114,7 @@ public class ColumnDescriptor implements Serializer
     private ValueType valueType = null;
     private Boolean hasMultipleValues = null;
 
-    private final List<ColumnPartSerde> parts = Lists.newArrayList();
+    private final List<ColumnPartSerde> parts = new ArrayList<>();
 
     public Builder setValueType(ValueType valueType)
     {
diff --git a/processing/src/main/java/org/apache/druid/segment/data/CompressionFactory.java b/processing/src/main/java/org/apache/druid/segment/data/CompressionFactory.java
index a9d4cbe6c42..b204bfd97af 100644
--- a/processing/src/main/java/org/apache/druid/segment/data/CompressionFactory.java
+++ b/processing/src/main/java/org/apache/druid/segment/data/CompressionFactory.java
@@ -22,7 +22,6 @@ package org.apache.druid.segment.data;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonValue;
 import com.google.common.base.Supplier;
-import com.google.common.collect.Maps;
 import org.apache.druid.java.util.common.IAE;
 import org.apache.druid.java.util.common.StringUtils;
 import org.apache.druid.segment.serde.MetaSerdeHelper;
@@ -32,6 +31,7 @@ import org.apache.druid.segment.writeout.WriteOutBytes;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
+import java.util.HashMap;
 import java.util.Map;
 import java.util.function.Function;
@@ -199,7 +199,7 @@ public class CompressionFactory
       return id;
     }
 
-    static final Map<Byte, LongEncodingFormat> idMap = Maps.newHashMap();
+    static final Map<Byte, LongEncodingFormat> idMap = new HashMap<>();
 
     static {
       for (LongEncodingFormat format : LongEncodingFormat.values()) {
diff --git a/processing/src/main/java/org/apache/druid/segment/data/CompressionStrategy.java b/processing/src/main/java/org/apache/druid/segment/data/CompressionStrategy.java
index 526735c3de8..85dc62987e1 100644
--- a/processing/src/main/java/org/apache/druid/segment/data/CompressionStrategy.java
+++ b/processing/src/main/java/org/apache/druid/segment/data/CompressionStrategy.java
@@ -21,7 +21,6 @@ package org.apache.druid.segment.data;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonValue;
-import com.google.common.collect.Maps;
 import com.ning.compress.BufferRecycler;
 import com.ning.compress.lzf.LZFDecoder;
 import com.ning.compress.lzf.LZFEncoder;
@@ -36,6 +35,7 @@ import org.apache.druid.segment.CompressedPools;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.util.HashMap;
 import java.util.Map;
 
 /**
@@ -136,7 +136,7 @@ public enum CompressionStrategy
     return valueOf(StringUtils.toUpperCase(name));
   }
 
-  static final Map<Byte, CompressionStrategy> idMap = Maps.newHashMap();
+  static final Map<Byte, CompressionStrategy> idMap = new HashMap<>();
 
   static {
     for (CompressionStrategy strategy : CompressionStrategy.values()) {
diff --git a/processing/src/main/java/org/apache/druid/segment/filter/Filters.java b/processing/src/main/java/org/apache/druid/segment/filter/Filters.java
index 07ae57413d8..c9630ccecc8 100644
--- a/processing/src/main/java/org/apache/druid/segment/filter/Filters.java
+++ b/processing/src/main/java/org/apache/druid/segment/filter/Filters.java
@@ -504,14 +504,14 @@ public class Filters
       return pushDownNot(((NotFilter) child).getBaseFilter());
     }
     if (child instanceof AndFilter) {
-      List<Filter> children = Lists.newArrayList();
+      List<Filter> children = new ArrayList<>();
       for (Filter grandChild : ((AndFilter) child).getFilters()) {
         children.add(pushDownNot(new NotFilter(grandChild)));
       }
       return new OrFilter(children);
     }
     if (child instanceof OrFilter) {
-      List<Filter> children = Lists.newArrayList();
+      List<Filter> children = new ArrayList<>();
       for (Filter grandChild : ((OrFilter) child).getFilters()) {
         children.add(pushDownNot(new NotFilter(grandChild)));
       }
@@ -521,7 +521,7 @@ public class Filters
 
     if (current instanceof AndFilter) {
-      List<Filter> children = Lists.newArrayList();
+      List<Filter> children = new ArrayList<>();
       for (Filter child : ((AndFilter) current).getFilters()) {
         children.add(pushDownNot(child));
       }
@@ -530,7 +530,7 @@ public class Filters
 
     if (current instanceof OrFilter) {
-      List<Filter> children = Lists.newArrayList();
+      List<Filter> children = new ArrayList<>();
       for (Filter child : ((OrFilter) current).getFilters()) {
         children.add(pushDownNot(child));
       }
@@ -547,7 +547,7 @@ public class Filters
       return new NotFilter(convertToCNFInternal(((NotFilter) current).getBaseFilter()));
     }
     if (current instanceof AndFilter) {
-      List<Filter> children = Lists.newArrayList();
+      List<Filter> children = new ArrayList<>();
       for (Filter child : ((AndFilter) current).getFilters()) {
         children.add(convertToCNFInternal(child));
       }
@@ -571,7 +571,7 @@ public class Filters
       }
     }
     if (!andList.isEmpty()) {
-      List<Filter> result = Lists.newArrayList();
+      List<Filter> result = new ArrayList<>();
       generateAllCombinations(result, andList, nonAndList);
       return new AndFilter(result);
     }
org.apache.druid.collections.ResourceHolder; import org.apache.druid.data.input.InputRow; @@ -37,6 +36,7 @@ import org.apache.druid.segment.ColumnSelectorFactory; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; @@ -109,7 +109,7 @@ public class OffheapIncrementalIndex extends IncrementalIndex final boolean concurrentEventAdd ) { - selectors = Maps.newHashMap(); + selectors = new HashMap<>(); aggOffsetInBuffer = new int[metrics.length]; for (int i = 0; i < metrics.length; i++) { diff --git a/processing/src/main/java/org/apache/druid/segment/incremental/OnheapIncrementalIndex.java b/processing/src/main/java/org/apache/druid/segment/incremental/OnheapIncrementalIndex.java index 02f0a37cee5..6f49730d694 100644 --- a/processing/src/main/java/org/apache/druid/segment/incremental/OnheapIncrementalIndex.java +++ b/processing/src/main/java/org/apache/druid/segment/incremental/OnheapIncrementalIndex.java @@ -21,7 +21,6 @@ package org.apache.druid.segment.incremental; import com.google.common.base.Supplier; import com.google.common.base.Throwables; -import com.google.common.collect.Maps; import org.apache.druid.data.input.InputRow; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.io.Closer; @@ -125,7 +124,7 @@ public class OnheapIncrementalIndex extends IncrementalIndex final boolean concurrentEventAdd ) { - selectors = Maps.newHashMap(); + selectors = new HashMap<>(); for (AggregatorFactory agg : metrics) { selectors.put( agg.getName(), diff --git a/processing/src/main/java/org/apache/druid/segment/incremental/SpatialDimensionRowTransformer.java b/processing/src/main/java/org/apache/druid/segment/incremental/SpatialDimensionRowTransformer.java index cebcfb4f74b..401c498d5d4 100644 --- a/processing/src/main/java/org/apache/druid/segment/incremental/SpatialDimensionRowTransformer.java +++ b/processing/src/main/java/org/apache/druid/segment/incremental/SpatialDimensionRowTransformer.java @@ -26,7 +26,6 @@ import com.google.common.base.Splitter; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.collect.Sets; import org.apache.druid.data.input.InputRow; import org.apache.druid.data.input.Row; @@ -34,7 +33,9 @@ import org.apache.druid.data.input.impl.SpatialDimensionSchema; import org.apache.druid.java.util.common.ISE; import org.joda.time.DateTime; +import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -52,7 +53,7 @@ public class SpatialDimensionRowTransformer implements Function spatialDimensions) { - this.spatialDimensionMap = Maps.newHashMap(); + this.spatialDimensionMap = new HashMap<>(); for (SpatialDimensionSchema spatialDimension : spatialDimensions) { if (this.spatialDimensionMap.put(spatialDimension.getDimName(), spatialDimension) != null) { throw new ISE("Duplicate spatial dimension names found! 
Check your schema yo!"); @@ -78,7 +79,7 @@ public class SpatialDimensionRowTransformer implements Function> spatialLookup = Maps.newHashMap(); + final Map> spatialLookup = new HashMap<>(); // remove all spatial dimensions final List finalDims = Lists.newArrayList( @@ -162,7 +163,7 @@ public class SpatialDimensionRowTransformer implements Function spatialDimVals = Lists.newArrayList(); + List spatialDimVals = new ArrayList<>(); for (String dim : spatialDim.getDims()) { List partialDimVals = row.getDimension(dim); if (isSpatialDimValsValid(partialDimVals)) { diff --git a/processing/src/main/java/org/apache/druid/segment/serde/ComplexMetrics.java b/processing/src/main/java/org/apache/druid/segment/serde/ComplexMetrics.java index 314326b9bb4..64f37092a71 100644 --- a/processing/src/main/java/org/apache/druid/segment/serde/ComplexMetrics.java +++ b/processing/src/main/java/org/apache/druid/segment/serde/ComplexMetrics.java @@ -19,17 +19,17 @@ package org.apache.druid.segment.serde; -import com.google.common.collect.Maps; import org.apache.druid.java.util.common.ISE; import javax.annotation.Nullable; +import java.util.HashMap; import java.util.Map; /** */ public class ComplexMetrics { - private static final Map complexSerializers = Maps.newHashMap(); + private static final Map complexSerializers = new HashMap<>(); @Nullable public static ComplexMetricSerde getSerdeForType(String type) diff --git a/processing/src/main/java/org/apache/druid/segment/virtual/ExpressionSelectors.java b/processing/src/main/java/org/apache/druid/segment/virtual/ExpressionSelectors.java index c82be3bbb90..cc1ba97aeaa 100644 --- a/processing/src/main/java/org/apache/druid/segment/virtual/ExpressionSelectors.java +++ b/processing/src/main/java/org/apache/druid/segment/virtual/ExpressionSelectors.java @@ -23,7 +23,6 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.base.Supplier; import com.google.common.collect.Iterables; -import com.google.common.collect.Maps; import org.apache.druid.common.config.NullHandling; import org.apache.druid.math.expr.Expr; import org.apache.druid.math.expr.ExprEval; @@ -48,6 +47,7 @@ import org.apache.druid.segment.data.IndexedInts; import javax.annotation.Nonnull; import javax.annotation.Nullable; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -237,7 +237,7 @@ public class ExpressionSelectors private static Expr.ObjectBinding createBindings(Expr expression, ColumnSelectorFactory columnSelectorFactory) { - final Map> suppliers = Maps.newHashMap(); + final Map> suppliers = new HashMap<>(); for (String columnName : Parser.findRequiredBindings(expression)) { final ColumnCapabilities columnCapabilities = columnSelectorFactory .getColumnCapabilities(columnName); diff --git a/processing/src/test/java/org/apache/druid/collections/bitmap/WrappedBitSetBitmapBitSetTest.java b/processing/src/test/java/org/apache/druid/collections/bitmap/WrappedBitSetBitmapBitSetTest.java index 05e23c0c94c..36e6be43cac 100644 --- a/processing/src/test/java/org/apache/druid/collections/bitmap/WrappedBitSetBitmapBitSetTest.java +++ b/processing/src/test/java/org/apache/druid/collections/bitmap/WrappedBitSetBitmapBitSetTest.java @@ -28,6 +28,7 @@ import org.roaringbitmap.IntIterator; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.util.BitSet; +import java.util.HashSet; import java.util.Set; /** @@ -148,7 +149,7 @@ public class WrappedBitSetBitmapBitSetTest { WrappedBitSetBitmap bitSet = 
defaultBitSet(); WrappedBitSetBitmap bitSet2 = defaultBitSet(); - Set defaultBitSet = Sets.newHashSet(); + Set defaultBitSet = new HashSet<>(); bitSet.remove(1); bitSet2.remove(2); diff --git a/processing/src/test/java/org/apache/druid/collections/spatial/ImmutableRTreeTest.java b/processing/src/test/java/org/apache/druid/collections/spatial/ImmutableRTreeTest.java index 22e482c1a13..cb02683ed8c 100644 --- a/processing/src/test/java/org/apache/druid/collections/spatial/ImmutableRTreeTest.java +++ b/processing/src/test/java/org/apache/druid/collections/spatial/ImmutableRTreeTest.java @@ -40,6 +40,7 @@ import org.roaringbitmap.IntIterator; import java.nio.ByteBuffer; import java.util.Arrays; +import java.util.HashSet; import java.util.Locale; import java.util.Random; import java.util.Set; @@ -399,7 +400,7 @@ public class ImmutableRTreeTest ImmutableBitmap finalSet = bf.union(points); Assert.assertTrue(finalSet.size() == 500); - Set expected = Sets.newHashSet(); + Set expected = new HashSet<>(); for (int i = 0; i < 500; i++) { expected.add(i); } @@ -445,7 +446,7 @@ public class ImmutableRTreeTest ImmutableBitmap finalSet = bf.union(points); Assert.assertTrue(finalSet.size() == 500); - Set expected = Sets.newHashSet(); + Set expected = new HashSet<>(); for (int i = 0; i < 500; i++) { expected.add(i); } diff --git a/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java b/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java index 479a5244420..bc712b70a6f 100644 --- a/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java +++ b/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java @@ -21,7 +21,6 @@ package org.apache.druid.query; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Maps; import org.apache.druid.data.input.impl.DimensionsSpec; import org.apache.druid.data.input.impl.InputRowParser; import org.apache.druid.data.input.impl.JSONParseSpec; @@ -270,7 +269,7 @@ public class DoubleStorageTest ) .merge(true) .build(); - List results = runner.run(QueryPlus.wrap(segmentMetadataQuery), Maps.newHashMap()).toList(); + List results = runner.run(QueryPlus.wrap(segmentMetadataQuery), new HashMap<>()).toList(); Assert.assertEquals(Collections.singletonList(expectedSegmentAnalysis), results); diff --git a/processing/src/test/java/org/apache/druid/query/MultiValuedDimensionTest.java b/processing/src/test/java/org/apache/druid/query/MultiValuedDimensionTest.java index ba4fe7e8e0c..1b49b7dba18 100644 --- a/processing/src/test/java/org/apache/druid/query/MultiValuedDimensionTest.java +++ b/processing/src/test/java/org/apache/druid/query/MultiValuedDimensionTest.java @@ -22,8 +22,6 @@ package org.apache.druid.query; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.io.Files; import org.apache.commons.io.FileUtils; import org.apache.druid.collections.CloseableStupidPool; @@ -70,9 +68,11 @@ import org.junit.runners.Parameterized; import java.io.File; import java.io.IOException; import java.nio.ByteBuffer; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -84,7 +84,7 @@ public class 
MultiValuedDimensionTest @Parameterized.Parameters(name = "{0}") public static Collection constructorFeeder() { - final List constructors = Lists.newArrayList(); + final List constructors = new ArrayList<>(); for (GroupByQueryConfig config : GroupByQueryRunnerTest.testConfigs()) { constructors.add(new Object[]{config, TmpFileSegmentWriteOutMediumFactory.instance()}); constructors.add(new Object[]{config, OffHeapMemorySegmentWriteOutMediumFactory.instance()}); @@ -279,7 +279,7 @@ public class MultiValuedDimensionTest new QueryableIndexSegment("sid1", queryableIndex), null ); - Map context = Maps.newHashMap(); + Map context = new HashMap<>(); Sequence> result = runner.run(QueryPlus.wrap(query), context); List> expectedResults = Collections.singletonList( new Result( diff --git a/processing/src/test/java/org/apache/druid/query/QueryRunnerTestHelper.java b/processing/src/test/java/org/apache/druid/query/QueryRunnerTestHelper.java index b79c5d31a27..cfc18e85b9f 100644 --- a/processing/src/test/java/org/apache/druid/query/QueryRunnerTestHelper.java +++ b/processing/src/test/java/org/apache/druid/query/QueryRunnerTestHelper.java @@ -454,11 +454,11 @@ public class QueryRunnerTestHelper public Sequence run(QueryPlus queryPlus, Map responseContext) { Query query = queryPlus.getQuery(); - List segments = Lists.newArrayList(); + List segments = new ArrayList<>(); for (Interval interval : query.getIntervals()) { segments.addAll(timeline.lookup(interval)); } - List> sequences = Lists.newArrayList(); + List> sequences = new ArrayList<>(); for (TimelineObjectHolder holder : toolChest.filterSegments(query, segments)) { Segment segment = holder.getObject().getChunk(0).getObject(); QueryPlus queryPlusRunning = queryPlus.withQuerySegmentSpec( diff --git a/processing/src/test/java/org/apache/druid/query/RetryQueryRunnerTest.java b/processing/src/test/java/org/apache/druid/query/RetryQueryRunnerTest.java index 1f866a5b7a2..6e921527aa7 100644 --- a/processing/src/test/java/org/apache/druid/query/RetryQueryRunnerTest.java +++ b/processing/src/test/java/org/apache/druid/query/RetryQueryRunnerTest.java @@ -20,8 +20,6 @@ package org.apache.druid.query; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.java.util.common.guava.Sequence; @@ -35,8 +33,10 @@ import org.apache.druid.segment.TestHelper; import org.junit.Assert; import org.junit.Test; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; @@ -90,7 +90,7 @@ public class RetryQueryRunnerTest public void testRunWithMissingSegments() { Map context = new ConcurrentHashMap<>(); - context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList()); + context.put(Result.MISSING_SEGMENTS_KEY, new ArrayList<>()); RetryQueryRunner> runner = new RetryQueryRunner<>( new QueryRunner>() { @@ -135,7 +135,7 @@ public class RetryQueryRunnerTest { Map context = new ConcurrentHashMap<>(); context.put("count", 0); - context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList()); + context.put(Result.MISSING_SEGMENTS_KEY, new ArrayList<>()); RetryQueryRunner> runner = new RetryQueryRunner<>( new QueryRunner>() { @@ -157,7 +157,7 @@ public class RetryQueryRunnerTest new Result<>( DateTimes.nowUtc(), new TimeseriesResultValue( - 
Maps.newHashMap() + new HashMap<>() ) ) ) @@ -183,7 +183,7 @@ public class RetryQueryRunnerTest { Map context = new ConcurrentHashMap<>(); context.put("count", 0); - context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList()); + context.put(Result.MISSING_SEGMENTS_KEY, new ArrayList<>()); RetryQueryRunner> runner = new RetryQueryRunner<>( new QueryRunner>() { @@ -205,7 +205,7 @@ public class RetryQueryRunnerTest new Result<>( DateTimes.nowUtc(), new TimeseriesResultValue( - Maps.newHashMap() + new HashMap<>() ) ) ) @@ -230,7 +230,7 @@ public class RetryQueryRunnerTest public void testException() { Map context = new ConcurrentHashMap<>(); - context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList()); + context.put(Result.MISSING_SEGMENTS_KEY, new ArrayList<>()); RetryQueryRunner> runner = new RetryQueryRunner<>( new QueryRunner>() { @@ -263,7 +263,7 @@ public class RetryQueryRunnerTest { Map context = new ConcurrentHashMap<>(); context.put("count", 0); - context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList()); + context.put(Result.MISSING_SEGMENTS_KEY, new ArrayList<>()); RetryQueryRunner> runner = new RetryQueryRunner<>( new QueryRunner>() { @@ -288,7 +288,7 @@ public class RetryQueryRunnerTest new Result<>( DateTimes.nowUtc(), new TimeseriesResultValue( - Maps.newHashMap() + new HashMap<>() ) ) ) @@ -306,7 +306,7 @@ public class RetryQueryRunnerTest new Result<>( DateTimes.nowUtc(), new TimeseriesResultValue( - Maps.newHashMap() + new HashMap<>() ) ) ) @@ -321,7 +321,7 @@ public class RetryQueryRunnerTest new Result<>( DateTimes.nowUtc(), new TimeseriesResultValue( - Maps.newHashMap() + new HashMap<>() ) ) ) diff --git a/processing/src/test/java/org/apache/druid/query/SchemaEvolutionTest.java b/processing/src/test/java/org/apache/druid/query/SchemaEvolutionTest.java index f051d710532..2a5fc3da440 100644 --- a/processing/src/test/java/org/apache/druid/query/SchemaEvolutionTest.java +++ b/processing/src/test/java/org/apache/druid/query/SchemaEvolutionTest.java @@ -22,7 +22,6 @@ package org.apache.druid.query; import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Maps; import com.google.common.io.Closeables; import com.google.common.util.concurrent.MoreExecutors; import org.apache.druid.common.config.NullHandling; @@ -58,6 +57,7 @@ import org.junit.Test; import org.junit.rules.TemporaryFolder; import java.io.IOException; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -122,7 +122,7 @@ public class SchemaEvolutionTest ) ), (QueryToolChest>) factory.getToolchest() - ).run(QueryPlus.wrap(query), Maps.newHashMap()); + ).run(QueryPlus.wrap(query), new HashMap<>()); return results.toList(); } @@ -285,7 +285,7 @@ public class SchemaEvolutionTest ); // Only nonexistent(4) - Map result = Maps.newHashMap(); + Map result = new HashMap<>(); result.put("a", NullHandling.defaultLongValue()); result.put("b", NullHandling.defaultDoubleValue()); result.put("c", NullHandling.defaultLongValue()); diff --git a/processing/src/test/java/org/apache/druid/query/TimewarpOperatorTest.java b/processing/src/test/java/org/apache/druid/query/TimewarpOperatorTest.java index 97f5d253ab0..982ad1ddd60 100644 --- a/processing/src/test/java/org/apache/druid/query/TimewarpOperatorTest.java +++ b/processing/src/test/java/org/apache/druid/query/TimewarpOperatorTest.java @@ -22,7 +22,6 @@ package org.apache.druid.query; import com.google.common.collect.ImmutableList; import 
com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.granularity.PeriodGranularity; import org.apache.druid.java.util.common.guava.Sequence; @@ -38,6 +37,7 @@ import org.junit.Assert; import org.junit.Test; import java.util.Collections; +import java.util.HashMap; import java.util.Map; @@ -356,7 +356,7 @@ public class TimewarpOperatorTest new TimeseriesResultValue(ImmutableMap.of("metric", 3)) ) ), - queryRunner.run(QueryPlus.wrap(query), Maps.newHashMap()).toList() + queryRunner.run(QueryPlus.wrap(query), new HashMap<>()).toList() ); } } diff --git a/processing/src/test/java/org/apache/druid/query/UnionQueryRunnerTest.java b/processing/src/test/java/org/apache/druid/query/UnionQueryRunnerTest.java index cbe458d87fd..3cdaed0f0b5 100644 --- a/processing/src/test/java/org/apache/druid/query/UnionQueryRunnerTest.java +++ b/processing/src/test/java/org/apache/druid/query/UnionQueryRunnerTest.java @@ -20,13 +20,13 @@ package org.apache.druid.query; import com.google.common.collect.Iterables; -import com.google.common.collect.Maps; import junit.framework.Assert; import org.apache.druid.java.util.common.guava.Sequence; import org.apache.druid.java.util.common.guava.Sequences; import org.junit.Test; import java.util.Arrays; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -68,7 +68,7 @@ public class UnionQueryRunnerTest .intervals("2014-01-01T00:00:00Z/2015-01-01T00:00:00Z") .aggregators(QueryRunnerTestHelper.commonDoubleAggregators) .build(); - Map responseContext = Maps.newHashMap(); + Map responseContext = new HashMap<>(); Sequence result = runner.run(QueryPlus.wrap(q), responseContext); List res = result.toList(); Assert.assertEquals(Arrays.asList(1, 2, 3, 4, 5, 6), res); diff --git a/processing/src/test/java/org/apache/druid/query/aggregation/AggregationTestHelper.java b/processing/src/test/java/org/apache/druid/query/aggregation/AggregationTestHelper.java index 340212b79cd..27137b9d4c1 100644 --- a/processing/src/test/java/org/apache/druid/query/aggregation/AggregationTestHelper.java +++ b/processing/src/test/java/org/apache/druid/query/aggregation/AggregationTestHelper.java @@ -31,7 +31,6 @@ import com.google.common.base.Supplier; import com.google.common.base.Suppliers; import com.google.common.base.Throwables; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.io.Closeables; import com.google.common.util.concurrent.MoreExecutors; import org.apache.commons.io.IOUtils; @@ -94,6 +93,7 @@ import java.lang.reflect.Array; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -635,7 +635,7 @@ public class AggregationTestHelper implements Closeable toolChest ); - return baseRunner.run(QueryPlus.wrap(query), Maps.newHashMap()); + return baseRunner.run(QueryPlus.wrap(query), new HashMap<>()); } public QueryRunner makeStringSerdeQueryRunner( @@ -650,7 +650,7 @@ public class AggregationTestHelper implements Closeable public Sequence run(QueryPlus queryPlus, Map map) { try { - Sequence resultSeq = baseRunner.run(queryPlus, Maps.newHashMap()); + Sequence resultSeq = baseRunner.run(queryPlus, new HashMap<>()); final Yielder yielder = resultSeq.toYielder( null, new YieldingAccumulator() diff --git 
a/processing/src/test/java/org/apache/druid/query/aggregation/JavaScriptAggregatorBenchmark.java b/processing/src/test/java/org/apache/druid/query/aggregation/JavaScriptAggregatorBenchmark.java index 033778bbd34..79d63c3d4e9 100644 --- a/processing/src/test/java/org/apache/druid/query/aggregation/JavaScriptAggregatorBenchmark.java +++ b/processing/src/test/java/org/apache/druid/query/aggregation/JavaScriptAggregatorBenchmark.java @@ -21,9 +21,9 @@ package org.apache.druid.query.aggregation; import com.google.caliper.Runner; import com.google.caliper.SimpleBenchmark; -import com.google.common.collect.Maps; import java.util.Collections; +import java.util.HashMap; import java.util.Map; @@ -33,7 +33,7 @@ import java.util.Map; public class JavaScriptAggregatorBenchmark extends SimpleBenchmark { - protected static final Map scriptDoubleSum = Maps.newHashMap(); + protected static final Map scriptDoubleSum = new HashMap<>(); static { scriptDoubleSum.put("fnAggregate", "function aggregate(current, a) { return current + a }"); scriptDoubleSum.put("fnReset", "function reset() { return 0 }"); diff --git a/processing/src/test/java/org/apache/druid/query/aggregation/JavaScriptAggregatorTest.java b/processing/src/test/java/org/apache/druid/query/aggregation/JavaScriptAggregatorTest.java index 0e276f772c0..960b4701b42 100644 --- a/processing/src/test/java/org/apache/druid/query/aggregation/JavaScriptAggregatorTest.java +++ b/processing/src/test/java/org/apache/druid/query/aggregation/JavaScriptAggregatorTest.java @@ -20,7 +20,6 @@ package org.apache.druid.query.aggregation; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Maps; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.js.JavaScriptConfig; import org.apache.druid.query.dimension.DimensionSpec; @@ -36,12 +35,13 @@ import org.junit.rules.ExpectedException; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.Map; public class JavaScriptAggregatorTest { - protected static final Map sumLogATimesBPlusTen = Maps.newHashMap(); - protected static final Map scriptDoubleSum = Maps.newHashMap(); + protected static final Map sumLogATimesBPlusTen = new HashMap<>(); + protected static final Map scriptDoubleSum = new HashMap<>(); final ColumnSelectorFactory DUMMY_COLUMN_SELECTOR_FACTORY = new ColumnSelectorFactory() { diff --git a/processing/src/test/java/org/apache/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java b/processing/src/test/java/org/apache/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java index e720fceb5fd..ec56024ef51 100644 --- a/processing/src/test/java/org/apache/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java +++ b/processing/src/test/java/org/apache/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java @@ -26,7 +26,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Iterators; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import org.apache.druid.common.config.NullHandling; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.js.JavaScriptConfig; @@ -56,6 +55,7 @@ import org.junit.Test; import javax.annotation.Nullable; import java.nio.ByteBuffer; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -72,8 +72,8 @@ public class CardinalityAggregatorTest public 
TestDimensionSelector(Iterable values, ExtractionFn exFn) { - this.lookup = Maps.newHashMap(); - this.ids = Maps.newHashMap(); + this.lookup = new HashMap<>(); + this.ids = new HashMap<>(); this.exFn = exFn; int index = 0; diff --git a/processing/src/test/java/org/apache/druid/query/aggregation/hyperloglog/HyperUniquesAggregationTest.java b/processing/src/test/java/org/apache/druid/query/aggregation/hyperloglog/HyperUniquesAggregationTest.java index bac084ce949..e3b3fa8f885 100644 --- a/processing/src/test/java/org/apache/druid/query/aggregation/hyperloglog/HyperUniquesAggregationTest.java +++ b/processing/src/test/java/org/apache/druid/query/aggregation/hyperloglog/HyperUniquesAggregationTest.java @@ -19,7 +19,6 @@ package org.apache.druid.query.aggregation.hyperloglog; -import com.google.common.collect.Lists; import org.apache.druid.data.input.MapBasedRow; import org.apache.druid.jackson.AggregatorsModule; import org.apache.druid.java.util.common.granularity.Granularities; @@ -35,6 +34,7 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.io.File; +import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; @@ -55,7 +55,7 @@ public class HyperUniquesAggregationTest @Parameterized.Parameters(name = "{0}") public static Collection constructorFeeder() { - final List constructors = Lists.newArrayList(); + final List constructors = new ArrayList<>(); for (GroupByQueryConfig config : GroupByQueryRunnerTest.testConfigs()) { constructors.add(new Object[]{config}); } diff --git a/processing/src/test/java/org/apache/druid/query/aggregation/post/FinalizingFieldAccessPostAggregatorTest.java b/processing/src/test/java/org/apache/druid/query/aggregation/post/FinalizingFieldAccessPostAggregatorTest.java index a4c19d70ad5..6f22c7246cc 100644 --- a/processing/src/test/java/org/apache/druid/query/aggregation/post/FinalizingFieldAccessPostAggregatorTest.java +++ b/processing/src/test/java/org/apache/druid/query/aggregation/post/FinalizingFieldAccessPostAggregatorTest.java @@ -22,7 +22,6 @@ package org.apache.druid.query.aggregation.post; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import org.apache.druid.data.input.MapBasedRow; import org.apache.druid.jackson.AggregatorsModule; @@ -45,7 +44,9 @@ import org.junit.rules.TemporaryFolder; import java.io.File; import java.io.IOException; +import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -63,7 +64,7 @@ public class FinalizingFieldAccessPostAggregatorTest agg.aggregate(); agg.aggregate(); - Map metricValues = Maps.newHashMap(); + Map metricValues = new HashMap<>(); metricValues.put(aggName, agg.get()); FinalizingFieldAccessPostAggregator postAgg = new FinalizingFieldAccessPostAggregator("final_rows", aggName); @@ -86,7 +87,7 @@ public class FinalizingFieldAccessPostAggregatorTest // Check that the class matches exactly; see https://github.com/apache/incubator-druid/issues/6063 Assert.assertEquals(FinalizingFieldAccessPostAggregator.class, postAgg.getClass()); - Map metricValues = Maps.newHashMap(); + Map metricValues = new HashMap<>(); metricValues.put(aggName, "test"); Assert.assertEquals(new Long(3L), postAgg.compute(metricValues)); @@ -106,7 +107,7 @@ public class FinalizingFieldAccessPostAggregatorTest "final_billy", 
aggName, ImmutableMap.of(aggName, aggFactory) ); - Map metricValues = Maps.newHashMap(); + Map metricValues = new HashMap<>(); metricValues.put(aggName, "test"); List postAggsList = Lists.newArrayList( @@ -144,7 +145,7 @@ public class FinalizingFieldAccessPostAggregatorTest "final_billy", aggName, ImmutableMap.of(aggName, aggFactory) ); - List computedValues = Lists.newArrayList(); + List computedValues = new ArrayList<>(); computedValues.add(postAgg.compute(ImmutableMap.of(aggName, "test_val1"))); computedValues.add(postAgg.compute(ImmutableMap.of(aggName, "test_val2"))); computedValues.add(postAgg.compute(ImmutableMap.of(aggName, "test_val3"))); @@ -168,8 +169,8 @@ public class FinalizingFieldAccessPostAggregatorTest FinalizingFieldAccessPostAggregator postAgg = buildDecorated( "final_billy", "joe", ImmutableMap.of(aggName, aggFactory)); - List computedValues = Lists.newArrayList(); - Map forNull = Maps.newHashMap(); + List computedValues = new ArrayList<>(); + Map forNull = new HashMap<>(); forNull.put("joe", null); // guava does not allow the value to be null. computedValues.add(postAgg.compute(ImmutableMap.of("joe", "test_val1"))); computedValues.add(postAgg.compute(ImmutableMap.of("joe", "test_val2"))); diff --git a/processing/src/test/java/org/apache/druid/query/aggregation/post/JavaScriptPostAggregatorTest.java b/processing/src/test/java/org/apache/druid/query/aggregation/post/JavaScriptPostAggregatorTest.java index c454d57a58a..63e4978cb7d 100644 --- a/processing/src/test/java/org/apache/druid/query/aggregation/post/JavaScriptPostAggregatorTest.java +++ b/processing/src/test/java/org/apache/druid/query/aggregation/post/JavaScriptPostAggregatorTest.java @@ -20,7 +20,6 @@ package org.apache.druid.query.aggregation.post; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import org.apache.druid.js.JavaScriptConfig; import org.junit.Assert; import org.junit.Rule; @@ -40,7 +39,7 @@ public class JavaScriptPostAggregatorTest { JavaScriptPostAggregator javaScriptPostAggregator; - Map metricValues = Maps.newHashMap(); + Map metricValues = new HashMap<>(); metricValues.put("delta", -10.0); metricValues.put("total", 100.0); diff --git a/processing/src/test/java/org/apache/druid/query/cache/CacheKeyBuilderTest.java b/processing/src/test/java/org/apache/druid/query/cache/CacheKeyBuilderTest.java index 6d8083c6677..3133044fea9 100644 --- a/processing/src/test/java/org/apache/druid/query/cache/CacheKeyBuilderTest.java +++ b/processing/src/test/java/org/apache/druid/query/cache/CacheKeyBuilderTest.java @@ -26,6 +26,7 @@ import org.apache.druid.java.util.common.StringUtils; import org.junit.Test; import java.nio.ByteBuffer; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -155,7 +156,7 @@ public class CacheKeyBuilderTest @Test public void testNotEqualStrings() { - final List keys = Lists.newArrayList(); + final List keys = new ArrayList<>(); keys.add( new CacheKeyBuilder((byte) 10) .appendString("test") @@ -239,7 +240,7 @@ public class CacheKeyBuilderTest } }; - final List keys = Lists.newArrayList(); + final List keys = new ArrayList<>(); keys.add( new CacheKeyBuilder((byte) 10) .appendCacheable(test) @@ -268,13 +269,13 @@ public class CacheKeyBuilderTest keys.add( new CacheKeyBuilder((byte) 10) .appendCacheables(Collections.singletonList(testtest)) - .appendCacheables(Lists.newArrayList()) + .appendCacheables(new ArrayList<>()) .build() ); keys.add( new CacheKeyBuilder((byte) 10) - 
.appendCacheables(Lists.newArrayList()) + .appendCacheables(new ArrayList<>()) .appendCacheables(Collections.singletonList(testtest)) .build() ); @@ -319,7 +320,7 @@ public class CacheKeyBuilderTest public void testEmptyOrNullCacheables() { final byte[] key1 = new CacheKeyBuilder((byte) 10) - .appendCacheables(Lists.newArrayList()) + .appendCacheables(new ArrayList<>()) .build(); final byte[] key2 = new CacheKeyBuilder((byte) 10) diff --git a/processing/src/test/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java b/processing/src/test/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java index b6fad6066c6..220d2852382 100644 --- a/processing/src/test/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java @@ -22,7 +22,6 @@ package org.apache.druid.query.datasourcemetadata; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; import org.apache.druid.data.input.MapBasedInputRow; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.DateTimes; @@ -48,6 +47,7 @@ import org.junit.Assert; import org.junit.Test; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; @@ -137,7 +137,7 @@ public class DataSourceMetadataQueryTest .dataSource("testing") .build(); Map context = new ConcurrentHashMap<>(); - context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList()); + context.put(Result.MISSING_SEGMENTS_KEY, new ArrayList<>()); Iterable> results = runner.run(QueryPlus.wrap(dataSourceMetadataQuery), context).toList(); DataSourceMetadataResultValue val = results.iterator().next().getValue(); diff --git a/processing/src/test/java/org/apache/druid/query/extraction/CascadeExtractionFnTest.java b/processing/src/test/java/org/apache/druid/query/extraction/CascadeExtractionFnTest.java index bc4c6e6d741..1987c5f1381 100644 --- a/processing/src/test/java/org/apache/druid/query/extraction/CascadeExtractionFnTest.java +++ b/processing/src/test/java/org/apache/druid/query/extraction/CascadeExtractionFnTest.java @@ -29,6 +29,7 @@ import org.junit.Assert; import org.junit.Test; import java.util.Arrays; +import java.util.LinkedHashSet; import java.util.Set; public class CascadeExtractionFnTest @@ -69,7 +70,7 @@ public class CascadeExtractionFnTest CascadeExtractionFn cascadeExtractionFn = new CascadeExtractionFn(fns); - Set extracted = Sets.newLinkedHashSet(); + Set extracted = new LinkedHashSet<>(); for (String path : paths) { extracted.add(cascadeExtractionFn.apply(path)); } diff --git a/processing/src/test/java/org/apache/druid/query/extraction/MapLookupExtractorTest.java b/processing/src/test/java/org/apache/druid/query/extraction/MapLookupExtractorTest.java index dbae78442ca..0fb777ad3c5 100644 --- a/processing/src/test/java/org/apache/druid/query/extraction/MapLookupExtractorTest.java +++ b/processing/src/test/java/org/apache/druid/query/extraction/MapLookupExtractorTest.java @@ -27,6 +27,7 @@ import org.junit.Test; import java.util.Arrays; import java.util.Collections; +import java.util.HashSet; import java.util.Map; @@ -43,7 +44,7 @@ public class MapLookupExtractorTest if (NullHandling.sqlCompatible()) { Assert.assertEquals( "Null value should be equal to empty list", - 
Sets.newHashSet(), + new HashSet<>(), Sets.newHashSet(fn.unapply((String) null)) ); } else { diff --git a/processing/src/test/java/org/apache/druid/query/extraction/MatchingDimExtractionFnTest.java b/processing/src/test/java/org/apache/druid/query/extraction/MatchingDimExtractionFnTest.java index 06711476be4..97efc985c4c 100644 --- a/processing/src/test/java/org/apache/druid/query/extraction/MatchingDimExtractionFnTest.java +++ b/processing/src/test/java/org/apache/druid/query/extraction/MatchingDimExtractionFnTest.java @@ -20,13 +20,13 @@ package org.apache.druid.query.extraction; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.Sets; import org.apache.druid.common.config.NullHandling; import org.apache.druid.jackson.DefaultObjectMapper; import org.junit.Assert; import org.junit.Test; import java.util.Arrays; +import java.util.HashSet; import java.util.List; import java.util.Set; @@ -52,7 +52,7 @@ public class MatchingDimExtractionFnTest String regex = ".*[Tt][Oo].*"; ExtractionFn extractionFn = new MatchingDimExtractionFn(regex); List expected = Arrays.asList("Quito", "Tokyo", "Stockholm", "Pretoria", "Wellington"); - Set extracted = Sets.newHashSet(); + Set extracted = new HashSet<>(); for (String str : testStrings) { String res = extractionFn.apply(str); diff --git a/processing/src/test/java/org/apache/druid/query/extraction/RegexDimExtractionFnTest.java b/processing/src/test/java/org/apache/druid/query/extraction/RegexDimExtractionFnTest.java index 9695a933fa3..42757983f53 100644 --- a/processing/src/test/java/org/apache/druid/query/extraction/RegexDimExtractionFnTest.java +++ b/processing/src/test/java/org/apache/druid/query/extraction/RegexDimExtractionFnTest.java @@ -27,6 +27,7 @@ import org.apache.druid.jackson.DefaultObjectMapper; import org.junit.Assert; import org.junit.Test; +import java.util.LinkedHashSet; import java.util.Set; /** @@ -58,7 +59,7 @@ public class RegexDimExtractionFnTest { String regex = "/([^/]+)/"; ExtractionFn extractionFn = new RegexDimExtractionFn(regex, false, null); - Set extracted = Sets.newLinkedHashSet(); + Set extracted = new LinkedHashSet<>(); for (String path : paths) { extracted.add(extractionFn.apply(path)); @@ -73,7 +74,7 @@ public class RegexDimExtractionFnTest { String regex = "^/([^/]+/[^/]+)(/|$)"; ExtractionFn extractionFn = new RegexDimExtractionFn(regex, false, null); - Set extracted = Sets.newLinkedHashSet(); + Set extracted = new LinkedHashSet<>(); for (String path : paths) { extracted.add(extractionFn.apply(path)); @@ -93,7 +94,7 @@ public class RegexDimExtractionFnTest { String regex = "/([^/]{4})/"; ExtractionFn extractionFn = new RegexDimExtractionFn(regex, 0, true, null); - Set extracted = Sets.newLinkedHashSet(); + Set extracted = new LinkedHashSet<>(); for (String path : paths) { extracted.add(extractionFn.apply(path)); @@ -110,7 +111,7 @@ public class RegexDimExtractionFnTest { String regex = "^/([^/]+)/([^/]+)"; ExtractionFn extractionFn = new RegexDimExtractionFn(regex, 2, true, null); - Set extracted = Sets.newLinkedHashSet(); + Set extracted = new LinkedHashSet<>(); for (String path : paths) { extracted.add(extractionFn.apply(path)); @@ -130,7 +131,7 @@ public class RegexDimExtractionFnTest { String regex = "(.)"; ExtractionFn extractionFn = new RegexDimExtractionFn(regex, false, null); - Set extracted = Sets.newLinkedHashSet(); + Set extracted = new LinkedHashSet<>(); for (String testString : testStrings) { extracted.add(extractionFn.apply(testString)); @@ -191,7 +192,7 @@ public class 
RegexDimExtractionFnTest { String regex = "(a\\w*)"; ExtractionFn extractionFn = new RegexDimExtractionFn(regex, true, "foobar"); - Set extracted = Sets.newLinkedHashSet(); + Set extracted = new LinkedHashSet<>(); for (String testString : testStrings) { extracted.add(extractionFn.apply(testString)); @@ -210,7 +211,7 @@ public class RegexDimExtractionFnTest Assert.assertArrayEquals(expectedCacheKey, cacheKey); ExtractionFn nullExtractionFn = new RegexDimExtractionFn(regex, true, null); - Set extracted2 = Sets.newLinkedHashSet(); + Set extracted2 = new LinkedHashSet<>(); for (String testString : testStrings) { extracted2.add(nullExtractionFn.apply(testString)); diff --git a/processing/src/test/java/org/apache/druid/query/extraction/SearchQuerySpecDimExtractionFnTest.java b/processing/src/test/java/org/apache/druid/query/extraction/SearchQuerySpecDimExtractionFnTest.java index 74c2787a15a..39047bc5245 100644 --- a/processing/src/test/java/org/apache/druid/query/extraction/SearchQuerySpecDimExtractionFnTest.java +++ b/processing/src/test/java/org/apache/druid/query/extraction/SearchQuerySpecDimExtractionFnTest.java @@ -21,13 +21,13 @@ package org.apache.druid.query.extraction; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.query.search.FragmentSearchQuerySpec; import org.apache.druid.query.search.SearchQuerySpec; import org.junit.Assert; import org.junit.Test; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -54,7 +54,7 @@ public class SearchQuerySpecDimExtractionFnTest ); ExtractionFn extractionFn = new SearchQuerySpecDimExtractionFn(spec); List expected = ImmutableList.of("Kyoto", "Tokyo", "Toyokawa", "Yorktown"); - List extracted = Lists.newArrayList(); + List extracted = new ArrayList<>(); for (String str : testStrings) { String res = extractionFn.apply(str); @@ -75,7 +75,7 @@ public class SearchQuerySpecDimExtractionFnTest ); ExtractionFn extractionFn = new SearchQuerySpecDimExtractionFn(spec); List expected = ImmutableList.of("Kyoto"); - List extracted = Lists.newArrayList(); + List extracted = new ArrayList<>(); for (String str : testStrings) { String res = extractionFn.apply(str); @@ -96,7 +96,7 @@ public class SearchQuerySpecDimExtractionFnTest ); ExtractionFn extractionFn = new SearchQuerySpecDimExtractionFn(spec); List expected = ImmutableList.of("Tokyo", "Toyokawa"); - List extracted = Lists.newArrayList(); + List extracted = new ArrayList<>(); for (String str : testStrings) { String res = extractionFn.apply(str); diff --git a/processing/src/test/java/org/apache/druid/query/extraction/TimeDimExtractionFnTest.java b/processing/src/test/java/org/apache/druid/query/extraction/TimeDimExtractionFnTest.java index 07513a6ee13..3b0720667c0 100644 --- a/processing/src/test/java/org/apache/druid/query/extraction/TimeDimExtractionFnTest.java +++ b/processing/src/test/java/org/apache/druid/query/extraction/TimeDimExtractionFnTest.java @@ -20,13 +20,13 @@ package org.apache.druid.query.extraction; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.Sets; import org.apache.druid.common.config.NullHandling; import org.apache.druid.jackson.DefaultObjectMapper; import org.junit.Assert; import org.junit.Test; import java.util.Arrays; +import java.util.HashSet; import java.util.Set; /** @@ -61,7 +61,7 @@ public class TimeDimExtractionFnTest @Test public void 
testMonthExtraction() { - Set months = Sets.newHashSet(); + Set months = new HashSet<>(); ExtractionFn extractionFn = new TimeDimExtractionFn("MM/dd/yyyy", "MM/yyyy", false); for (String dim : dims) { @@ -78,7 +78,7 @@ public class TimeDimExtractionFnTest @Test public void testMonthExtractionJoda() { - Set months = Sets.newHashSet(); + Set months = new HashSet<>(); ExtractionFn extractionFn = new TimeDimExtractionFn("MM/dd/yyyy", "MM/yyyy", true); for (String dim : dims) { @@ -95,7 +95,7 @@ public class TimeDimExtractionFnTest @Test public void testQuarterExtraction() { - Set quarters = Sets.newHashSet(); + Set quarters = new HashSet<>(); ExtractionFn extractionFn = new TimeDimExtractionFn("MM/dd/yyyy", "QQQ/yyyy", false); for (String dim : dims) { diff --git a/processing/src/test/java/org/apache/druid/query/filter/DimFilterUtilsTest.java b/processing/src/test/java/org/apache/druid/query/filter/DimFilterUtilsTest.java index 77705073508..b8fa4c2b311 100644 --- a/processing/src/test/java/org/apache/druid/query/filter/DimFilterUtilsTest.java +++ b/processing/src/test/java/org/apache/druid/query/filter/DimFilterUtilsTest.java @@ -24,16 +24,16 @@ import com.google.common.base.Optional; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableRangeSet; import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Maps; import com.google.common.collect.Range; import com.google.common.collect.RangeSet; -import com.google.common.collect.Sets; import org.apache.druid.timeline.partition.ShardSpec; import org.easymock.EasyMock; import org.junit.Assert; import org.junit.Test; import javax.annotation.Nullable; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -81,8 +81,8 @@ public class DimFilterUtilsTest Set result = DimFilterUtils.filterShards(filter, input, CONVERTER); Assert.assertEquals(expected, result); - Map>> dimensionRangeMap = Maps.newHashMap(); - result = Sets.newHashSet(); + Map>> dimensionRangeMap = new HashMap<>(); + result = new HashSet<>(); for (ShardSpec shard : input) { result.addAll(DimFilterUtils.filterShards(filter, ImmutableList.of(shard), CONVERTER, dimensionRangeMap)); } diff --git a/processing/src/test/java/org/apache/druid/query/groupby/GroupByLimitPushDownInsufficientBufferTest.java b/processing/src/test/java/org/apache/druid/query/groupby/GroupByLimitPushDownInsufficientBufferTest.java index ee10a93d370..8d2885ed75d 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/GroupByLimitPushDownInsufficientBufferTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/GroupByLimitPushDownInsufficientBufferTest.java @@ -26,8 +26,6 @@ import com.google.common.base.Supplier; import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.io.Files; import com.google.common.util.concurrent.ListenableFuture; import org.apache.commons.io.FileUtils; @@ -85,6 +83,7 @@ import org.junit.Test; import java.io.File; import java.nio.ByteBuffer; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; @@ -104,8 +103,8 @@ public class GroupByLimitPushDownInsufficientBufferTest private File tmpDir; private QueryRunnerFactory groupByFactory; private QueryRunnerFactory tooSmallGroupByFactory; - private List 
incrementalIndices = Lists.newArrayList(); - private List groupByIndices = Lists.newArrayList(); + private List incrementalIndices = new ArrayList<>(); + private List groupByIndices = new ArrayList<>(); private ExecutorService executorService; private Closer resourceCloser; @@ -485,7 +484,7 @@ public class GroupByLimitPushDownInsufficientBufferTest .setGranularity(Granularities.ALL) .build(); - Sequence queryResult = theRunner3.run(QueryPlus.wrap(query), Maps.newHashMap()); + Sequence queryResult = theRunner3.run(QueryPlus.wrap(query), new HashMap<>()); List results = queryResult.toList(); Row expectedRow0 = GroupByQueryRunnerTestHelper.createExpectedRow( @@ -579,7 +578,7 @@ public class GroupByLimitPushDownInsufficientBufferTest ) .build(); - Sequence queryResult = theRunner3.run(QueryPlus.wrap(query), Maps.newHashMap()); + Sequence queryResult = theRunner3.run(QueryPlus.wrap(query), new HashMap<>()); List results = queryResult.toList(); Row expectedRow0 = GroupByQueryRunnerTestHelper.createExpectedRow( @@ -606,7 +605,7 @@ public class GroupByLimitPushDownInsufficientBufferTest private List> getRunner1() { - List> runners = Lists.newArrayList(); + List> runners = new ArrayList<>(); QueryableIndex index = groupByIndices.get(0); QueryRunner runner = makeQueryRunner( groupByFactory, @@ -619,7 +618,7 @@ public class GroupByLimitPushDownInsufficientBufferTest private List> getRunner2() { - List> runners = Lists.newArrayList(); + List> runners = new ArrayList<>(); QueryableIndex index2 = groupByIndices.get(1); QueryRunner tooSmallRunner = makeQueryRunner( tooSmallGroupByFactory, diff --git a/processing/src/test/java/org/apache/druid/query/groupby/GroupByLimitPushDownMultiNodeMergeTest.java b/processing/src/test/java/org/apache/druid/query/groupby/GroupByLimitPushDownMultiNodeMergeTest.java index 481c0d48ece..bea5670ea94 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/GroupByLimitPushDownMultiNodeMergeTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/GroupByLimitPushDownMultiNodeMergeTest.java @@ -26,8 +26,6 @@ import com.google.common.base.Supplier; import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.io.Files; import com.google.common.util.concurrent.ListenableFuture; import org.apache.commons.io.FileUtils; @@ -95,6 +93,7 @@ import org.junit.Test; import java.io.File; import java.nio.ByteBuffer; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; @@ -114,8 +113,8 @@ public class GroupByLimitPushDownMultiNodeMergeTest private File tmpDir; private QueryRunnerFactory groupByFactory; private QueryRunnerFactory groupByFactory2; - private List incrementalIndices = Lists.newArrayList(); - private List groupByIndices = Lists.newArrayList(); + private List incrementalIndices = new ArrayList<>(); + private List groupByIndices = new ArrayList<>(); private ExecutorService executorService; private Closer resourceCloser; @@ -549,7 +548,7 @@ public class GroupByLimitPushDownMultiNodeMergeTest .setGranularity(Granularities.ALL) .build(); - Sequence queryResult = finalRunner.run(QueryPlus.wrap(query), Maps.newHashMap()); + Sequence queryResult = finalRunner.run(QueryPlus.wrap(query), new HashMap<>()); List results = queryResult.toList(); Row expectedRow0 = GroupByQueryRunnerTestHelper.createExpectedRow( @@ -670,7 +669,7 @@ 
public class GroupByLimitPushDownMultiNodeMergeTest .setGranularity(Granularities.ALL) .build(); - Sequence queryResult = finalRunner.run(QueryPlus.wrap(query), Maps.newHashMap()); + Sequence queryResult = finalRunner.run(QueryPlus.wrap(query), new HashMap<>()); List results = queryResult.toList(); Row expectedRow0 = GroupByQueryRunnerTestHelper.createExpectedRow( @@ -707,7 +706,7 @@ public class GroupByLimitPushDownMultiNodeMergeTest private List> getRunner1(int qIndexNumber) { - List> runners = Lists.newArrayList(); + List> runners = new ArrayList<>(); QueryableIndex index = groupByIndices.get(qIndexNumber); QueryRunner runner = makeQueryRunner( groupByFactory, @@ -720,7 +719,7 @@ public class GroupByLimitPushDownMultiNodeMergeTest private List> getRunner2(int qIndexNumber) { - List> runners = Lists.newArrayList(); + List> runners = new ArrayList<>(); QueryableIndex index2 = groupByIndices.get(qIndexNumber); QueryRunner tooSmallRunner = makeQueryRunner( groupByFactory2, diff --git a/processing/src/test/java/org/apache/druid/query/groupby/GroupByMultiSegmentTest.java b/processing/src/test/java/org/apache/druid/query/groupby/GroupByMultiSegmentTest.java index 2cbe5e7b0f7..2f10374e330 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/GroupByMultiSegmentTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/GroupByMultiSegmentTest.java @@ -24,8 +24,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.dataformat.smile.SmileFactory; import com.google.common.base.Supplier; import com.google.common.base.Suppliers; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.io.Files; import com.google.common.util.concurrent.ListenableFuture; import org.apache.commons.io.FileUtils; @@ -82,6 +80,7 @@ import org.junit.Test; import java.io.File; import java.nio.ByteBuffer; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; @@ -99,8 +98,8 @@ public class GroupByMultiSegmentTest private File tmpDir; private QueryRunnerFactory groupByFactory; - private List incrementalIndices = Lists.newArrayList(); - private List groupByIndices = Lists.newArrayList(); + private List incrementalIndices = new ArrayList<>(); + private List groupByIndices = new ArrayList<>(); private ExecutorService executorService; private Closer resourceCloser; @@ -342,7 +341,7 @@ public class GroupByMultiSegmentTest .setGranularity(Granularities.ALL) .build(); - Sequence queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap()); + Sequence queryResult = theRunner.run(QueryPlus.wrap(query), new HashMap<>()); List results = queryResult.toList(); Row expectedRow = GroupByQueryRunnerTestHelper.createExpectedRow( @@ -357,7 +356,7 @@ public class GroupByMultiSegmentTest private List> makeGroupByMultiRunners() { - List> runners = Lists.newArrayList(); + List> runners = new ArrayList<>(); for (QueryableIndex qindex : groupByIndices) { QueryRunner runner = makeQueryRunner( diff --git a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryMergeBufferTest.java b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryMergeBufferTest.java index abcf40e1ed4..b013195f6b3 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryMergeBufferTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryMergeBufferTest.java @@ -49,6 +49,7 @@ import org.junit.runners.Parameterized; import 
org.junit.runners.Parameterized.Parameters; import java.nio.ByteBuffer; +import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; @@ -211,7 +212,7 @@ public class GroupByQueryMergeBufferTest @Parameters(name = "{0}") public static Collection constructorFeeder() { - final List args = Lists.newArrayList(); + final List args = new ArrayList<>(); for (QueryRunner runner : QueryRunnerTestHelper.makeQueryRunners(factory)) { args.add(new Object[]{runner}); } diff --git a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerFactoryTest.java b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerFactoryTest.java index 6b1fa92a858..c51fd2ed4da 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerFactoryTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerFactoryTest.java @@ -21,7 +21,6 @@ package org.apache.druid.query.groupby; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Maps; import org.apache.druid.data.input.Row; import org.apache.druid.data.input.impl.CSVParseSpec; import org.apache.druid.data.input.impl.DimensionsSpec; @@ -51,6 +50,7 @@ import org.junit.Test; import java.io.IOException; import java.util.Arrays; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -128,7 +128,7 @@ public class GroupByQueryRunnerFactoryTest } ); - Sequence result = mergedRunner.run(QueryPlus.wrap(query), Maps.newHashMap()); + Sequence result = mergedRunner.run(QueryPlus.wrap(query), new HashMap<>()); List expectedResults = Arrays.asList( GroupByQueryRunnerTestHelper.createExpectedRow("1970-01-01T00:00:00.000Z", "tags", "t1", "count", 2L), diff --git a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerFailureTest.java b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerFailureTest.java index cac1ac443c6..a54a210bc32 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerFailureTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerFailureTest.java @@ -24,7 +24,6 @@ import com.google.common.base.Supplier; import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; import com.google.common.util.concurrent.MoreExecutors; import org.apache.druid.collections.CloseableDefaultBlockingPool; import org.apache.druid.collections.CloseableStupidPool; @@ -54,6 +53,7 @@ import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; import java.nio.ByteBuffer; +import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; @@ -174,7 +174,7 @@ public class GroupByQueryRunnerFailureTest @Parameters(name = "{0}") public static Collection constructorFeeder() { - final List args = Lists.newArrayList(); + final List args = new ArrayList<>(); for (QueryRunner runner : QueryRunnerTestHelper.makeQueryRunners(factory)) { args.add(new Object[]{runner}); } diff --git a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerTest.java b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerTest.java index 50a2d1da2d3..99a1aea4299 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerTest.java +++ 
b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerTest.java @@ -27,7 +27,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import com.google.common.collect.Sets; import com.google.common.util.concurrent.MoreExecutors; @@ -408,7 +407,7 @@ public class GroupByQueryRunnerTest @Parameterized.Parameters(name = "{0}") public static Collection constructorFeeder() { - final List constructors = Lists.newArrayList(); + final List constructors = new ArrayList<>(); for (GroupByQueryConfig config : testConfigs()) { final Pair factoryAndCloser = makeQueryRunnerFactory(config); final GroupByQueryRunnerFactory factory = factoryAndCloser.lhs; @@ -1536,7 +1535,7 @@ public class GroupByQueryRunnerTest .builder() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList()).setAggregatorSpecs(new CountAggregatorFactory("count")) + .setDimensions(new ArrayList<>()).setAggregatorSpecs(new CountAggregatorFactory("count")) .setGranularity(QueryRunnerTestHelper.allGran) .setContext(ImmutableMap.of("maxOnDiskStorage", 0, "bufferGrouperMaxSize", 0)) .build(); @@ -2380,7 +2379,7 @@ public class GroupByQueryRunnerTest GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "travel", "rows", 2L, "idx", 243L) ); - Map context = Maps.newHashMap(); + Map context = new HashMap<>(); TestHelper.assertExpectedObjects(expectedResults, mergedRunner.run(QueryPlus.wrap(fullQuery), context), "merged"); List allGranExpectedResults = Arrays.asList( @@ -2437,7 +2436,7 @@ public class GroupByQueryRunnerTest QueryRunner mergeRunner = factory.getToolchest().mergeResults(runner); - Map context = Maps.newHashMap(); + Map context = new HashMap<>(); TestHelper.assertExpectedObjects( Iterables.limit(expectedResults, limit), mergeRunner.run(QueryPlus.wrap(fullQuery), context), @@ -2481,7 +2480,7 @@ public class GroupByQueryRunnerTest QueryRunner mergeRunner = factory.getToolchest().mergeResults(runner); - Map context = Maps.newHashMap(); + Map context = new HashMap<>(); TestHelper.assertExpectedObjects( Iterables.limit(expectedResults, limit), mergeRunner.run(QueryPlus.wrap(fullQuery), context), @@ -2533,7 +2532,7 @@ public class GroupByQueryRunnerTest QueryRunner mergeRunner = factory.getToolchest().mergeResults(runner); - Map context = Maps.newHashMap(); + Map context = new HashMap<>(); TestHelper.assertExpectedObjects( Iterables.limit(expectedResults, limit), mergeRunner.run(QueryPlus.wrap(fullQuery), context), @@ -2652,7 +2651,7 @@ public class GroupByQueryRunnerTest } ); - Map context = Maps.newHashMap(); + Map context = new HashMap<>(); TestHelper.assertExpectedObjects(expectedResults, mergedRunner.run(QueryPlus.wrap(fullQuery), context), "merged"); } @@ -2683,7 +2682,7 @@ public class GroupByQueryRunnerTest GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "mezzanine", "rows", 6L, "idx", 4420L) ); - Map context = Maps.newHashMap(); + Map context = new HashMap<>(); QueryRunner mergeRunner = factory.getToolchest().mergeResults(runner); TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(QueryPlus.wrap(query), context), "no-limit"); @@ -2775,7 +2774,7 @@ public class GroupByQueryRunnerTest GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "automotive", "rows", 2L, "idx", 
269L) ); - Map context = Maps.newHashMap(); + Map context = new HashMap<>(); QueryRunner mergeRunner = factory.getToolchest().mergeResults(runner); TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(QueryPlus.wrap(query), context), "no-limit"); TestHelper.assertExpectedObjects( @@ -2813,7 +2812,7 @@ public class GroupByQueryRunnerTest new Object[]{"2011-04-01", "technology", 2L, 178.24917602539062D} ); - Map context = Maps.newHashMap(); + Map context = new HashMap<>(); QueryRunner mergeRunner = factory.getToolchest().mergeResults(runner); TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(QueryPlus.wrap(query), context), "no-limit"); TestHelper.assertExpectedObjects( @@ -2850,7 +2849,7 @@ public class GroupByQueryRunnerTest GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "travel", "rows", 2L, "idx", 243L) ); - Map context = Maps.newHashMap(); + Map context = new HashMap<>(); QueryRunner mergeRunner = factory.getToolchest().mergeResults(runner); TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(QueryPlus.wrap(query), context), "no-limit"); TestHelper.assertExpectedObjects( @@ -3387,7 +3386,7 @@ public class GroupByQueryRunnerTest } ); - Map context = Maps.newHashMap(); + Map context = new HashMap<>(); TestHelper.assertExpectedObjects(expectedResults, mergedRunner.run(QueryPlus.wrap(fullQuery), context), "merged"); } @@ -3685,7 +3684,7 @@ public class GroupByQueryRunnerTest } ); - Map context = Maps.newHashMap(); + Map context = new HashMap<>(); TestHelper.assertExpectedObjects(expectedResults, mergedRunner.run(QueryPlus.wrap(fullQuery), context), "merged"); } @@ -3787,7 +3786,7 @@ public class GroupByQueryRunnerTest } ); - Map context = Maps.newHashMap(); + Map context = new HashMap<>(); // add an extra layer of merging, simulate broker forwarding query to historical TestHelper.assertExpectedObjects( expectedResults, @@ -3835,7 +3834,7 @@ public class GroupByQueryRunnerTest ); QueryRunner mergeRunner = factory.getToolchest().mergeResults(runner); - Map context = Maps.newHashMap(); + Map context = new HashMap<>(); TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(QueryPlus.wrap(query), context), "no-limit"); } @@ -3888,7 +3887,7 @@ public class GroupByQueryRunnerTest GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "billy", null, "quality", "travel", "rows", 2L) ); - Map context = Maps.newHashMap(); + Map context = new HashMap<>(); QueryRunner mergeRunner = factory.getToolchest().mergeResults(runner); TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(QueryPlus.wrap(query), context), "no-limit"); } @@ -5070,7 +5069,7 @@ public class GroupByQueryRunnerTest .builder() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnInterval) - .setDimensions(Lists.newArrayList()) + .setDimensions(new ArrayList<>()) .setAggregatorSpecs(new FilteredAggregatorFactory(QueryRunnerTestHelper.rowsCount, filter)) .setGranularity(QueryRunnerTestHelper.allGran) .build(); @@ -5100,7 +5099,7 @@ public class GroupByQueryRunnerTest .builder() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnInterval) - .setDimensions(Lists.newArrayList()) + .setDimensions(new ArrayList<>()) .setDimFilter(firstDaysFilter) .setAggregatorSpecs(new FilteredAggregatorFactory(QueryRunnerTestHelper.rowsCount, fridayFilter)) .setGranularity(QueryRunnerTestHelper.dayGran) @@ -5136,7 +5135,7 @@ public class GroupByQueryRunnerTest .builder() .setDataSource(subquery) 
.setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList()).setAggregatorSpecs(new CountAggregatorFactory("count")) + .setDimensions(new ArrayList<>()).setAggregatorSpecs(new CountAggregatorFactory("count")) .setGranularity(QueryRunnerTestHelper.allGran) .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, 10000)) .build(); @@ -5164,7 +5163,7 @@ public class GroupByQueryRunnerTest .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setVirtualColumns(new ExpressionVirtualColumn("expr", "1", ValueType.FLOAT, TestExprMacroTable.INSTANCE)) - .setDimensions(Lists.newArrayList()).setAggregatorSpecs(new LongSumAggregatorFactory("count", "expr")) + .setDimensions(new ArrayList<>()).setAggregatorSpecs(new LongSumAggregatorFactory("count", "expr")) .setGranularity(QueryRunnerTestHelper.allGran) .build(); @@ -5191,7 +5190,7 @@ public class GroupByQueryRunnerTest .builder() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnInterval) - .setDimensions(Lists.newArrayList()).setAggregatorSpecs(new CardinalityAggregatorFactory( + .setDimensions(new ArrayList<>()).setAggregatorSpecs(new CardinalityAggregatorFactory( "car", ImmutableList.of(new DefaultDimensionSpec( "quality", @@ -5230,7 +5229,7 @@ public class GroupByQueryRunnerTest .builder() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList()).setAggregatorSpecs(new CountAggregatorFactory("count")) + .setDimensions(new ArrayList<>()).setAggregatorSpecs(new CountAggregatorFactory("count")) .setGranularity(QueryRunnerTestHelper.allGran) .build(); @@ -5497,7 +5496,7 @@ public class GroupByQueryRunnerTest .builder() .setDataSource(QueryRunnerTestHelper.dataSource) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList()) + .setDimensions(new ArrayList<>()) .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index"), new HyperUniquesAggregatorFactory("quality_uniques_inner", "quality_uniques")) @@ -5513,7 +5512,7 @@ public class GroupByQueryRunnerTest .builder() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setDimensions(Lists.newArrayList()) + .setDimensions(new ArrayList<>()) .setAggregatorSpecs(new LongSumAggregatorFactory("rows", "rows"), new LongSumAggregatorFactory("idx", "idx"), new HyperUniquesAggregatorFactory("quality_uniques_outer", "quality_uniques_inner_post")) @@ -5563,7 +5562,7 @@ public class GroupByQueryRunnerTest .builder() .setDataSource(subquery) .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnInterval) - .setDimensions(Lists.newArrayList()) + .setDimensions(new ArrayList<>()) .setAggregatorSpecs(new LongFirstAggregatorFactory("first", "innerfirst"), new LongLastAggregatorFactory("last", "innerlast")) .setGranularity(QueryRunnerTestHelper.monthGran) @@ -6285,7 +6284,7 @@ public class GroupByQueryRunnerTest ), "testSegment", Intervals.of("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z") ) ); - List bySegmentResults = Lists.newArrayList(); + List bySegmentResults = new ArrayList<>(); for (int i = 0; i < segmentCount; i++) { bySegmentResults.add(singleSegmentResult); } @@ -6301,7 +6300,7 @@ public class GroupByQueryRunnerTest final GroupByQuery fullQuery = builder.build(); QueryToolChest toolChest = factory.getToolchest(); - List> singleSegmentRunners = Lists.newArrayList(); + List> singleSegmentRunners = new ArrayList<>(); for (int i = 0; i < segmentCount; i++) { 
singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner)); } @@ -6315,7 +6314,7 @@ public class GroupByQueryRunnerTest TestHelper.assertExpectedObjects( bySegmentResults, - theRunner.run(QueryPlus.wrap(fullQuery), Maps.newHashMap()), + theRunner.run(QueryPlus.wrap(fullQuery), new HashMap<>()), "" ); exec.shutdownNow(); @@ -6342,7 +6341,7 @@ public class GroupByQueryRunnerTest ), "testSegment", Intervals.of("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z") ) ); - List bySegmentResults = Lists.newArrayList(); + List bySegmentResults = new ArrayList<>(); for (int i = 0; i < segmentCount; i++) { bySegmentResults.add(singleSegmentResult); } @@ -6366,7 +6365,7 @@ public class GroupByQueryRunnerTest final GroupByQuery fullQuery = builder.build(); QueryToolChest toolChest = factory.getToolchest(); - List> singleSegmentRunners = Lists.newArrayList(); + List> singleSegmentRunners = new ArrayList<>(); for (int i = 0; i < segmentCount; i++) { singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner)); } @@ -6378,7 +6377,7 @@ public class GroupByQueryRunnerTest ) ); - TestHelper.assertExpectedObjects(bySegmentResults, theRunner.run(QueryPlus.wrap(fullQuery), Maps.newHashMap()), ""); + TestHelper.assertExpectedObjects(bySegmentResults, theRunner.run(QueryPlus.wrap(fullQuery), new HashMap<>()), ""); exec.shutdownNow(); } @@ -6402,7 +6401,7 @@ public class GroupByQueryRunnerTest ), "testSegment", Intervals.of("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z") ) ); - List bySegmentResults = Lists.newArrayList(); + List bySegmentResults = new ArrayList<>(); for (int i = 0; i < segmentCount; i++) { bySegmentResults.add(singleSegmentResult); } @@ -6426,7 +6425,7 @@ public class GroupByQueryRunnerTest final GroupByQuery fullQuery = builder.build(); QueryToolChest toolChest = factory.getToolchest(); - List> singleSegmentRunners = Lists.newArrayList(); + List> singleSegmentRunners = new ArrayList<>(); for (int i = 0; i < segmentCount; i++) { singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner)); } @@ -6438,7 +6437,7 @@ public class GroupByQueryRunnerTest ) ); - TestHelper.assertExpectedObjects(bySegmentResults, theRunner.run(QueryPlus.wrap(fullQuery), Maps.newHashMap()), ""); + TestHelper.assertExpectedObjects(bySegmentResults, theRunner.run(QueryPlus.wrap(fullQuery), new HashMap<>()), ""); exec.shutdownNow(); } @@ -6876,7 +6875,7 @@ public class GroupByQueryRunnerTest ), "testSegment", Intervals.of("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z") ) ); - List bySegmentResults = Lists.newArrayList(); + List bySegmentResults = new ArrayList<>(); for (int i = 0; i < segmentCount; i++) { bySegmentResults.add(singleSegmentResult); } @@ -6923,7 +6922,7 @@ public class GroupByQueryRunnerTest final GroupByQuery fullQuery = builder.build(); QueryToolChest toolChest = factory.getToolchest(); - List> singleSegmentRunners = Lists.newArrayList(); + List> singleSegmentRunners = new ArrayList<>(); for (int i = 0; i < segmentCount; i++) { singleSegmentRunners.add(toolChest.preMergeQueryDecoration(runner)); } @@ -6935,7 +6934,7 @@ public class GroupByQueryRunnerTest ) ); - TestHelper.assertExpectedObjects(bySegmentResults, theRunner.run(QueryPlus.wrap(fullQuery), Maps.newHashMap()), ""); + TestHelper.assertExpectedObjects(bySegmentResults, theRunner.run(QueryPlus.wrap(fullQuery), new HashMap<>()), ""); exec.shutdownNow(); } @@ -8264,7 +8263,7 @@ public class GroupByQueryRunnerTest } } ); - Map context = Maps.newHashMap(); + Map context = new HashMap<>(); List allGranExpectedResults = 
Arrays.asList( GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "travel", "rows", 2L, "idx", 243L), GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "technology", "rows", 2L, "idx", 177L), @@ -8331,7 +8330,7 @@ public class GroupByQueryRunnerTest } } ); - Map context = Maps.newHashMap(); + Map context = new HashMap<>(); List allGranExpectedResults = Arrays.asList( GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "mezzanine", "rows", 6L, "idx", 4420L), @@ -8401,7 +8400,7 @@ public class GroupByQueryRunnerTest } } ); - Map context = Maps.newHashMap(); + Map context = new HashMap<>(); List allGranExpectedResults = Arrays.asList( GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "travel", "market", "spot", "rows", 2L, "idx", 243L), @@ -8484,7 +8483,7 @@ public class GroupByQueryRunnerTest } } ); - Map context = Maps.newHashMap(); + Map context = new HashMap<>(); List allGranExpectedResults = Arrays.asList( GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "travel", "market", "spot", "rows", 2L, "idx", 243L), diff --git a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerTestHelper.java b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerTestHelper.java index bef65e7cb6d..f59b631c3d3 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerTestHelper.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerTestHelper.java @@ -20,7 +20,6 @@ package org.apache.druid.query.groupby; import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.apache.druid.data.input.MapBasedRow; import org.apache.druid.data.input.Row; @@ -36,7 +35,9 @@ import org.apache.druid.segment.column.ColumnHolder; import org.joda.time.DateTime; import org.joda.time.chrono.ISOChronology; +import java.util.ArrayList; import java.util.Arrays; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -53,7 +54,7 @@ public class GroupByQueryRunnerTestHelper toolChest ); - Sequence queryResult = theRunner.run(QueryPlus.wrap(query), Maps.newHashMap()); + Sequence queryResult = theRunner.run(QueryPlus.wrap(query), new HashMap<>()); return queryResult.toList(); } @@ -66,7 +67,7 @@ public class GroupByQueryRunnerTestHelper { Preconditions.checkArgument(vals.length % 2 == 0); - Map theVals = Maps.newHashMap(); + Map theVals = new HashMap<>(); for (int i = 0; i < vals.length; i += 2) { theVals.put(vals[i].toString(), vals[i + 1]); } @@ -79,7 +80,7 @@ public class GroupByQueryRunnerTestHelper int timeIndex = Arrays.asList(columnNames).indexOf(ColumnHolder.TIME_COLUMN_NAME); Preconditions.checkArgument(timeIndex >= 0); - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); for (Object[] value : values) { Preconditions.checkArgument(value.length == columnNames.length); Map theVals = Maps.newHashMapWithExpectedSize(value.length); diff --git a/processing/src/test/java/org/apache/druid/query/groupby/NestedQueryPushDownTest.java b/processing/src/test/java/org/apache/druid/query/groupby/NestedQueryPushDownTest.java index 8e1ae315547..1bf936dbec6 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/NestedQueryPushDownTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/NestedQueryPushDownTest.java @@ -25,8 +25,6 @@ import com.google.common.base.Supplier; import com.google.common.base.Suppliers; 
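Every hunk in these test files repeats one mechanical substitution: a Guava collection factory call becomes the equivalent JDK constructor, which the Java 7 diamond operator makes just as terse. A minimal before/after sketch of that pattern, assuming the usual java.util and com.google.common.collect imports and with the generic parameters restored for illustration:

    // Before: Guava factories, needed only to avoid repeating type
    // arguments in pre-Java-7 code.
    List<QueryRunner<Row>> runners = Lists.newArrayList();
    Map<String, Object> context = Maps.newHashMap();

    // After: the diamond operator infers the type arguments, so the
    // JDK constructors serve directly and the Guava imports drop out.
    List<QueryRunner<Row>> runners = new ArrayList<>();
    Map<String, Object> context = new HashMap<>();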
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.io.Files; import com.google.common.util.concurrent.ListenableFuture; import org.apache.commons.io.FileUtils; @@ -89,6 +87,7 @@ import org.junit.Test; import java.io.File; import java.nio.ByteBuffer; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; @@ -106,8 +105,8 @@ public class NestedQueryPushDownTest private File tmpDir; private QueryRunnerFactory groupByFactory; private QueryRunnerFactory groupByFactory2; - private List incrementalIndices = Lists.newArrayList(); - private List groupByIndices = Lists.newArrayList(); + private List incrementalIndices = new ArrayList<>(); + private List groupByIndices = new ArrayList<>(); private ExecutorService executorService; static { @@ -416,7 +415,7 @@ public class NestedQueryPushDownTest .setGranularity(Granularities.ALL) .build(); - Sequence queryResult = runNestedQueryWithForcePushDown(nestedQuery, Maps.newHashMap()); + Sequence queryResult = runNestedQueryWithForcePushDown(nestedQuery, new HashMap<>()); List results = queryResult.toList(); Row expectedRow0 = GroupByQueryRunnerTestHelper.createExpectedRow( @@ -469,7 +468,7 @@ public class NestedQueryPushDownTest .setGranularity(Granularities.ALL) .build(); - Sequence queryResult = runNestedQueryWithForcePushDown(nestedQuery, Maps.newHashMap()); + Sequence queryResult = runNestedQueryWithForcePushDown(nestedQuery, new HashMap<>()); List results = queryResult.toList(); Row expectedRow0 = GroupByQueryRunnerTestHelper.createExpectedRow( @@ -531,7 +530,7 @@ public class NestedQueryPushDownTest .setQuerySegmentSpec(intervalSpec) .build(); - Sequence queryResult = runNestedQueryWithForcePushDown(nestedQuery, Maps.newHashMap()); + Sequence queryResult = runNestedQueryWithForcePushDown(nestedQuery, new HashMap<>()); List results = queryResult.toList(); Assert.assertEquals(0, results.size()); @@ -580,7 +579,7 @@ public class NestedQueryPushDownTest "finalSum", 4000L, "newDimA", "mango" ); - Sequence queryResult = runNestedQueryWithForcePushDown(nestedQuery, Maps.newHashMap()); + Sequence queryResult = runNestedQueryWithForcePushDown(nestedQuery, new HashMap<>()); List results = queryResult.toList(); Assert.assertEquals(1, results.size()); @@ -630,7 +629,7 @@ public class NestedQueryPushDownTest "finalSum", 4000L, "newDimA", "mango" ); - Sequence queryResult = runNestedQueryWithForcePushDown(nestedQuery, Maps.newHashMap()); + Sequence queryResult = runNestedQueryWithForcePushDown(nestedQuery, new HashMap<>()); List results = queryResult.toList(); Assert.assertEquals(1, results.size()); @@ -681,7 +680,7 @@ public class NestedQueryPushDownTest "finalSum", 4000L, "extractedDimA", "replacement" ); - Sequence queryResult = runNestedQueryWithForcePushDown(nestedQuery, Maps.newHashMap()); + Sequence queryResult = runNestedQueryWithForcePushDown(nestedQuery, new HashMap<>()); List results = queryResult.toList(); Assert.assertEquals(2, results.size()); @@ -727,7 +726,7 @@ public class NestedQueryPushDownTest "dimB", "sweet", "finalSum", 90L ); - Sequence queryResult = runNestedQueryWithForcePushDown(nestedQuery, Maps.newHashMap()); + Sequence queryResult = runNestedQueryWithForcePushDown(nestedQuery, new HashMap<>()); List results = queryResult.toList(); Assert.assertEquals(1, results.size()); @@ -849,7 +848,7 @@ public class NestedQueryPushDownTest private 
List> getQueryRunnerForSegment1() { - List> runners = Lists.newArrayList(); + List> runners = new ArrayList<>(); QueryableIndex index = groupByIndices.get(0); QueryRunner runner = makeQueryRunnerForSegment( groupByFactory, @@ -862,7 +861,7 @@ public class NestedQueryPushDownTest private List> getQueryRunnerForSegment2() { - List> runners = Lists.newArrayList(); + List> runners = new ArrayList<>(); QueryableIndex index2 = groupByIndices.get(1); QueryRunner tooSmallRunner = makeQueryRunnerForSegment( groupByFactory2, diff --git a/processing/src/test/java/org/apache/druid/query/groupby/epinephelinae/BufferHashGrouperTest.java b/processing/src/test/java/org/apache/druid/query/groupby/epinephelinae/BufferHashGrouperTest.java index 53a74c75ea8..c28657a1c55 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/epinephelinae/BufferHashGrouperTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/epinephelinae/BufferHashGrouperTest.java @@ -45,6 +45,7 @@ import java.io.IOException; import java.nio.ByteBuffer; import java.nio.MappedByteBuffer; import java.nio.channels.FileChannel; +import java.util.ArrayList; import java.util.Comparator; import java.util.List; @@ -127,7 +128,7 @@ public class BufferHashGrouperTest } Assert.assertFalse(grouper.aggregate(expectedMaxSize).isOk()); - final List> expected = Lists.newArrayList(); + final List> expected = new ArrayList<>(); for (int i = 0; i < expectedMaxSize; i++) { expected.add(new Grouper.Entry<>(i, new Object[]{21L, 2L})); } @@ -183,7 +184,7 @@ public class BufferHashGrouperTest } Assert.assertFalse(grouper.aggregate(expectedMaxSize).isOk()); - final List> expected = Lists.newArrayList(); + final List> expected = new ArrayList<>(); for (int i = 0; i < expectedMaxSize; i++) { expected.add(new Grouper.Entry<>(i, new Object[]{21L, 2L})); } diff --git a/processing/src/test/java/org/apache/druid/query/groupby/epinephelinae/ByteBufferMinMaxOffsetHeapTest.java b/processing/src/test/java/org/apache/druid/query/groupby/epinephelinae/ByteBufferMinMaxOffsetHeapTest.java index 9e38304fd82..63245c78b30 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/epinephelinae/ByteBufferMinMaxOffsetHeapTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/epinephelinae/ByteBufferMinMaxOffsetHeapTest.java @@ -69,7 +69,7 @@ public class ByteBufferMinMaxOffsetHeapTest Collections.sort(values); List expected = values.subList(0, limit); - List actual = Lists.newArrayList(); + List actual = new ArrayList<>(); for (int i = 0; i < limit; i++) { int min = heap.removeMin(); actual.add(min); @@ -86,11 +86,11 @@ public class ByteBufferMinMaxOffsetHeapTest Random rng = new Random(999); - ArrayList values = Lists.newArrayList(); + ArrayList values = new ArrayList<>(); for (int i = 0; i < 10000; i++) { values.add(rng.nextInt(1000000)); } - ArrayList deletedValues = Lists.newArrayList(); + ArrayList deletedValues = new ArrayList<>(); ByteBuffer myBuffer = ByteBuffer.allocate(1000000); ByteBufferMinMaxOffsetHeap heap = new ByteBufferMinMaxOffsetHeap(myBuffer, limit, Ordering.natural(), null); @@ -126,7 +126,7 @@ public class ByteBufferMinMaxOffsetHeapTest Assert.assertTrue(heap.getHeapSize() <= limit); List expected = values.subList(0, heap.getHeapSize()); - List actual = Lists.newArrayList(); + List actual = new ArrayList<>(); int initialHeapSize = heap.getHeapSize(); for (int i = 0; i < initialHeapSize; i++) { int min = heap.removeMin(); @@ -143,11 +143,11 @@ public class ByteBufferMinMaxOffsetHeapTest Random rng = new 
Random(9999); - ArrayList values = Lists.newArrayList(); + ArrayList values = new ArrayList<>(); for (int i = 0; i < 20000; i++) { values.add(rng.nextInt(1000000)); } - ArrayList deletedValues = Lists.newArrayList(); + ArrayList deletedValues = new ArrayList<>(); ByteBuffer myBuffer = ByteBuffer.allocate(1000000); ByteBufferMinMaxOffsetHeap heap = new ByteBufferMinMaxOffsetHeap(myBuffer, limit, Ordering.natural(), null); @@ -183,7 +183,7 @@ public class ByteBufferMinMaxOffsetHeapTest Assert.assertTrue(heap.getHeapSize() <= limit); List expected = values.subList(0, heap.getHeapSize()); - List actual = Lists.newArrayList(); + List actual = new ArrayList<>(); int initialHeapSize = heap.getHeapSize(); for (int i = 0; i < initialHeapSize; i++) { int min = heap.removeMin(); @@ -218,7 +218,7 @@ public class ByteBufferMinMaxOffsetHeapTest Collections.sort(values); values.rem(12); - List actual = Lists.newArrayList(); + List actual = new ArrayList<>(); for (int i = 0; i < values.size(); i++) { int min = heap.removeMin(); actual.add(min); @@ -252,7 +252,7 @@ public class ByteBufferMinMaxOffsetHeapTest values.rem(2); Assert.assertTrue(heap.isIntact()); - List actual = Lists.newArrayList(); + List actual = new ArrayList<>(); for (int i = 0; i < values.size(); i++) { int min = heap.removeMin(); actual.add(min); diff --git a/processing/src/test/java/org/apache/druid/query/groupby/epinephelinae/LimitedBufferHashGrouperTest.java b/processing/src/test/java/org/apache/druid/query/groupby/epinephelinae/LimitedBufferHashGrouperTest.java index ad990a6ed5d..4ce6f4593ea 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/epinephelinae/LimitedBufferHashGrouperTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/epinephelinae/LimitedBufferHashGrouperTest.java @@ -34,6 +34,7 @@ import org.junit.Test; import org.junit.rules.ExpectedException; import java.nio.ByteBuffer; +import java.util.ArrayList; import java.util.List; public class LimitedBufferHashGrouperTest @@ -118,7 +119,7 @@ public class LimitedBufferHashGrouperTest Assert.assertEquals(100, grouper.getLimit()); - final List> expected = Lists.newArrayList(); + final List> expected = new ArrayList<>(); for (int i = 0; i < limit; i++) { expected.add(new Grouper.Entry<>(i, new Object[]{11L, 1L})); } @@ -183,7 +184,7 @@ public class LimitedBufferHashGrouperTest } Assert.assertEquals(100, grouper.getLimit()); - final List> expected = Lists.newArrayList(); + final List> expected = new ArrayList<>(); for (int i = 0; i < limit; i++) { expected.add(new Grouper.Entry<>(i, new Object[]{11L, 1L})); } diff --git a/processing/src/test/java/org/apache/druid/query/groupby/orderby/DefaultLimitSpecTest.java b/processing/src/test/java/org/apache/druid/query/groupby/orderby/DefaultLimitSpecTest.java index a3d35643b5d..95d49cd51dd 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/orderby/DefaultLimitSpecTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/orderby/DefaultLimitSpecTest.java @@ -23,7 +23,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Function; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Maps; import org.apache.druid.data.input.MapBasedRow; import org.apache.druid.data.input.Row; import org.apache.druid.java.util.common.DateTimes; @@ -42,6 +41,7 @@ import org.apache.druid.segment.column.ValueType; import org.junit.Assert; import org.junit.Test; +import java.util.HashMap; 
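Not every Guava call is banned outright. Factories with a direct JDK counterpart are replaced, including those that take an argument, while calls that still add something over the JDK are kept, as the surviving Maps.newHashMapWithExpectedSize and Lists.newArrayList(Iterable) call sites elsewhere in this patch show. A sketch of the distinction, with hypothetical variable names:

    // Replaced: the JDK constructors accept the same arguments.
    Map<String, Set<String>> expected = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
    Set<String> noHits = new HashSet<>();

    // Kept: the JDK has no one-call equivalent for a capacity hint
    // sized to hold n entries, or for copying an arbitrary Iterable.
    Map<String, Object> theVals = Maps.newHashMapWithExpectedSize(n);
    List<String> copied = Lists.newArrayList(someIterable);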
import java.util.List; import java.util.Map; @@ -327,7 +327,7 @@ public class DefaultLimitSpecTest { Preconditions.checkArgument(vals.length % 2 == 0); - Map theVals = Maps.newHashMap(); + Map theVals = new HashMap<>(); for (int i = 0; i < vals.length; i += 2) { theVals.put(vals[i].toString(), vals[i + 1]); } diff --git a/processing/src/test/java/org/apache/druid/query/lookup/LookupExtractionFnTest.java b/processing/src/test/java/org/apache/druid/query/lookup/LookupExtractionFnTest.java index c85b492c7a0..08f41c785e9 100644 --- a/processing/src/test/java/org/apache/druid/query/lookup/LookupExtractionFnTest.java +++ b/processing/src/test/java/org/apache/druid/query/lookup/LookupExtractionFnTest.java @@ -24,7 +24,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; -import com.google.common.collect.Maps; import com.google.common.collect.Sets; import org.apache.druid.common.config.NullHandling; import org.apache.druid.jackson.DefaultObjectMapper; @@ -37,6 +36,7 @@ import org.junit.runners.Parameterized; import java.io.IOException; import java.util.Arrays; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; @@ -167,7 +167,7 @@ public class LookupExtractionFnTest // skip return; } - final Map weirdMap = Maps.newHashMap(); + final Map weirdMap = new HashMap<>(); weirdMap.put("foobar", null); final LookupExtractionFn lookupExtractionFn = new LookupExtractionFn( diff --git a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java index 3d9d8dd8379..c841af498d0 100644 --- a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java +++ b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java @@ -23,7 +23,6 @@ package org.apache.druid.query.metadata; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Maps; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.query.CacheStrategy; @@ -44,6 +43,7 @@ import org.joda.time.Period; import org.junit.Assert; import org.junit.Test; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -113,7 +113,7 @@ public class SegmentMetadataQueryQueryToolChestTest final SegmentAnalysis analysis1 = new SegmentAnalysis( "id", null, - Maps.newHashMap(), + new HashMap<>(), 0, 0, ImmutableMap.of( @@ -127,7 +127,7 @@ public class SegmentMetadataQueryQueryToolChestTest final SegmentAnalysis analysis2 = new SegmentAnalysis( "id", null, - Maps.newHashMap(), + new HashMap<>(), 0, 0, ImmutableMap.of( @@ -163,7 +163,7 @@ public class SegmentMetadataQueryQueryToolChestTest final SegmentAnalysis analysis1 = new SegmentAnalysis( "id", null, - Maps.newHashMap(), + new HashMap<>(), 0, 0, null, @@ -174,7 +174,7 @@ public class SegmentMetadataQueryQueryToolChestTest final SegmentAnalysis analysis2 = new SegmentAnalysis( "id", null, - Maps.newHashMap(), + new HashMap<>(), 0, 0, ImmutableMap.of( @@ -202,7 +202,7 @@ public class SegmentMetadataQueryQueryToolChestTest final SegmentAnalysis analysis1 = new SegmentAnalysis( 
"id", null, - Maps.newHashMap(), + new HashMap<>(), 0, 0, null, @@ -213,7 +213,7 @@ public class SegmentMetadataQueryQueryToolChestTest final SegmentAnalysis analysis2 = new SegmentAnalysis( "id", null, - Maps.newHashMap(), + new HashMap<>(), 0, 0, null, @@ -232,7 +232,7 @@ public class SegmentMetadataQueryQueryToolChestTest final SegmentAnalysis analysis1 = new SegmentAnalysis( "id", null, - Maps.newHashMap(), + new HashMap<>(), 0, 0, ImmutableMap.of( @@ -246,7 +246,7 @@ public class SegmentMetadataQueryQueryToolChestTest final SegmentAnalysis analysis2 = new SegmentAnalysis( "id", null, - Maps.newHashMap(), + new HashMap<>(), 0, 0, ImmutableMap.of( @@ -259,7 +259,7 @@ public class SegmentMetadataQueryQueryToolChestTest null ); - final Map expectedLenient = Maps.newHashMap(); + final Map expectedLenient = new HashMap<>(); expectedLenient.put("foo", new LongSumAggregatorFactory("foo", "foo")); expectedLenient.put("bar", null); expectedLenient.put("baz", new LongMaxAggregatorFactory("baz", "baz")); @@ -314,7 +314,7 @@ public class SegmentMetadataQueryQueryToolChestTest final SegmentAnalysis analysis1 = new SegmentAnalysis( "id", null, - Maps.newHashMap(), + new HashMap<>(), 0, 0, null, @@ -325,7 +325,7 @@ public class SegmentMetadataQueryQueryToolChestTest final SegmentAnalysis analysis2 = new SegmentAnalysis( "id", null, - Maps.newHashMap(), + new HashMap<>(), 0, 0, null, @@ -336,7 +336,7 @@ public class SegmentMetadataQueryQueryToolChestTest final SegmentAnalysis analysis3 = new SegmentAnalysis( "id", null, - Maps.newHashMap(), + new HashMap<>(), 0, 0, null, @@ -347,7 +347,7 @@ public class SegmentMetadataQueryQueryToolChestTest final SegmentAnalysis analysis4 = new SegmentAnalysis( "id", null, - Maps.newHashMap(), + new HashMap<>(), 0, 0, null, @@ -358,7 +358,7 @@ public class SegmentMetadataQueryQueryToolChestTest final SegmentAnalysis analysis5 = new SegmentAnalysis( "id", null, - Maps.newHashMap(), + new HashMap<>(), 0, 0, null, diff --git a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryTest.java b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryTest.java index af912fdc71f..13e1d3ec4d9 100644 --- a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryTest.java @@ -24,7 +24,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.util.concurrent.MoreExecutors; import org.apache.druid.data.input.impl.TimestampSpec; import org.apache.druid.jackson.DefaultObjectMapper; @@ -64,6 +63,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.EnumSet; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutorService; @@ -257,7 +257,7 @@ public class SegmentMetadataQueryTest @SuppressWarnings("unchecked") public void testSegmentMetadataQuery() { - List results = runner1.run(QueryPlus.wrap(testQuery), Maps.newHashMap()).toList(); + List results = runner1.run(QueryPlus.wrap(testQuery), new HashMap<>()).toList(); Assert.assertEquals(Collections.singletonList(expectedSegmentAnalysis1), results); } @@ -324,7 +324,7 @@ public class SegmentMetadataQueryTest .build(); TestHelper.assertExpectedObjects( ImmutableList.of(mergedSegmentAnalysis), - 
myRunner.run(QueryPlus.wrap(query), Maps.newHashMap()), + myRunner.run(QueryPlus.wrap(query), new HashMap<>()), "failed SegmentMetadata merging query" ); exec.shutdownNow(); @@ -392,7 +392,7 @@ public class SegmentMetadataQueryTest .build(); TestHelper.assertExpectedObjects( ImmutableList.of(mergedSegmentAnalysis), - myRunner.run(QueryPlus.wrap(query), Maps.newHashMap()), + myRunner.run(QueryPlus.wrap(query), new HashMap<>()), "failed SegmentMetadata merging query" ); exec.shutdownNow(); @@ -460,7 +460,7 @@ public class SegmentMetadataQueryTest .build(); TestHelper.assertExpectedObjects( ImmutableList.of(mergedSegmentAnalysis), - myRunner.run(QueryPlus.wrap(query), Maps.newHashMap()), + myRunner.run(QueryPlus.wrap(query), new HashMap<>()), "failed SegmentMetadata merging query" ); exec.shutdownNow(); @@ -571,7 +571,7 @@ public class SegmentMetadataQueryTest TestHelper.assertExpectedObjects( ImmutableList.of(mergedSegmentAnalysis), - myRunner.run(QueryPlus.wrap(query), Maps.newHashMap()), + myRunner.run(QueryPlus.wrap(query), new HashMap<>()), "failed SegmentMetadata merging query" ); exec.shutdownNow(); @@ -629,7 +629,7 @@ public class SegmentMetadataQueryTest .build(); TestHelper.assertExpectedObjects( ImmutableList.of(mergedSegmentAnalysis), - myRunner.run(QueryPlus.wrap(query), Maps.newHashMap()), + myRunner.run(QueryPlus.wrap(query), new HashMap<>()), "failed SegmentMetadata merging query" ); exec.shutdownNow(); @@ -638,7 +638,7 @@ public class SegmentMetadataQueryTest @Test public void testSegmentMetadataQueryWithAggregatorsMerge() { - final Map expectedAggregators = Maps.newHashMap(); + final Map expectedAggregators = new HashMap<>(); for (AggregatorFactory agg : TestIndex.METRIC_AGGS) { expectedAggregators.put(agg.getName(), agg.getCombiningFactory()); } @@ -691,7 +691,7 @@ public class SegmentMetadataQueryTest .build(); TestHelper.assertExpectedObjects( ImmutableList.of(mergedSegmentAnalysis), - myRunner.run(QueryPlus.wrap(query), Maps.newHashMap()), + myRunner.run(QueryPlus.wrap(query), new HashMap<>()), "failed SegmentMetadata merging query" ); exec.shutdownNow(); @@ -749,7 +749,7 @@ public class SegmentMetadataQueryTest .build(); TestHelper.assertExpectedObjects( ImmutableList.of(mergedSegmentAnalysis), - myRunner.run(QueryPlus.wrap(query), Maps.newHashMap()), + myRunner.run(QueryPlus.wrap(query), new HashMap<>()), "failed SegmentMetadata merging query" ); exec.shutdownNow(); @@ -807,7 +807,7 @@ public class SegmentMetadataQueryTest .build(); TestHelper.assertExpectedObjects( ImmutableList.of(mergedSegmentAnalysis), - myRunner.run(QueryPlus.wrap(query), Maps.newHashMap()), + myRunner.run(QueryPlus.wrap(query), new HashMap<>()), "failed SegmentMetadata merging query" ); exec.shutdownNow(); @@ -846,7 +846,7 @@ public class SegmentMetadataQueryTest ImmutableList.of(bySegmentResult, bySegmentResult), myRunner.run( QueryPlus.wrap(testQuery.withOverriddenContext(ImmutableMap.of("bySegment", true))), - Maps.newHashMap() + new HashMap<>() ), "failed SegmentMetadata bySegment query" ); diff --git a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataUnionQueryTest.java b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataUnionQueryTest.java index b15d1970ffd..e98f1d11f6a 100644 --- a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataUnionQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataUnionQueryTest.java @@ -21,7 +21,6 @@ package org.apache.druid.query.metadata; import 
com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Maps; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.query.Druids; import org.apache.druid.query.QueryPlus; @@ -42,6 +41,7 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.util.Collections; +import java.util.HashMap; import java.util.List; @RunWith(Parameterized.class) @@ -127,7 +127,7 @@ public class SegmentMetadataUnionQueryTest SegmentMetadataQuery.AnalysisType.MINMAX ) .build(); - List result = runner.run(QueryPlus.wrap(query), Maps.newHashMap()).toList(); + List result = runner.run(QueryPlus.wrap(query), new HashMap<>()).toList(); TestHelper.assertExpectedObjects(ImmutableList.of(expected), result, "failed SegmentMetadata union query"); } diff --git a/processing/src/test/java/org/apache/druid/query/scan/ScanQueryRunnerTest.java b/processing/src/test/java/org/apache/druid/query/scan/ScanQueryRunnerTest.java index f4aaa98722a..b280decf0e2 100644 --- a/processing/src/test/java/org/apache/druid/query/scan/ScanQueryRunnerTest.java +++ b/processing/src/test/java/org/apache/druid/query/scan/ScanQueryRunnerTest.java @@ -23,7 +23,6 @@ import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.collect.ObjectArrays; import com.google.common.collect.Sets; import com.google.common.hash.Hashing; @@ -418,10 +417,10 @@ public class ScanQueryRunnerTest .columns(QueryRunnerTestHelper.qualityDimension, QueryRunnerTestHelper.indexMetric) .build(); - Iterable results = runner.run(QueryPlus.wrap(query), Maps.newHashMap()).toList(); + Iterable results = runner.run(QueryPlus.wrap(query), new HashMap<>()).toList(); Iterable resultsOptimize = toolChest .postMergeQueryDecoration(toolChest.mergeResults(toolChest.preMergeQueryDecoration(runner))) - .run(QueryPlus.wrap(query), Maps.newHashMap()) + .run(QueryPlus.wrap(query), new HashMap<>()) .toList(); final List>> events = toEvents( @@ -477,7 +476,7 @@ public class ScanQueryRunnerTest ) .build(); - Iterable results = runner.run(QueryPlus.wrap(query), Maps.newHashMap()).toList(); + Iterable results = runner.run(QueryPlus.wrap(query), new HashMap<>()).toList(); List expectedResults = Collections.emptyList(); @@ -492,7 +491,7 @@ public class ScanQueryRunnerTest .columns("foo", "foo2") .build(); - Iterable results = runner.run(QueryPlus.wrap(query), Maps.newHashMap()).toList(); + Iterable results = runner.run(QueryPlus.wrap(query), new HashMap<>()).toList(); final List>> events = toEvents( legacy ? new String[]{getTimestampName() + ":TIME"} : new String[0], @@ -538,11 +537,11 @@ public class ScanQueryRunnerTest private List>> toEvents(final String[] dimSpecs, final String[]... 
valueSet) { - List values = Lists.newArrayList(); + List values = new ArrayList<>(); for (String[] vSet : valueSet) { values.addAll(Arrays.asList(vSet)); } - List>> events = Lists.newArrayList(); + List>> events = new ArrayList<>(); events.add( Lists.newArrayList( Iterables.transform( @@ -552,7 +551,7 @@ public class ScanQueryRunnerTest @Override public Map apply(String input) { - Map event = Maps.newHashMap(); + Map event = new HashMap<>(); String[] values = input.split("\\t"); for (int i = 0; i < dimSpecs.length; i++) { if (dimSpecs[i] == null || i >= dimSpecs.length) { diff --git a/processing/src/test/java/org/apache/druid/query/search/SearchQueryRunnerWithCaseTest.java b/processing/src/test/java/org/apache/druid/query/search/SearchQueryRunnerWithCaseTest.java index 007c1509876..cc3b13a0b6f 100644 --- a/processing/src/test/java/org/apache/druid/query/search/SearchQueryRunnerWithCaseTest.java +++ b/processing/src/test/java/org/apache/druid/query/search/SearchQueryRunnerWithCaseTest.java @@ -20,8 +20,6 @@ package org.apache.druid.query.search; import com.google.common.base.Suppliers; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.common.io.CharSource; import org.apache.druid.java.util.common.DateTimes; @@ -40,12 +38,15 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.TreeMap; import static org.apache.druid.query.QueryRunnerTestHelper.NOOP_QUERYWATCHER; import static org.apache.druid.query.QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator; @@ -88,7 +89,7 @@ public class SearchQueryRunnerWithCaseTest QueryableIndex index3 = TestIndex.persistRealtimeAndLoadMMapped(index1); QueryableIndex index4 = TestIndex.persistRealtimeAndLoadMMapped(index2); - final List>> runners = Lists.newArrayList(); + final List>> runners = new ArrayList<>(); for (SearchQueryConfig config : configs) { runners.addAll(Arrays.asList( makeQueryRunner( @@ -154,7 +155,7 @@ public class SearchQueryRunnerWithCaseTest public void testSearch() { Druids.SearchQueryBuilder builder = testBuilder(); - Map> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER); + Map> expectedResults = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); SearchQuery searchQuery; searchQuery = builder.query("SPOT").build(); @@ -176,7 +177,7 @@ public class SearchQueryRunnerWithCaseTest SearchQuery searchQuery; Druids.SearchQueryBuilder builder = testBuilder() .dimensions(Arrays.asList(placementDimension, placementishDimension)); - Map> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER); + Map> expectedResults = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); searchQuery = builder.query("PREFERRED").build(); expectedResults.put(placementDimension, Sets.newHashSet("PREFERRED", "preferred", "PREFERRed")); @@ -196,7 +197,7 @@ public class SearchQueryRunnerWithCaseTest Druids.SearchQueryBuilder builder = testBuilder() .dimensions(Collections.singletonList(qualityDimension)) .intervals("2011-01-12T00:00:00.000Z/2011-01-13T00:00:00.000Z"); - Map> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER); + Map> expectedResults = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); searchQuery = builder.query("otive").build(); expectedResults.put(qualityDimension, 
Sets.newHashSet("AutoMotive")); @@ -210,10 +211,10 @@ public class SearchQueryRunnerWithCaseTest Druids.SearchQueryBuilder builder = testBuilder() .dimensions(Collections.singletonList(qualityDimension)) .intervals("2011-01-10T00:00:00.000Z/2011-01-11T00:00:00.000Z"); - Map> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER); + Map> expectedResults = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); searchQuery = builder.query("business").build(); - expectedResults.put(qualityDimension, Sets.newHashSet()); + expectedResults.put(qualityDimension, new HashSet<>()); checkSearchQuery(searchQuery, expectedResults); } @@ -221,7 +222,7 @@ public class SearchQueryRunnerWithCaseTest public void testFragmentSearch() { Druids.SearchQueryBuilder builder = testBuilder(); - Map> expectedResults = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER); + Map> expectedResults = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); SearchQuery searchQuery; searchQuery = builder.fragments(Arrays.asList("auto", "ve")).build(); diff --git a/processing/src/test/java/org/apache/druid/query/select/MultiSegmentSelectQueryTest.java b/processing/src/test/java/org/apache/druid/query/select/MultiSegmentSelectQueryTest.java index dc5ca3cf128..289f9082cfe 100644 --- a/processing/src/test/java/org/apache/druid/query/select/MultiSegmentSelectQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/select/MultiSegmentSelectQueryTest.java @@ -23,7 +23,6 @@ import com.google.common.base.Supplier; import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; import com.google.common.io.CharSource; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; @@ -61,6 +60,7 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -153,7 +153,7 @@ public class MultiSegmentSelectQueryTest timeline.add(index1.getInterval(), "v1", new SingleElementPartitionChunk(segment1)); timeline.add(index2.getInterval(), "v2", new SingleElementPartitionChunk(segment_override)); - segmentIdentifiers = Lists.newArrayList(); + segmentIdentifiers = new ArrayList<>(); for (TimelineObjectHolder holder : timeline.lookup(Intervals.of("2011-01-12/2011-01-14"))) { segmentIdentifiers.add(makeIdentifier(holder.getInterval(), holder.getVersion())); } diff --git a/processing/src/test/java/org/apache/druid/query/select/PagingOffsetTest.java b/processing/src/test/java/org/apache/druid/query/select/PagingOffsetTest.java index 4e164d8c942..9f6f26484d5 100644 --- a/processing/src/test/java/org/apache/druid/query/select/PagingOffsetTest.java +++ b/processing/src/test/java/org/apache/druid/query/select/PagingOffsetTest.java @@ -19,11 +19,11 @@ package org.apache.druid.query.select; -import com.google.common.collect.Lists; import com.google.common.primitives.Ints; import org.junit.Assert; import org.junit.Test; +import java.util.ArrayList; import java.util.List; public class PagingOffsetTest @@ -62,7 +62,7 @@ public class PagingOffsetTest private int[] toArray(PagingOffset offset) { - List ints = Lists.newArrayList(); + List ints = new ArrayList<>(); for (; offset.hasNext(); offset.next()) { ints.add(offset.current()); } diff --git a/processing/src/test/java/org/apache/druid/query/select/SelectQueryRunnerTest.java 
b/processing/src/test/java/org/apache/druid/query/select/SelectQueryRunnerTest.java index edd93b7cb35..058d0bcb50e 100644 --- a/processing/src/test/java/org/apache/druid/query/select/SelectQueryRunnerTest.java +++ b/processing/src/test/java/org/apache/druid/query/select/SelectQueryRunnerTest.java @@ -25,7 +25,6 @@ import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.collect.ObjectArrays; import com.google.common.collect.Sets; import org.apache.druid.jackson.DefaultObjectMapper; @@ -63,6 +62,7 @@ import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; @@ -411,7 +411,7 @@ public class SelectQueryRunnerTest .pagingSpec(new PagingSpec(toPagingIdentifier(3, descending), 3)) .build(); - Iterable> results = runner.run(QueryPlus.wrap(query), Maps.newHashMap()).toList(); + Iterable> results = runner.run(QueryPlus.wrap(query), new HashMap<>()).toList(); PagingOffset offset = query.getPagingOffset(QueryRunnerTestHelper.segmentId); List> expectedResults = toExpected( @@ -562,10 +562,10 @@ public class SelectQueryRunnerTest .metrics(Collections.singletonList(QueryRunnerTestHelper.indexMetric)) .build(); - Iterable> results = runner.run(QueryPlus.wrap(query), Maps.newHashMap()).toList(); + Iterable> results = runner.run(QueryPlus.wrap(query), new HashMap<>()).toList(); Iterable> resultsOptimize = toolChest .postMergeQueryDecoration(toolChest.mergeResults(toolChest.preMergeQueryDecoration(runner))) - .run(QueryPlus.wrap(query), Maps.newHashMap()) + .run(QueryPlus.wrap(query), new HashMap<>()) .toList(); final List>> events = toEvents( @@ -616,7 +616,7 @@ public class SelectQueryRunnerTest ) .build(); - Iterable> results = runner.run(QueryPlus.wrap(query), Maps.newHashMap()).toList(); + Iterable> results = runner.run(QueryPlus.wrap(query), new HashMap<>()).toList(); List> expectedResults = Collections.singletonList( new Result( @@ -644,7 +644,7 @@ public class SelectQueryRunnerTest "indexFloat", "indexMaxFloat" ), - Lists.newArrayList() + new ArrayList<>() ) ) ); @@ -661,7 +661,7 @@ public class SelectQueryRunnerTest .metrics(Collections.singletonList("foo2")) .build(); - Iterable> results = runner.run(QueryPlus.wrap(query), Maps.newHashMap()).toList(); + Iterable> results = runner.run(QueryPlus.wrap(query), new HashMap<>()).toList(); final List>> events = toEvents( new String[]{ @@ -935,7 +935,7 @@ public class SelectQueryRunnerTest private List>> toEvents(final String[] dimSpecs, final String[]... 
valueSet) { - List>> events = Lists.newArrayList(); + List>> events = new ArrayList<>(); for (String[] values : valueSet) { events.add( Lists.newArrayList( @@ -945,7 +945,7 @@ public class SelectQueryRunnerTest @Override public Map apply(String input) { - Map event = Maps.newHashMap(); + Map event = new HashMap<>(); String[] values = input.split("\\t"); for (int i = 0; i < dimSpecs.length; i++) { if (dimSpecs[i] == null || i >= dimSpecs.length || i >= values.length) { diff --git a/processing/src/test/java/org/apache/druid/query/spec/SpecificSegmentQueryRunnerTest.java b/processing/src/test/java/org/apache/druid/query/spec/SpecificSegmentQueryRunnerTest.java index 9acfb321979..930e3217503 100644 --- a/processing/src/test/java/org/apache/druid/query/spec/SpecificSegmentQueryRunnerTest.java +++ b/processing/src/test/java/org/apache/druid/query/spec/SpecificSegmentQueryRunnerTest.java @@ -21,8 +21,6 @@ package org.apache.druid.query.spec; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.util.concurrent.MoreExecutors; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.DateTimes; @@ -48,7 +46,9 @@ import org.junit.Assert; import org.junit.Test; import java.io.IOException; +import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -93,7 +93,7 @@ public class SpecificSegmentQueryRunnerTest ); // from accumulate - Map responseContext = Maps.newHashMap(); + Map responseContext = new HashMap<>(); TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("foo") .granularity(Granularities.ALL) @@ -109,13 +109,13 @@ public class SpecificSegmentQueryRunnerTest validate(mapper, descriptor, responseContext); // from toYielder - responseContext = Maps.newHashMap(); + responseContext = new HashMap<>(); results = queryRunner.run(QueryPlus.wrap(query), responseContext); results.toYielder( null, new YieldingAccumulator() { - final List lists = Lists.newArrayList(); + final List lists = new ArrayList<>(); @Override public Object accumulate(Object accumulated, Object in) { @@ -171,7 +171,7 @@ public class SpecificSegmentQueryRunnerTest ) ); - final Map responseContext = Maps.newHashMap(); + final Map responseContext = new HashMap<>(); TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource("foo") .granularity(Granularities.ALL) diff --git a/processing/src/test/java/org/apache/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java b/processing/src/test/java/org/apache/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java index 5c266e135b5..06c7eaa2bb0 100644 --- a/processing/src/test/java/org/apache/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java +++ b/processing/src/test/java/org/apache/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java @@ -21,7 +21,6 @@ package org.apache.druid.query.timeboundary; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; import com.google.common.io.CharSource; import org.apache.commons.lang.StringUtils; import org.apache.druid.java.util.common.DateTimes; @@ -53,6 +52,7 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; @@ -157,7 
+157,7 @@ public class TimeBoundaryQueryRunnerTest timeline.add(index0.getInterval(), "v1", new SingleElementPartitionChunk(segment0)); timeline.add(index1.getInterval(), "v1", new SingleElementPartitionChunk(segment1)); - segmentIdentifiers = Lists.newArrayList(); + segmentIdentifiers = new ArrayList<>(); for (TimelineObjectHolder holder : timeline.lookup(Intervals.of("2011-01-12/2011-01-17"))) { segmentIdentifiers.add(makeIdentifier(holder.getInterval(), holder.getVersion())); } @@ -233,7 +233,7 @@ public class TimeBoundaryQueryRunnerTest .bound(TimeBoundaryQuery.MAX_TIME) .build(); Map context = new ConcurrentHashMap<>(); - context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList()); + context.put(Result.MISSING_SEGMENTS_KEY, new ArrayList<>()); Iterable> results = runner.run(QueryPlus.wrap(timeBoundaryQuery), context).toList(); TimeBoundaryResultValue val = results.iterator().next().getValue(); DateTime minTime = val.getMinTime(); @@ -252,7 +252,7 @@ public class TimeBoundaryQueryRunnerTest .bound(TimeBoundaryQuery.MIN_TIME) .build(); Map context = new ConcurrentHashMap<>(); - context.put(Result.MISSING_SEGMENTS_KEY, Lists.newArrayList()); + context.put(Result.MISSING_SEGMENTS_KEY, new ArrayList<>()); Iterable> results = runner.run(QueryPlus.wrap(timeBoundaryQuery), context).toList(); TimeBoundaryResultValue val = results.iterator().next().getValue(); DateTime minTime = val.getMinTime(); @@ -295,7 +295,7 @@ public class TimeBoundaryQueryRunnerTest @Test public void testMergeResultsEmptyResults() { - List> results = Lists.newArrayList(); + List> results = new ArrayList<>(); TimeBoundaryQuery query = new TimeBoundaryQuery(new TableDataSource("test"), null, null, null, null); Iterable> actual = query.mergeResults(results); diff --git a/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryRunnerTest.java b/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryRunnerTest.java index 2628d77ffee..668ec933e13 100644 --- a/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryRunnerTest.java +++ b/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryRunnerTest.java @@ -23,7 +23,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.primitives.Doubles; import org.apache.druid.common.config.NullHandling; import org.apache.druid.java.util.common.DateTimes; @@ -146,7 +145,7 @@ public class TimeseriesQueryRunnerTest ) .descending(descending) .build(); - Map resultMap = Maps.newHashMap(); + Map resultMap = new HashMap<>(); resultMap.put("rows", 0L); resultMap.put("index", NullHandling.defaultDoubleValue()); resultMap.put("first", NullHandling.defaultDoubleValue()); @@ -805,11 +804,11 @@ public class TimeseriesQueryRunnerTest .descending(descending) .build(); - List> lotsOfZeroes = Lists.newArrayList(); + List> lotsOfZeroes = new ArrayList<>(); final Iterable iterable = Granularities.HOUR.getIterable( new Interval(DateTimes.of("2011-04-14T01"), DateTimes.of("2011-04-15")) ); - Map noRowsResult = Maps.newHashMap(); + Map noRowsResult = new HashMap<>(); noRowsResult.put("rows", 0L); noRowsResult.put("idx", NullHandling.defaultLongValue()); for (Interval interval : iterable) { @@ -1522,7 +1521,7 @@ public class TimeseriesQueryRunnerTest .descending(descending) .build(); - Map resultMap = Maps.newHashMap(); + Map resultMap = new 
HashMap<>(); resultMap.put("rows", 0L); resultMap.put("index", NullHandling.defaultDoubleValue()); resultMap.put("addRowsIndexConstant", NullHandling.replaceWithDefault() ? 1.0 : null); @@ -1663,7 +1662,7 @@ public class TimeseriesQueryRunnerTest .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) .descending(descending) .build(); - Map resultMap = Maps.newHashMap(); + Map resultMap = new HashMap<>(); resultMap.put("rows", 0L); resultMap.put("index", NullHandling.defaultDoubleValue()); resultMap.put("addRowsIndexConstant", NullHandling.replaceWithDefault() ? 1.0 : null); @@ -1704,7 +1703,7 @@ public class TimeseriesQueryRunnerTest .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) .descending(descending) .build(); - Map resultMap = Maps.newHashMap(); + Map resultMap = new HashMap<>(); resultMap.put("rows", 0L); resultMap.put("index", NullHandling.defaultDoubleValue()); resultMap.put("addRowsIndexConstant", NullHandling.replaceWithDefault() ? 1.0 : null); diff --git a/processing/src/test/java/org/apache/druid/query/topn/TopNQueryQueryToolChestTest.java b/processing/src/test/java/org/apache/druid/query/topn/TopNQueryQueryToolChestTest.java index b8207b9381a..290d1c7d87c 100644 --- a/processing/src/test/java/org/apache/druid/query/topn/TopNQueryQueryToolChestTest.java +++ b/processing/src/test/java/org/apache/druid/query/topn/TopNQueryQueryToolChestTest.java @@ -22,7 +22,6 @@ package org.apache.druid.query.topn; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Maps; import org.apache.druid.collections.CloseableStupidPool; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.Intervals; @@ -54,6 +53,7 @@ import java.io.IOException; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.Map; public class TopNQueryQueryToolChestTest @@ -149,7 +149,7 @@ public class TopNQueryQueryToolChestTest null ); - Map context = Maps.newHashMap(); + Map context = new HashMap<>(); context.put("minTopNThreshold", 500); TopNQueryBuilder builder = new TopNQueryBuilder() diff --git a/processing/src/test/java/org/apache/druid/query/topn/TopNQueryRunnerBenchmark.java b/processing/src/test/java/org/apache/druid/query/topn/TopNQueryRunnerBenchmark.java index 5bf9f8eb330..a0c1eea0e37 100644 --- a/processing/src/test/java/org/apache/druid/query/topn/TopNQueryRunnerBenchmark.java +++ b/processing/src/test/java/org/apache/druid/query/topn/TopNQueryRunnerBenchmark.java @@ -24,7 +24,6 @@ import com.carrotsearch.junitbenchmarks.BenchmarkOptions; import com.google.common.base.Supplier; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import org.apache.druid.collections.StupidPool; import org.apache.druid.query.QueryPlus; import org.apache.druid.query.QueryRunner; @@ -80,7 +79,7 @@ public class TopNQueryRunnerBenchmark extends AbstractBenchmark ) .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - private static final Map testCaseMap = Maps.newHashMap(); + private static final Map testCaseMap = new HashMap<>(); @BeforeClass public static void setUp() diff --git a/processing/src/test/java/org/apache/druid/query/topn/TopNQueryRunnerTest.java b/processing/src/test/java/org/apache/druid/query/topn/TopNQueryRunnerTest.java index 5ebca0849de..98fa0135f2c 
100644 --- a/processing/src/test/java/org/apache/druid/query/topn/TopNQueryRunnerTest.java +++ b/processing/src/test/java/org/apache/druid/query/topn/TopNQueryRunnerTest.java @@ -24,7 +24,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.common.primitives.Doubles; import com.google.common.primitives.Longs; @@ -154,7 +153,7 @@ public class TopNQueryRunnerTest () -> ByteBuffer.allocate(20000) ); - List>> retVal = Lists.newArrayList(); + List>> retVal = new ArrayList<>(); retVal.addAll( QueryRunnerTestHelper.makeQueryRunners( new TopNQueryRunnerFactory( @@ -3918,7 +3917,7 @@ public class TopNQueryRunnerTest .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - Map map = Maps.newHashMap(); + Map map = new HashMap<>(); map.put("null_column", null); map.put("rows", 1209L); map.put("index", 503332.5071372986D); @@ -3966,7 +3965,7 @@ public class TopNQueryRunnerTest .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); - Map map = Maps.newHashMap(); + Map map = new HashMap<>(); map.put("null_column", null); map.put("rows", 1209L); map.put("index", 503332.5071372986D); @@ -4000,7 +3999,7 @@ public class TopNQueryRunnerTest .aggregators(commonAggregators) .build(); - Map map = Maps.newHashMap(); + Map map = new HashMap<>(); map.put("partial_null_column", null); map.put("rows", 22L); map.put("index", 7583.691513061523D); @@ -4038,7 +4037,7 @@ public class TopNQueryRunnerTest .aggregators(commonAggregators) .build(); - Map map = Maps.newHashMap(); + Map map = new HashMap<>(); map.put("partial_null_column", null); map.put("rows", 22L); map.put("index", 7583.691513061523D); @@ -4257,7 +4256,7 @@ public class TopNQueryRunnerTest .filters(extractionFilter) .build(); - Map map = Maps.newHashMap(); + Map map = new HashMap<>(); map.put("null_column", null); map.put("rows", 1209L); map.put("index", 503332.5071372986D); @@ -4328,7 +4327,7 @@ public class TopNQueryRunnerTest .filters(extractionFilter) .build(); - Map map = Maps.newHashMap(); + Map map = new HashMap<>(); map.put("null_column", null); map.put("rows", 1209L); map.put("index", 503332.5071372986D); diff --git a/processing/src/test/java/org/apache/druid/query/topn/TopNQueryRunnerTestHelper.java b/processing/src/test/java/org/apache/druid/query/topn/TopNQueryRunnerTestHelper.java index aa3f9f9cbbe..813c2352ef4 100644 --- a/processing/src/test/java/org/apache/druid/query/topn/TopNQueryRunnerTestHelper.java +++ b/processing/src/test/java/org/apache/druid/query/topn/TopNQueryRunnerTestHelper.java @@ -20,11 +20,11 @@ package org.apache.druid.query.topn; import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.query.Result; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -34,7 +34,7 @@ public class TopNQueryRunnerTestHelper { public static Result createExpectedRows(String date, String[] columnNames, Iterable values) { - List expected = Lists.newArrayList(); + List expected = new ArrayList<>(); for (Object[] value : values) { Preconditions.checkArgument(value.length == columnNames.length); Map theVals = Maps.newHashMapWithExpectedSize(value.length); diff --git 
a/processing/src/test/java/org/apache/druid/segment/AppendTest.java b/processing/src/test/java/org/apache/druid/segment/AppendTest.java
index 6b52e122b35..de41034f152 100644
--- a/processing/src/test/java/org/apache/druid/segment/AppendTest.java
+++ b/processing/src/test/java/org/apache/druid/segment/AppendTest.java
@@ -65,6 +65,7 @@ import org.junit.Ignore;
 import org.junit.Test;
 
 import java.nio.ByteBuffer;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
@@ -449,7 +450,7 @@ public class AppendTest
         new Result(
             DateTimes.of("2011-01-12T00:00:00.000Z"),
             new TopNResultValue(
-                Lists.<Map<String, Object>>newArrayList()
+                new ArrayList<Map<String, Object>>()
             )
         )
     );
diff --git a/processing/src/test/java/org/apache/druid/segment/IndexBuilder.java b/processing/src/test/java/org/apache/druid/segment/IndexBuilder.java
index cbf5a4a8baf..324113a8f65 100644
--- a/processing/src/test/java/org/apache/druid/segment/IndexBuilder.java
+++ b/processing/src/test/java/org/apache/druid/segment/IndexBuilder.java
@@ -36,6 +36,7 @@ import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory;
 
 import java.io.File;
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.UUID;
@@ -58,7 +59,7 @@ public class IndexBuilder
   private IndexSpec indexSpec = new IndexSpec();
   private int maxRows = DEFAULT_MAX_ROWS;
 
-  private final List<InputRow> rows = Lists.newArrayList();
+  private final List<InputRow> rows = new ArrayList<>();
 
   private IndexBuilder()
   {
@@ -131,7 +132,7 @@ public class IndexBuilder
     IndexMerger indexMerger = TestHelper.getTestIndexMergerV9(segmentWriteOutMediumFactory);
     Preconditions.checkNotNull(tmpDir, "tmpDir");
 
-    final List<QueryableIndex> persisted = Lists.newArrayList();
+    final List<QueryableIndex> persisted = new ArrayList<>();
     try {
       for (int i = 0; i < rows.size(); i += ROWS_PER_INDEX_FOR_MERGING) {
         persisted.add(
diff --git a/processing/src/test/java/org/apache/druid/segment/IndexMergerV9WithSpatialIndexTest.java b/processing/src/test/java/org/apache/druid/segment/IndexMergerV9WithSpatialIndexTest.java
index 18b93087282..806e7600611 100644
--- a/processing/src/test/java/org/apache/druid/segment/IndexMergerV9WithSpatialIndexTest.java
+++ b/processing/src/test/java/org/apache/druid/segment/IndexMergerV9WithSpatialIndexTest.java
@@ -22,7 +22,6 @@ package org.apache.druid.segment;
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import org.apache.commons.io.FileUtils;
 import org.apache.druid.collections.spatial.search.RadiusBound;
 import org.apache.druid.collections.spatial.search.RectangularBound;
@@ -61,6 +60,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Random;
 import java.util.concurrent.ThreadLocalRandom;
@@ -585,7 +585,7 @@ public class IndexMergerV9WithSpatialIndexTest
       factory.getToolchest()
     );
 
-    TestHelper.assertExpectedResults(expectedResults, runner.run(QueryPlus.wrap(query), Maps.newHashMap()));
+
TestHelper.assertExpectedResults(expectedResults, runner.run(QueryPlus.wrap(query), new HashMap<>())); } catch (Exception e) { throw Throwables.propagate(e); @@ -728,7 +728,7 @@ public class IndexMergerV9WithSpatialIndexTest factory.getToolchest() ); - TestHelper.assertExpectedResults(expectedResults, runner.run(QueryPlus.wrap(query), Maps.newHashMap())); + TestHelper.assertExpectedResults(expectedResults, runner.run(QueryPlus.wrap(query), new HashMap<>())); } catch (Exception e) { throw Throwables.propagate(e); diff --git a/processing/src/test/java/org/apache/druid/segment/SchemalessIndexTest.java b/processing/src/test/java/org/apache/druid/segment/SchemalessIndexTest.java index a5c8db9852b..c2621f43e08 100644 --- a/processing/src/test/java/org/apache/druid/segment/SchemalessIndexTest.java +++ b/processing/src/test/java/org/apache/druid/segment/SchemalessIndexTest.java @@ -24,7 +24,6 @@ import com.google.common.base.Function; import com.google.common.base.Throwables; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import org.apache.druid.data.input.MapBasedInputRow; import org.apache.druid.hll.HyperLogLogHash; import org.apache.druid.jackson.DefaultObjectMapper; @@ -57,8 +56,10 @@ import javax.annotation.Nullable; import java.io.File; import java.io.IOException; import java.net.URL; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -84,11 +85,11 @@ public class SchemalessIndexTest private static final IndexSpec indexSpec = new IndexSpec(); - private static final List> events = Lists.newArrayList(); + private static final List> events = new ArrayList<>(); - private static final Map> incrementalIndexes = Maps.newHashMap(); - private static final Map> mergedIndexes = Maps.newHashMap(); - private static final List rowPersistedIndexes = Lists.newArrayList(); + private static final Map> incrementalIndexes = new HashMap<>(); + private static final Map> mergedIndexes = new HashMap<>(); + private static final List rowPersistedIndexes = new ArrayList<>(); private static IncrementalIndex index = null; private static QueryableIndex mergedIndex = null; @@ -135,7 +136,7 @@ public class SchemalessIndexTest return index; } } else { - entry = Maps.newHashMap(); + entry = new HashMap<>(); incrementalIndexes.put(index1, entry); } @@ -163,7 +164,7 @@ public class SchemalessIndexTest .buildOnheap(); } - final List dims = Lists.newArrayList(); + final List dims = new ArrayList<>(); for (final Map.Entry val : event.entrySet()) { if (!val.getKey().equalsIgnoreCase(TIMESTAMP) && !METRICS.contains(val.getKey())) { dims.add(val.getKey()); @@ -247,7 +248,7 @@ public class SchemalessIndexTest return index; } } else { - entry = Maps.newHashMap(); + entry = new HashMap<>(); mergedIndexes.put(index1, entry); } @@ -297,7 +298,7 @@ public class SchemalessIndexTest mergedFile.mkdirs(); mergedFile.deleteOnExit(); - List indexesToMerge = Lists.newArrayList(); + List indexesToMerge = new ArrayList<>(); for (int index : indexes) { indexesToMerge.add(rowPersistedIndexes.get(index)); } @@ -362,7 +363,7 @@ public class SchemalessIndexTest for (final Map event : events) { final long timestamp = new DateTime(event.get(TIMESTAMP), ISOChronology.getInstanceUTC()).getMillis(); - final List dims = Lists.newArrayList(); + final List dims = new ArrayList<>(); for (Map.Entry entry : event.entrySet()) { if (!entry.getKey().equalsIgnoreCase(TIMESTAMP) && 
!METRICS.contains(entry.getKey())) { dims.add(entry.getKey()); @@ -422,7 +423,7 @@ public class SchemalessIndexTest for (Object obj : events) { final Map event = jsonMapper.convertValue(obj, Map.class); - final List dims = Lists.newArrayList(); + final List dims = new ArrayList<>(); for (Map.Entry entry : event.entrySet()) { if (!entry.getKey().equalsIgnoreCase(TIMESTAMP) && !METRICS.contains(entry.getKey())) { dims.add(entry.getKey()); @@ -448,7 +449,7 @@ public class SchemalessIndexTest private List makeFilesToMap(File tmpFile, Iterable> files) throws IOException { - List filesToMap = Lists.newArrayList(); + List filesToMap = new ArrayList<>(); for (Pair file : files) { IncrementalIndex index = makeIncrementalIndex(file.lhs, file.rhs); File theFile = new File(tmpFile, file.lhs); diff --git a/processing/src/test/java/org/apache/druid/segment/TestHelper.java b/processing/src/test/java/org/apache/druid/segment/TestHelper.java index c02e99e8055..19741e3bceb 100644 --- a/processing/src/test/java/org/apache/druid/segment/TestHelper.java +++ b/processing/src/test/java/org/apache/druid/segment/TestHelper.java @@ -23,7 +23,6 @@ import com.fasterxml.jackson.databind.InjectableValues; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import org.apache.druid.data.input.MapBasedRow; import org.apache.druid.data.input.Row; import org.apache.druid.jackson.DefaultObjectMapper; @@ -39,6 +38,7 @@ import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory; import org.apache.druid.timeline.DataSegment; import org.junit.Assert; +import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -341,7 +341,7 @@ public class TestHelper { Preconditions.checkArgument(vals.length % 2 == 0); - Map theVals = Maps.newHashMap(); + Map theVals = new HashMap<>(); for (int i = 0; i < vals.length; i += 2) { theVals.put(vals[i].toString(), vals[i + 1]); } diff --git a/processing/src/test/java/org/apache/druid/segment/data/GenericIndexedTest.java b/processing/src/test/java/org/apache/druid/segment/data/GenericIndexedTest.java index 3ba0d47a5ae..651b8b5b57e 100644 --- a/processing/src/test/java/org/apache/druid/segment/data/GenericIndexedTest.java +++ b/processing/src/test/java/org/apache/druid/segment/data/GenericIndexedTest.java @@ -19,7 +19,6 @@ package org.apache.druid.segment.data; -import com.google.common.collect.Maps; import org.junit.Assert; import org.junit.Test; @@ -99,7 +98,7 @@ public class GenericIndexedTest } if (allowReverseLookup) { - HashMap mixedUp = Maps.newHashMap(); + HashMap mixedUp = new HashMap<>(); for (int i = 0; i < strings.length; i++) { mixedUp.put(strings[i], i); } diff --git a/processing/src/test/java/org/apache/druid/segment/filter/BaseFilterTest.java b/processing/src/test/java/org/apache/druid/segment/filter/BaseFilterTest.java index 95e387f89ce..100adf053ae 100644 --- a/processing/src/test/java/org/apache/druid/segment/filter/BaseFilterTest.java +++ b/processing/src/test/java/org/apache/druid/segment/filter/BaseFilterTest.java @@ -24,8 +24,6 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import org.apache.druid.common.guava.SettableSupplier; import org.apache.druid.data.input.InputRow; import 
org.apache.druid.java.util.common.Intervals; @@ -73,6 +71,7 @@ import org.junit.rules.TemporaryFolder; import org.junit.runners.Parameterized; import java.io.Closeable; +import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; @@ -167,7 +166,7 @@ public abstract class BaseFilterTest public static Collection makeConstructors() { - final List constructors = Lists.newArrayList(); + final List constructors = new ArrayList<>(); final Map bitmapSerdeFactories = ImmutableMap.of( "concise", new ConciseBitmapSerdeFactory(), @@ -274,7 +273,7 @@ public abstract class BaseFilterTest .getColumnSelectorFactory() .makeDimensionSelector(new DefaultDimensionSpec(selectColumn, selectColumn)); - final List values = Lists.newArrayList(); + final List values = new ArrayList<>(); while (!input.isDone()) { IndexedInts row = selector.getRow(); @@ -367,7 +366,7 @@ public abstract class BaseFilterTest .getColumnSelectorFactory() .makeDimensionSelector(new DefaultDimensionSpec(selectColumn, selectColumn)); - final List values = Lists.newArrayList(); + final List values = new ArrayList<>(); while (!input.isDone()) { IndexedInts row = selector.getRow(); @@ -389,7 +388,7 @@ public abstract class BaseFilterTest ) { // Generate rowType - final Map rowSignature = Maps.newHashMap(); + final Map rowSignature = new HashMap<>(); for (String columnName : Iterables.concat(adapter.getAvailableDimensions(), adapter.getAvailableMetrics())) { rowSignature.put(columnName, adapter.getColumnCapabilities(columnName).getType()); } @@ -399,7 +398,7 @@ public abstract class BaseFilterTest final ValueMatcher matcher = makeFilter(filter).makeMatcher( VIRTUAL_COLUMNS.wrap(RowBasedColumnSelectorFactory.create(rowSupplier, rowSignature)) ); - final List values = Lists.newArrayList(); + final List values = new ArrayList<>(); for (InputRow row : rows) { rowSupplier.set(row); if (matcher.matches()) { diff --git a/processing/src/test/java/org/apache/druid/segment/filter/ExpressionFilterTest.java b/processing/src/test/java/org/apache/druid/segment/filter/ExpressionFilterTest.java index 8abc51c35a2..3d3d50c971a 100644 --- a/processing/src/test/java/org/apache/druid/segment/filter/ExpressionFilterTest.java +++ b/processing/src/test/java/org/apache/druid/segment/filter/ExpressionFilterTest.java @@ -47,6 +47,7 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.io.Closeable; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -252,7 +253,7 @@ public class ExpressionFilterTest extends BaseFilterTest Assert.assertEquals(EDF("dim2 == '1'").getRequiredColumns(), Sets.newHashSet("dim2")); Assert.assertEquals(EDF("dim3 < '2'").getRequiredColumns(), Sets.newHashSet("dim3")); Assert.assertEquals(EDF("dim4 == ''").getRequiredColumns(), Sets.newHashSet("dim4")); - Assert.assertEquals(EDF("1 + 1").getRequiredColumns(), Sets.newHashSet()); + Assert.assertEquals(EDF("1 + 1").getRequiredColumns(), new HashSet<>()); Assert.assertEquals(EDF("dim0 == dim3").getRequiredColumns(), Sets.newHashSet("dim0", "dim3")); Assert.assertEquals(EDF("missing == ''").getRequiredColumns(), Sets.newHashSet("missing")); } diff --git a/processing/src/test/java/org/apache/druid/segment/filter/InFilterTest.java b/processing/src/test/java/org/apache/druid/segment/filter/InFilterTest.java index c9d824fb681..77aa41ff9c2 100644 --- a/processing/src/test/java/org/apache/druid/segment/filter/InFilterTest.java +++ 
b/processing/src/test/java/org/apache/druid/segment/filter/InFilterTest.java @@ -48,6 +48,7 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.io.Closeable; +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -349,7 +350,7 @@ public class InFilterTest extends BaseFilterTest private DimFilter toInFilter(String dim) { - List emptyList = Lists.newArrayList(); + List emptyList = new ArrayList<>(); return new InDimFilter(dim, emptyList, null); } diff --git a/processing/src/test/java/org/apache/druid/segment/filter/SpatialFilterBonusTest.java b/processing/src/test/java/org/apache/druid/segment/filter/SpatialFilterBonusTest.java index af76178c0e5..3ff9fb73b5e 100644 --- a/processing/src/test/java/org/apache/druid/segment/filter/SpatialFilterBonusTest.java +++ b/processing/src/test/java/org/apache/druid/segment/filter/SpatialFilterBonusTest.java @@ -22,7 +22,6 @@ package org.apache.druid.segment.filter; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.google.common.collect.Sets; import org.apache.druid.collections.spatial.search.RadiusBound; import org.apache.druid.collections.spatial.search.RectangularBound; import org.apache.druid.data.input.MapBasedInputRow; @@ -71,6 +70,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Random; import java.util.Set; @@ -129,7 +129,7 @@ public class SpatialFilterBonusTest Collections.singletonList( new SpatialDimensionSchema( "dim.geo", - Lists.newArrayList() + new ArrayList<>() ) ) ) @@ -213,7 +213,7 @@ public class SpatialFilterBonusTest ); // Add a bunch of random points, without replacement - Set alreadyChosen = Sets.newHashSet(); + Set alreadyChosen = new HashSet<>(); Random rand = ThreadLocalRandom.current(); for (int i = 6; i < NUM_POINTS; i++) { String coord = null; @@ -277,7 +277,7 @@ public class SpatialFilterBonusTest Collections.singletonList( new SpatialDimensionSchema( "dim.geo", - Lists.newArrayList() + new ArrayList<>() ) ) ) @@ -301,7 +301,7 @@ public class SpatialFilterBonusTest Collections.singletonList( new SpatialDimensionSchema( "dim.geo", - Lists.newArrayList() + new ArrayList<>() ) ) ) @@ -324,7 +324,7 @@ public class SpatialFilterBonusTest Collections.singletonList( new SpatialDimensionSchema( "dim.geo", - Lists.newArrayList() + new ArrayList<>() ) ) ) diff --git a/processing/src/test/java/org/apache/druid/segment/filter/SpatialFilterTest.java b/processing/src/test/java/org/apache/druid/segment/filter/SpatialFilterTest.java index 59688540bc0..466c478707c 100644 --- a/processing/src/test/java/org/apache/druid/segment/filter/SpatialFilterTest.java +++ b/processing/src/test/java/org/apache/druid/segment/filter/SpatialFilterTest.java @@ -22,7 +22,6 @@ package org.apache.druid.segment.filter; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import org.apache.druid.collections.spatial.search.RadiusBound; import org.apache.druid.collections.spatial.search.RectangularBound; import org.apache.druid.data.input.MapBasedInputRow; @@ -67,6 +66,7 @@ import java.io.IOException; import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Random; import 
java.util.concurrent.ThreadLocalRandom; @@ -574,7 +574,7 @@ public class SpatialFilterTest factory.getToolchest() ); - TestHelper.assertExpectedResults(expectedResults, runner.run(QueryPlus.wrap(query), Maps.newHashMap())); + TestHelper.assertExpectedResults(expectedResults, runner.run(QueryPlus.wrap(query), new HashMap<>())); } catch (Exception e) { throw Throwables.propagate(e); @@ -627,7 +627,7 @@ public class SpatialFilterTest factory.getToolchest() ); - TestHelper.assertExpectedResults(expectedResults, runner.run(QueryPlus.wrap(query), Maps.newHashMap())); + TestHelper.assertExpectedResults(expectedResults, runner.run(QueryPlus.wrap(query), new HashMap<>())); } catch (Exception e) { throw Throwables.propagate(e); @@ -715,7 +715,7 @@ public class SpatialFilterTest factory.getToolchest() ); - TestHelper.assertExpectedResults(expectedResults, runner.run(QueryPlus.wrap(query), Maps.newHashMap())); + TestHelper.assertExpectedResults(expectedResults, runner.run(QueryPlus.wrap(query), new HashMap<>())); } catch (Exception e) { throw Throwables.propagate(e); diff --git a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowCompTest.java b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowCompTest.java index eb1092c476b..a062aa3a99c 100644 --- a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowCompTest.java +++ b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowCompTest.java @@ -20,7 +20,6 @@ package org.apache.druid.segment.incremental; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import org.apache.druid.data.input.MapBasedInputRow; import org.apache.druid.query.aggregation.CountAggregatorFactory; import org.junit.Assert; @@ -28,6 +27,7 @@ import org.junit.Test; import java.util.Arrays; import java.util.Comparator; +import java.util.HashMap; import java.util.Map; /** @@ -76,7 +76,7 @@ public class IncrementalIndexRowCompTest private MapBasedInputRow toMapRow(long time, Object... dimAndVal) { - Map data = Maps.newHashMap(); + Map data = new HashMap<>(); for (int i = 0; i < dimAndVal.length; i += 2) { data.put((String) dimAndVal[i], dimAndVal[i + 1]); } diff --git a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowSizeTest.java b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowSizeTest.java index 096cd01e505..0952c436c4f 100644 --- a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowSizeTest.java +++ b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexRowSizeTest.java @@ -19,13 +19,13 @@ package org.apache.druid.segment.incremental; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import org.apache.druid.data.input.MapBasedInputRow; import org.apache.druid.query.aggregation.CountAggregatorFactory; import org.junit.Assert; import org.junit.Test; import java.util.Arrays; +import java.util.HashMap; import java.util.Map; /** @@ -88,7 +88,7 @@ public class IncrementalIndexRowSizeTest private MapBasedInputRow toMapRow(long time, Object... 
dimAndVal) { - Map data = Maps.newHashMap(); + Map data = new HashMap<>(); for (int i = 0; i < dimAndVal.length; i += 2) { data.put((String) dimAndVal[i], dimAndVal[i + 1]); } diff --git a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexTest.java b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexTest.java index 31266af15f5..3dfd3210d40 100644 --- a/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexTest.java +++ b/processing/src/test/java/org/apache/druid/segment/incremental/IncrementalIndexTest.java @@ -48,6 +48,7 @@ import org.junit.runners.Parameterized; import java.io.IOException; import java.nio.ByteBuffer; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; @@ -107,7 +108,7 @@ public class IncrementalIndexTest .withMetrics(metrics) .build(); - final List constructors = Lists.newArrayList(); + final List constructors = new ArrayList<>(); for (final Boolean sortFacts : ImmutableList.of(false, true)) { constructors.add( new Object[]{ diff --git a/server/src/main/java/org/apache/druid/client/BrokerServerView.java b/server/src/main/java/org/apache/druid/client/BrokerServerView.java index f4e874d8d73..574cdc874b2 100644 --- a/server/src/main/java/org/apache/druid/client/BrokerServerView.java +++ b/server/src/main/java/org/apache/druid/client/BrokerServerView.java @@ -22,7 +22,6 @@ package org.apache.druid.client; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Predicate; import com.google.common.collect.Iterables; -import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import com.google.inject.Inject; import org.apache.druid.client.selector.QueryableDruidServer; @@ -45,6 +44,7 @@ import org.apache.druid.timeline.VersionedIntervalTimeline; import org.apache.druid.timeline.partition.PartitionChunk; import javax.annotation.Nullable; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; @@ -98,8 +98,8 @@ public class BrokerServerView implements TimelineServerView this.tierSelectorStrategy = tierSelectorStrategy; this.emitter = emitter; this.clients = new ConcurrentHashMap<>(); - this.selectors = Maps.newHashMap(); - this.timelines = Maps.newHashMap(); + this.selectors = new HashMap<>(); + this.timelines = new HashMap<>(); this.segmentFilter = new Predicate>() { diff --git a/server/src/main/java/org/apache/druid/client/CachingClusteredClient.java b/server/src/main/java/org/apache/druid/client/CachingClusteredClient.java index 3ec8b2db2dd..ad40a06b84f 100644 --- a/server/src/main/java/org/apache/druid/client/CachingClusteredClient.java +++ b/server/src/main/java/org/apache/druid/client/CachingClusteredClient.java @@ -25,11 +25,9 @@ import com.google.common.base.Function; import com.google.common.base.Optional; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import com.google.common.collect.RangeSet; -import com.google.common.collect.Sets; import com.google.common.hash.Hasher; import com.google.common.hash.Hashing; import com.google.inject.Inject; @@ -80,11 +78,14 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.Iterator; +import 
java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedMap; +import java.util.TreeMap; import java.util.function.UnaryOperator; import java.util.stream.Collectors; @@ -216,7 +217,7 @@ public class CachingClusteredClient implements QuerySegmentWalker private final boolean isBySegment; private final int uncoveredIntervalsLimit; private final Query downstreamQuery; - private final Map cachePopulatorKeyMap = Maps.newHashMap(); + private final Map cachePopulatorKeyMap = new HashMap<>(); private final List intervals; SpecificQueryRunnable(final QueryPlus queryPlus, final Map responseContext) @@ -297,8 +298,8 @@ public class CachingClusteredClient implements QuerySegmentWalker intervals.stream().flatMap(i -> timeline.lookup(i).stream()).collect(Collectors.toList()) ); - final Set segments = Sets.newLinkedHashSet(); - final Map>> dimensionRangeCache = Maps.newHashMap(); + final Set segments = new LinkedHashSet<>(); + final Map>> dimensionRangeCache = new HashMap<>(); // Filter unneeded chunks based on partition dimension for (TimelineObjectHolder holder : serversLookup) { final Set> filteredChunks = DimFilterUtils.filterShards( @@ -405,7 +406,7 @@ public class CachingClusteredClient implements QuerySegmentWalker if (queryCacheKey == null) { return Collections.emptyList(); } - final List> alreadyCachedResults = Lists.newArrayList(); + final List> alreadyCachedResults = new ArrayList<>(); Map perSegmentCacheKeys = computePerSegmentCacheKeys(segments, queryCacheKey); // Pull cached segments from cache and remove from set of segments to query final Map cachedValues = computeCachedValues(perSegmentCacheKeys); @@ -474,7 +475,7 @@ public class CachingClusteredClient implements QuerySegmentWalker private SortedMap> groupSegmentsByServer(Set segments) { - final SortedMap> serverSegments = Maps.newTreeMap(); + final SortedMap> serverSegments = new TreeMap<>(); for (ServerToSegment serverToSegment : segments) { final QueryableDruidServer queryableDruidServer = serverToSegment.getServer().pick(); diff --git a/server/src/main/java/org/apache/druid/client/CoordinatorServerView.java b/server/src/main/java/org/apache/druid/client/CoordinatorServerView.java index c4c10185279..dd2198d21c2 100644 --- a/server/src/main/java/org/apache/druid/client/CoordinatorServerView.java +++ b/server/src/main/java/org/apache/druid/client/CoordinatorServerView.java @@ -20,7 +20,6 @@ package org.apache.druid.client; import com.google.common.collect.Iterables; -import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import com.google.inject.Inject; import org.apache.druid.java.util.common.concurrent.Execs; @@ -32,6 +31,7 @@ import org.apache.druid.timeline.VersionedIntervalTimeline; import org.apache.druid.timeline.partition.PartitionChunk; import java.util.Collection; +import java.util.HashMap; import java.util.Map; import java.util.concurrent.ExecutorService; @@ -55,8 +55,8 @@ public class CoordinatorServerView implements InventoryView ) { this.baseView = baseView; - this.segmentLoadInfos = Maps.newHashMap(); - this.timelines = Maps.newHashMap(); + this.segmentLoadInfos = new HashMap<>(); + this.timelines = new HashMap<>(); ExecutorService exec = Execs.singleThreaded("CoordinatorServerView-%s"); baseView.registerSegmentCallback( diff --git a/server/src/main/java/org/apache/druid/client/HttpServerInventoryView.java b/server/src/main/java/org/apache/druid/client/HttpServerInventoryView.java index fc0bc6e2f0f..855f5824822 100644 --- 
a/server/src/main/java/org/apache/druid/client/HttpServerInventoryView.java +++ b/server/src/main/java/org/apache/druid/client/HttpServerInventoryView.java @@ -25,7 +25,6 @@ import com.google.common.base.Function; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.base.Predicates; -import com.google.common.collect.Maps; import com.google.common.net.HostAndPort; import com.google.inject.Inject; import org.apache.druid.concurrent.LifecycleLock; @@ -554,7 +553,7 @@ public class HttpServerInventoryView implements ServerInventoryView, FilteredSer @Override public void fullSync(List changes) { - Map toRemove = Maps.newHashMap(druidServer.getSegments()); + Map toRemove = new HashMap<>(druidServer.getSegments()); for (DataSegmentChangeRequest request : changes) { if (request instanceof SegmentChangeRequestLoad) { diff --git a/server/src/main/java/org/apache/druid/client/ServerViewUtil.java b/server/src/main/java/org/apache/druid/client/ServerViewUtil.java index fd646b013df..8a52cacf299 100644 --- a/server/src/main/java/org/apache/druid/client/ServerViewUtil.java +++ b/server/src/main/java/org/apache/druid/client/ServerViewUtil.java @@ -19,7 +19,6 @@ package org.apache.druid.client; -import com.google.common.collect.Lists; import org.apache.druid.client.selector.ServerSelector; import org.apache.druid.query.DataSource; import org.apache.druid.query.LocatedSegmentDescriptor; @@ -31,6 +30,7 @@ import org.apache.druid.timeline.TimelineObjectHolder; import org.apache.druid.timeline.partition.PartitionChunk; import org.joda.time.Interval; +import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -59,7 +59,7 @@ public class ServerViewUtil if (timeline == null) { return Collections.emptyList(); } - List located = Lists.newArrayList(); + List located = new ArrayList<>(); for (Interval interval : intervals) { for (TimelineObjectHolder holder : timeline.lookup(interval)) { for (PartitionChunk chunk : holder.getObject()) { diff --git a/server/src/main/java/org/apache/druid/client/cache/HybridCache.java b/server/src/main/java/org/apache/druid/client/cache/HybridCache.java index d940f716618..eb74d339e90 100644 --- a/server/src/main/java/org/apache/druid/client/cache/HybridCache.java +++ b/server/src/main/java/org/apache/druid/client/cache/HybridCache.java @@ -19,13 +19,13 @@ package org.apache.druid.client.cache; -import com.google.common.collect.Maps; import com.google.common.collect.Sets; import org.apache.druid.java.util.common.logger.Logger; import org.apache.druid.java.util.emitter.service.ServiceEmitter; import javax.annotation.Nullable; import java.util.Collections; +import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicLong; @@ -108,7 +108,7 @@ public class HybridCache implements Cache missCount.addAndGet(remaining.size() - size); if (size != 0) { - res = Maps.newHashMap(res); + res = new HashMap<>(res); res.putAll(res2); } } diff --git a/server/src/main/java/org/apache/druid/client/cache/MapCache.java b/server/src/main/java/org/apache/druid/client/cache/MapCache.java index e05c35a515c..2af7971c3be 100644 --- a/server/src/main/java/org/apache/druid/client/cache/MapCache.java +++ b/server/src/main/java/org/apache/druid/client/cache/MapCache.java @@ -19,13 +19,13 @@ package org.apache.druid.client.cache; -import com.google.common.collect.Maps; import com.google.common.primitives.Ints; import org.apache.druid.java.util.emitter.service.ServiceEmitter; import 
java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -59,7 +59,7 @@ public class MapCache implements Cache this.byteCountingLRUMap = byteCountingLRUMap; this.baseMap = Collections.synchronizedMap(byteCountingLRUMap); - namespaceId = Maps.newHashMap(); + namespaceId = new HashMap<>(); ids = new AtomicInteger(); } @@ -103,7 +103,7 @@ public class MapCache implements Cache @Override public Map getBulk(Iterable keys) { - Map retVal = Maps.newHashMap(); + Map retVal = new HashMap<>(); for (NamedKey key : keys) { final byte[] value = get(key); if (value != null) { diff --git a/server/src/main/java/org/apache/druid/client/cache/MemcachedCache.java b/server/src/main/java/org/apache/druid/client/cache/MemcachedCache.java index a3fd26caf07..3afa1dd5fc0 100644 --- a/server/src/main/java/org/apache/druid/client/cache/MemcachedCache.java +++ b/server/src/main/java/org/apache/druid/client/cache/MemcachedCache.java @@ -542,7 +542,7 @@ public class MemcachedCache implements Cache } ); - Map results = Maps.newHashMap(); + Map results = new HashMap<>(); BulkFuture> future; try { diff --git a/server/src/main/java/org/apache/druid/client/selector/CustomTierSelectorStrategy.java b/server/src/main/java/org/apache/druid/client/selector/CustomTierSelectorStrategy.java index fb47baaae15..afeacbfad66 100644 --- a/server/src/main/java/org/apache/druid/client/selector/CustomTierSelectorStrategy.java +++ b/server/src/main/java/org/apache/druid/client/selector/CustomTierSelectorStrategy.java @@ -21,9 +21,9 @@ package org.apache.druid.client.selector; import com.fasterxml.jackson.annotation.JacksonInject; import com.fasterxml.jackson.annotation.JsonCreator; -import com.google.common.collect.Maps; import java.util.Comparator; +import java.util.HashMap; import java.util.Map; /** @@ -40,7 +40,7 @@ public class CustomTierSelectorStrategy extends AbstractTierSelectorStrategy { super(serverSelectorStrategy); - final Map lookup = Maps.newHashMap(); + final Map lookup = new HashMap<>(); int pos = 0; for (Integer integer : config.getPriorities()) { lookup.put(integer, pos); diff --git a/server/src/main/java/org/apache/druid/client/selector/CustomTierSelectorStrategyConfig.java b/server/src/main/java/org/apache/druid/client/selector/CustomTierSelectorStrategyConfig.java index 7d5c992220a..3766daf178a 100644 --- a/server/src/main/java/org/apache/druid/client/selector/CustomTierSelectorStrategyConfig.java +++ b/server/src/main/java/org/apache/druid/client/selector/CustomTierSelectorStrategyConfig.java @@ -20,8 +20,8 @@ package org.apache.druid.client.selector; import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.collect.Lists; +import java.util.ArrayList; import java.util.List; /** @@ -29,7 +29,7 @@ import java.util.List; public class CustomTierSelectorStrategyConfig { @JsonProperty - private List priorities = Lists.newArrayList(); + private List priorities = new ArrayList<>(); public List getPriorities() { diff --git a/server/src/main/java/org/apache/druid/curator/announcement/Announcer.java b/server/src/main/java/org/apache/druid/curator/announcement/Announcer.java index 976b40097ac..50193db6850 100644 --- a/server/src/main/java/org/apache/druid/curator/announcement/Announcer.java +++ b/server/src/main/java/org/apache/druid/curator/announcement/Announcer.java @@ -21,8 +21,6 @@ package org.apache.druid.curator.announcement; import com.google.common.annotations.VisibleForTesting; 
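The Announcer change here is typical of the whole patch: Guava's collection factories (Lists.newArrayList, Maps.newHashMap, Sets.newHashSet) existed to spare pre-Java-7 code from repeating type arguments, and the diamond operator makes the plain JDK constructors equally concise. A minimal sketch of the substitution; the names toAnnounce and pathsToReinstate merely echo the Announcer fields, and nothing below is copied from the Druid sources:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class GuavaToJdkSketch
{
  public static void main(String[] args)
  {
    // Guava: List<String> toAnnounce = Lists.newArrayList();
    // The diamond operator (Java 7+) infers <String>, so Guava buys nothing here.
    List<String> toAnnounce = new ArrayList<>();

    // Guava: Map<String, Object> context = Maps.newHashMap();
    Map<String, Object> context = new HashMap<>();

    // Guava: Set<String> pathsToReinstate = Sets.newHashSet();
    Set<String> pathsToReinstate = new HashSet<>();

    toAnnounce.add("/druid/announcements");
    context.put("priority", 0);
    pathsToReinstate.add("/druid/segments");
    System.out.println(toAnnounce + " " + context + " " + pathsToReinstate);
  }
}

The replacement is behavior-preserving: Guava's no-argument factories simply delegate to these same JDK constructors.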
import com.google.common.base.Throwables; -import com.google.common.collect.Lists; -import com.google.common.collect.Sets; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.api.transaction.CuratorTransaction; import org.apache.curator.framework.api.transaction.CuratorTransactionFinal; @@ -43,6 +41,7 @@ import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.data.Stat; +import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; @@ -65,8 +64,8 @@ public class Announcer private final PathChildrenCacheFactory factory; private final ExecutorService pathChildrenCacheExecutor; - private final List toAnnounce = Lists.newArrayList(); - private final List toUpdate = Lists.newArrayList(); + private final List toAnnounce = new ArrayList<>(); + private final List toUpdate = new ArrayList<>(); private final ConcurrentMap listeners = new ConcurrentHashMap<>(); private final ConcurrentMap> announcements = new ConcurrentHashMap<>(); private final List parentsIBuilt = new CopyOnWriteArrayList(); @@ -258,7 +257,7 @@ public class Announcer // an adversary in the system, they could also delete the ephemeral node before the cache sees // it. This does not protect from that case, so don't have adversaries. - Set pathsToReinstate = Sets.newHashSet(); + Set pathsToReinstate = new HashSet<>(); for (String node : finalSubPaths.keySet()) { String path = ZKPaths.makePath(parentPath, node); log.info("Node[%s] is added to reinstate.", path); diff --git a/server/src/main/java/org/apache/druid/curator/discovery/CuratorServiceAnnouncer.java b/server/src/main/java/org/apache/druid/curator/discovery/CuratorServiceAnnouncer.java index 99bbd3d10cb..aa3739560e5 100644 --- a/server/src/main/java/org/apache/druid/curator/discovery/CuratorServiceAnnouncer.java +++ b/server/src/main/java/org/apache/druid/curator/discovery/CuratorServiceAnnouncer.java @@ -20,13 +20,13 @@ package org.apache.druid.curator.discovery; import com.google.common.base.Throwables; -import com.google.common.collect.Maps; import com.google.inject.Inject; import org.apache.curator.x.discovery.ServiceDiscovery; import org.apache.curator.x.discovery.ServiceInstance; import org.apache.druid.java.util.emitter.EmittingLogger; import org.apache.druid.server.DruidNode; +import java.util.HashMap; import java.util.Map; /** @@ -40,7 +40,7 @@ public class CuratorServiceAnnouncer implements ServiceAnnouncer private static final EmittingLogger log = new EmittingLogger(CuratorServiceAnnouncer.class); private final ServiceDiscovery discovery; - private final Map> instanceMap = Maps.newHashMap(); + private final Map> instanceMap = new HashMap<>(); private final Object monitor = new Object(); @Inject diff --git a/server/src/main/java/org/apache/druid/curator/discovery/DiscoveryModule.java b/server/src/main/java/org/apache/druid/curator/discovery/DiscoveryModule.java index 28bae791a79..0548203df61 100644 --- a/server/src/main/java/org/apache/druid/curator/discovery/DiscoveryModule.java +++ b/server/src/main/java/org/apache/druid/curator/discovery/DiscoveryModule.java @@ -21,7 +21,6 @@ package org.apache.druid.curator.discovery; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; import com.google.inject.Binder; import com.google.inject.Inject; import com.google.inject.Injector; @@ -64,6 +63,7 @@ import org.apache.druid.server.initialization.CuratorDiscoveryConfig; 
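The argument-taking variants in these hunks migrate just as mechanically, since the JDK constructors accept a source collection or a Comparator directly. The one family with no one-line JDK equivalent is Maps.newHashMapWithExpectedSize(n): HashMap's int constructor takes a raw initial capacity rather than an expected element count (capacity needs to be roughly n / 0.75 to avoid rehashing), which is presumably why call sites such as the Maps.newHashMapWithExpectedSize(value.length) in TopNQueryRunnerTestHelper keep their Guava import. A sketch of the mappings, with illustrative names:

import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;

public class GuavaToJdkArgsSketch
{
  public static void main(String[] args)
  {
    Map<String, Integer> source = new HashMap<>();
    source.put("segments", 3);

    // Guava: Maps.newHashMap(source)      -> JDK copy constructor
    Map<String, Integer> copy = new HashMap<>(source);

    // Guava: Maps.newTreeMap()            -> new TreeMap<>()
    TreeMap<String, Integer> sorted = new TreeMap<>(copy);

    // Guava: Sets.newLinkedHashSet(keys)  -> new LinkedHashSet<>(keys), keeps insertion order
    Set<String> ordered = new LinkedHashSet<>(copy.keySet());

    // Guava: Sets.newTreeSet(comparator)  -> new TreeSet<>(comparator)
    Set<String> byLength = new TreeSet<>(Comparator.comparingInt(String::length));
    byLength.add("historical");

    System.out.println(copy + " " + sorted + " " + ordered + " " + byLength);
  }
}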
import org.apache.druid.server.initialization.ZkPathsConfig; import java.lang.annotation.Annotation; +import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; @@ -230,7 +230,7 @@ public class DiscoveryModule implements Module public void start() { if (nodes == null) { - nodes = Lists.newArrayList(); + nodes = new ArrayList<>(); for (KeyHolder holder : nodesToAnnounce) { nodes.add(injector.getInstance(holder.getKey())); } diff --git a/server/src/main/java/org/apache/druid/guice/FireDepartmentsProvider.java b/server/src/main/java/org/apache/druid/guice/FireDepartmentsProvider.java index b898fac7ea6..9f9e954f8fb 100644 --- a/server/src/main/java/org/apache/druid/guice/FireDepartmentsProvider.java +++ b/server/src/main/java/org/apache/druid/guice/FireDepartmentsProvider.java @@ -22,18 +22,18 @@ package org.apache.druid.guice; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Throwables; -import com.google.common.collect.Lists; import com.google.inject.Inject; import com.google.inject.Provider; import org.apache.druid.segment.realtime.FireDepartment; +import java.util.ArrayList; import java.util.List; /** */ public class FireDepartmentsProvider implements Provider> { - private final List fireDepartments = Lists.newArrayList(); + private final List fireDepartments = new ArrayList<>(); @Inject public FireDepartmentsProvider( diff --git a/server/src/main/java/org/apache/druid/initialization/Initialization.java b/server/src/main/java/org/apache/druid/initialization/Initialization.java index a72349b8bea..fcf288ac6a3 100644 --- a/server/src/main/java/org/apache/druid/initialization/Initialization.java +++ b/server/src/main/java/org/apache/druid/initialization/Initialization.java @@ -22,9 +22,6 @@ package org.apache.druid.initialization; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Throwables; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; import com.google.inject.Guice; import com.google.inject.Injector; import com.google.inject.Key; @@ -82,6 +79,7 @@ import java.net.URLClassLoader; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; @@ -98,7 +96,7 @@ public class Initialization private static final Logger log = new Logger(Initialization.class); private static final ConcurrentMap loadersMap = new ConcurrentHashMap<>(); - private static final Map extensionsMap = Maps.newHashMap(); + private static final Map extensionsMap = new HashMap<>(); /** * @param clazz service class @@ -111,7 +109,7 @@ public class Initialization @SuppressWarnings("unchecked") Collection retVal = extensionsMap.get(clazz); if (retVal == null) { - return Sets.newHashSet(); + return new HashSet<>(); } return retVal; } @@ -435,7 +433,7 @@ public class Initialization this.modulesConfig = baseInjector.getInstance(ModulesConfig.class); this.jsonMapper = baseInjector.getInstance(Key.get(ObjectMapper.class, Json.class)); this.smileMapper = baseInjector.getInstance(Key.get(ObjectMapper.class, Smile.class)); - this.modules = Lists.newArrayList(); + this.modules = new ArrayList<>(); } private List getModules() diff --git 
a/server/src/main/java/org/apache/druid/metadata/IndexerSQLMetadataStorageCoordinator.java b/server/src/main/java/org/apache/druid/metadata/IndexerSQLMetadataStorageCoordinator.java index 3dd15e651d0..5c3220e099a 100644 --- a/server/src/main/java/org/apache/druid/metadata/IndexerSQLMetadataStorageCoordinator.java +++ b/server/src/main/java/org/apache/druid/metadata/IndexerSQLMetadataStorageCoordinator.java @@ -28,7 +28,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Iterators; -import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.common.hash.Hashing; import com.google.common.io.BaseEncoding; @@ -72,6 +71,7 @@ import java.io.IOException; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -170,7 +170,7 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor final Interval interval ) throws IOException { - final List identifiers = Lists.newArrayList(); + final List identifiers = new ArrayList<>(); final ResultIterator dbSegments = handle.createQuery( @@ -300,7 +300,7 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor } // Find which segments are used (i.e. not overshadowed). - final Set usedSegments = Sets.newHashSet(); + final Set usedSegments = new HashSet<>(); List> segmentHolders = VersionedIntervalTimeline.forSegments(segments).lookupWithIncompletePartitions(Intervals.ETERNITY); for (TimelineObjectHolder holder : segmentHolders) { @@ -324,7 +324,7 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor // Set definitelyNotUpdated back to false upon retrying. 
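One inference detail is relevant to the fold(...) accumulator replaced just below: since Java 8 the diamond is target-typed in argument position as well, so new ArrayList<>() passed to a generic method still picks up the right element type from that method's signature. A self-contained sketch; fold and all names here are invented stand-ins, not the JDBI API used in the hunk:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.function.BiFunction;

public class TargetTypingSketch
{
  // Stand-in for a fold: the accumulator's type A is fixed by this signature.
  static <T, A> A fold(Iterable<T> items, A acc, BiFunction<A, T, A> folder)
  {
    for (T item : items) {
      acc = folder.apply(acc, item);
    }
    return acc;
  }

  public static void main(String[] args)
  {
    List<String> names = Arrays.asList("broker", "historical");

    // Guava: fold(names, Lists.newArrayList(), ...)
    // JDK: the diamond is target-typed to List<String> by fold's signature.
    List<String> copied = fold(names, new ArrayList<>(), (acc, s) -> {
      acc.add(s);
      return acc;
    });
    System.out.println(copied);
  }
}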
definitelyNotUpdated.set(false); - final Set inserted = Sets.newHashSet(); + final Set inserted = new HashSet<>(); if (startMetadata != null) { final DataSourceMetadataUpdateResult result = updateDataSourceMetadataWithHandle( @@ -1132,7 +1132,7 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor .bind("end", interval.getEnd().toString()) .map(ByteArrayMapper.FIRST) .fold( - Lists.newArrayList(), + new ArrayList<>(), new Folder3, byte[]>() { @Override diff --git a/server/src/main/java/org/apache/druid/metadata/SQLMetadataRuleManager.java b/server/src/main/java/org/apache/druid/metadata/SQLMetadataRuleManager.java index b1eb9f5aa42..4cb871969c2 100644 --- a/server/src/main/java/org/apache/druid/metadata/SQLMetadataRuleManager.java +++ b/server/src/main/java/org/apache/druid/metadata/SQLMetadataRuleManager.java @@ -25,8 +25,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Preconditions; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.inject.Inject; import org.apache.druid.audit.AuditEntry; import org.apache.druid.audit.AuditInfo; @@ -57,7 +55,9 @@ import org.skife.jdbi.v2.tweak.ResultSetMapper; import java.io.IOException; import java.sql.ResultSet; import java.sql.SQLException; +import java.util.ArrayList; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ScheduledExecutorService; @@ -284,7 +284,7 @@ public class SQLMetadataRuleManager implements MetadataRuleManager } ) .fold( - Maps.newHashMap(), + new HashMap<>(), new Folder3>, Pair>>() { @Override @@ -341,13 +341,13 @@ public class SQLMetadataRuleManager implements MetadataRuleManager public List getRules(final String dataSource) { List retVal = rules.get().get(dataSource); - return retVal == null ? Lists.newArrayList() : retVal; + return retVal == null ? 
new ArrayList<>() : retVal; } @Override public List getRulesWithDefault(final String dataSource) { - List retVal = Lists.newArrayList(); + List retVal = new ArrayList<>(); Map> theRules = rules.get(); if (theRules.get(dataSource) != null) { retVal.addAll(theRules.get(dataSource)); diff --git a/server/src/main/java/org/apache/druid/metadata/SQLMetadataSegmentManager.java b/server/src/main/java/org/apache/druid/metadata/SQLMetadataSegmentManager.java index 25bd11bcacc..3715168c085 100644 --- a/server/src/main/java/org/apache/druid/metadata/SQLMetadataSegmentManager.java +++ b/server/src/main/java/org/apache/druid/metadata/SQLMetadataSegmentManager.java @@ -240,7 +240,7 @@ public class SQLMetadataSegmentManager implements MetadataSegmentManager ) ); - final List segments = Lists.newArrayList(); + final List segments = new ArrayList<>(); List> timelineObjectHolders = segmentTimeline.lookup( Intervals.of("0000-01-01/3000-01-01") ); diff --git a/server/src/main/java/org/apache/druid/metadata/SQLMetadataSupervisorManager.java b/server/src/main/java/org/apache/druid/metadata/SQLMetadataSupervisorManager.java index a84227705f1..075a6abf831 100644 --- a/server/src/main/java/org/apache/druid/metadata/SQLMetadataSupervisorManager.java +++ b/server/src/main/java/org/apache/druid/metadata/SQLMetadataSupervisorManager.java @@ -24,8 +24,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Supplier; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.inject.Inject; import org.apache.druid.guice.ManageLifecycle; import org.apache.druid.guice.annotations.Json; @@ -46,6 +44,8 @@ import org.skife.jdbi.v2.tweak.ResultSetMapper; import java.io.IOException; import java.sql.ResultSet; import java.sql.SQLException; +import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -143,7 +143,7 @@ public class SQLMetadataSupervisorManager implements MetadataSupervisorManager } } ).fold( - Maps.newHashMap(), + new HashMap<>(), new Folder3>, Pair>() { @Override @@ -157,7 +157,7 @@ public class SQLMetadataSupervisorManager implements MetadataSupervisorManager try { String specId = pair.lhs; if (!retVal.containsKey(specId)) { - retVal.put(specId, Lists.newArrayList()); + retVal.put(specId, new ArrayList<>()); } retVal.get(specId).add(pair.rhs); @@ -216,7 +216,7 @@ public class SQLMetadataSupervisorManager implements MetadataSupervisorManager } } ).fold( - Maps.newHashMap(), + new HashMap<>(), new Folder3, Pair>() { @Override diff --git a/server/src/main/java/org/apache/druid/segment/indexing/DataSchema.java b/server/src/main/java/org/apache/druid/segment/indexing/DataSchema.java index f59df348bc8..10192c730bd 100644 --- a/server/src/main/java/org/apache/druid/segment/indexing/DataSchema.java +++ b/server/src/main/java/org/apache/druid/segment/indexing/DataSchema.java @@ -126,7 +126,7 @@ public class DataSchema jsonMapper.convertValue(this.parser, InputRowParser.class) ); - final Set dimensionExclusions = Sets.newHashSet(); + final Set dimensionExclusions = new HashSet<>(); for (AggregatorFactory aggregator : aggregators) { dimensionExclusions.addAll(aggregator.requiredFields()); dimensionExclusions.add(aggregator.getName()); @@ -144,7 +144,7 @@ public class DataSchema } } if (dimensionsSpec != null) { - final Set metSet = Sets.newHashSet(); + final Set metSet = new HashSet<>(); for (AggregatorFactory aggregator : 
aggregators) { metSet.add(aggregator.getName()); } diff --git a/server/src/main/java/org/apache/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java b/server/src/main/java/org/apache/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java index fb2b8aa744a..5e95f9c55cb 100644 --- a/server/src/main/java/org/apache/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java +++ b/server/src/main/java/org/apache/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java @@ -24,9 +24,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Optional; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterators; -import com.google.common.collect.Lists; import com.google.common.collect.PeekingIterator; -import com.google.common.collect.Sets; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.IAE; import org.apache.druid.java.util.common.granularity.Granularities; @@ -35,6 +33,7 @@ import org.apache.druid.java.util.common.guava.Comparators; import org.joda.time.DateTime; import org.joda.time.Interval; +import java.util.ArrayList; import java.util.List; import java.util.SortedSet; import java.util.TreeSet; @@ -54,10 +53,10 @@ public class ArbitraryGranularitySpec implements GranularitySpec { this.queryGranularity = queryGranularity == null ? Granularities.NONE : queryGranularity; this.rollup = rollup == null ? Boolean.TRUE : rollup; - this.intervals = Sets.newTreeSet(Comparators.intervalsByStartThenEnd()); + this.intervals = new TreeSet<>(Comparators.intervalsByStartThenEnd()); if (inputIntervals == null) { - inputIntervals = Lists.newArrayList(); + inputIntervals = new ArrayList<>(); } // Insert all intervals diff --git a/server/src/main/java/org/apache/druid/segment/indexing/granularity/UniformGranularitySpec.java b/server/src/main/java/org/apache/druid/segment/indexing/granularity/UniformGranularitySpec.java index 778ca635e7f..c73b1b7c268 100644 --- a/server/src/main/java/org/apache/druid/segment/indexing/granularity/UniformGranularitySpec.java +++ b/server/src/main/java/org/apache/druid/segment/indexing/granularity/UniformGranularitySpec.java @@ -24,12 +24,12 @@ import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Optional; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; import org.apache.druid.java.util.common.granularity.Granularities; import org.apache.druid.java.util.common.granularity.Granularity; import org.joda.time.DateTime; import org.joda.time.Interval; +import java.util.ArrayList; import java.util.List; import java.util.SortedSet; @@ -57,7 +57,7 @@ public class UniformGranularitySpec implements GranularitySpec this.segmentGranularity = segmentGranularity == null ? 
DEFAULT_SEGMENT_GRANULARITY : segmentGranularity; if (inputIntervals != null) { - List granularIntervals = Lists.newArrayList(); + List granularIntervals = new ArrayList<>(); for (Interval inputInterval : inputIntervals) { Iterables.addAll(granularIntervals, this.segmentGranularity.getIterable(inputInterval)); } diff --git a/server/src/main/java/org/apache/druid/segment/loading/SegmentLoaderLocalCacheManager.java b/server/src/main/java/org/apache/druid/segment/loading/SegmentLoaderLocalCacheManager.java index 3957fda4db6..cdcd316316d 100644 --- a/server/src/main/java/org/apache/druid/segment/loading/SegmentLoaderLocalCacheManager.java +++ b/server/src/main/java/org/apache/druid/segment/loading/SegmentLoaderLocalCacheManager.java @@ -20,7 +20,6 @@ package org.apache.druid.segment.loading; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.Lists; import com.google.common.primitives.Longs; import com.google.inject.Inject; import org.apache.commons.io.FileUtils; @@ -70,7 +69,7 @@ public class SegmentLoaderLocalCacheManager implements SegmentLoader this.config = config; this.jsonMapper = mapper; - this.locations = Lists.newArrayList(); + this.locations = new ArrayList<>(); for (StorageLocationConfig locationConfig : config.getLocations()) { locations.add(new StorageLocation( locationConfig.getPath(), diff --git a/server/src/main/java/org/apache/druid/segment/loading/StorageLocation.java b/server/src/main/java/org/apache/druid/segment/loading/StorageLocation.java index 6c603e3414d..bc550f837e1 100644 --- a/server/src/main/java/org/apache/druid/segment/loading/StorageLocation.java +++ b/server/src/main/java/org/apache/druid/segment/loading/StorageLocation.java @@ -19,12 +19,12 @@ package org.apache.druid.segment.loading; -import com.google.common.collect.Sets; import org.apache.druid.java.util.common.logger.Logger; import org.apache.druid.timeline.DataSegment; import javax.annotation.Nullable; import java.io.File; +import java.util.HashSet; import java.util.Set; /** @@ -58,7 +58,7 @@ class StorageLocation this.freeSpaceToKeep = 0; } - this.segments = Sets.newHashSet(); + this.segments = new HashSet<>(); } File getPath() diff --git a/server/src/main/java/org/apache/druid/segment/realtime/RealtimeManager.java b/server/src/main/java/org/apache/druid/segment/realtime/RealtimeManager.java index d41358f17d7..bc07ba820d6 100644 --- a/server/src/main/java/org/apache/druid/segment/realtime/RealtimeManager.java +++ b/server/src/main/java/org/apache/druid/segment/realtime/RealtimeManager.java @@ -26,7 +26,6 @@ import com.google.common.base.Preconditions; import com.google.common.base.Supplier; import com.google.common.base.Throwables; import com.google.common.collect.Iterables; -import com.google.common.collect.Maps; import com.google.common.util.concurrent.MoreExecutors; import com.google.inject.Inject; import org.apache.druid.data.input.Committer; @@ -90,7 +89,7 @@ public class RealtimeManager implements QuerySegmentWalker DataSegmentServerAnnouncer serverAnnouncer ) { - this(fireDepartments, conglomerate, serverAnnouncer, Maps.newHashMap()); + this(fireDepartments, conglomerate, serverAnnouncer, new HashMap<>()); } @VisibleForTesting @@ -104,7 +103,7 @@ public class RealtimeManager implements QuerySegmentWalker this.fireDepartments = fireDepartments; this.conglomerate = conglomerate; this.serverAnnouncer = serverAnnouncer; - this.chiefs = chiefs == null ? Maps.newHashMap() : Maps.newHashMap(chiefs); + this.chiefs = chiefs == null ? 
new HashMap<>() : new HashMap<>(chiefs); } @VisibleForTesting diff --git a/server/src/main/java/org/apache/druid/segment/realtime/RealtimeMetricsMonitor.java b/server/src/main/java/org/apache/druid/segment/realtime/RealtimeMetricsMonitor.java index 4af5ad4d0d4..4768ca94f30 100644 --- a/server/src/main/java/org/apache/druid/segment/realtime/RealtimeMetricsMonitor.java +++ b/server/src/main/java/org/apache/druid/segment/realtime/RealtimeMetricsMonitor.java @@ -20,7 +20,6 @@ package org.apache.druid.segment.realtime; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Maps; import com.google.inject.Inject; import org.apache.druid.java.util.emitter.EmittingLogger; import org.apache.druid.java.util.emitter.service.ServiceEmitter; @@ -29,6 +28,7 @@ import org.apache.druid.java.util.metrics.AbstractMonitor; import org.apache.druid.java.util.metrics.MonitorUtils; import org.apache.druid.query.DruidMetrics; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -55,7 +55,7 @@ public class RealtimeMetricsMonitor extends AbstractMonitor public RealtimeMetricsMonitor(List fireDepartments, Map dimensions) { this.fireDepartments = fireDepartments; - this.previousValues = Maps.newHashMap(); + this.previousValues = new HashMap<>(); this.dimensions = ImmutableMap.copyOf(dimensions); } diff --git a/server/src/main/java/org/apache/druid/segment/realtime/appenderator/AppenderatorDriverMetadata.java b/server/src/main/java/org/apache/druid/segment/realtime/appenderator/AppenderatorDriverMetadata.java index 03cd0ff45b6..f06ffd010b0 100644 --- a/server/src/main/java/org/apache/druid/segment/realtime/appenderator/AppenderatorDriverMetadata.java +++ b/server/src/main/java/org/apache/druid/segment/realtime/appenderator/AppenderatorDriverMetadata.java @@ -22,11 +22,11 @@ package org.apache.druid.segment.realtime.appenderator; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; import org.apache.druid.segment.realtime.appenderator.SegmentWithState.SegmentState; import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -58,8 +58,8 @@ public class AppenderatorDriverMetadata ); if (segments == null) { // convert old metadata to new one - final Map> newMetadata = Maps.newHashMap(); - final Set activeSegmentsAlreadySeen = Sets.newHashSet(); // temp data structure + final Map> newMetadata = new HashMap<>(); + final Set activeSegmentsAlreadySeen = new HashSet<>(); // temp data structure activeSegments.entrySet() .forEach(sequenceSegments -> newMetadata.put( diff --git a/server/src/main/java/org/apache/druid/segment/realtime/appenderator/AppenderatorImpl.java b/server/src/main/java/org/apache/druid/segment/realtime/appenderator/AppenderatorImpl.java index 38530084a7f..9c837b08816 100644 --- a/server/src/main/java/org/apache/druid/segment/realtime/appenderator/AppenderatorImpl.java +++ b/server/src/main/java/org/apache/druid/segment/realtime/appenderator/AppenderatorImpl.java @@ -30,7 +30,6 @@ import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.common.primitives.Ints; import 
com.google.common.util.concurrent.FutureCallback; @@ -90,6 +89,7 @@ import java.nio.file.StandardOpenOption; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -451,7 +451,7 @@ public class AppenderatorImpl implements Appenderator uncommitFuture.get(); // Drop everything. - final List> futures = Lists.newArrayList(); + final List> futures = new ArrayList<>(); for (Map.Entry entry : sinks.entrySet()) { futures.add(abandonSegment(entry.getKey(), entry.getValue(), true)); } @@ -481,8 +481,8 @@ public class AppenderatorImpl implements Appenderator { throwPersistErrorIfExists(); - final Map currentHydrants = Maps.newHashMap(); - final List> indexesToPersist = Lists.newArrayList(); + final Map currentHydrants = new HashMap<>(); + final List> indexesToPersist = new ArrayList<>(); int numPersistedRows = 0; long bytesPersisted = 0L; for (SegmentIdentifier identifier : sinks.keySet()) { @@ -544,7 +544,7 @@ public class AppenderatorImpl implements Appenderator try { commitLock.lock(); - final Map commitHydrants = Maps.newHashMap(); + final Map commitHydrants = new HashMap<>(); final Committed oldCommit = readCommit(); if (oldCommit != null) { // merge current hydrants with existing hydrants @@ -595,7 +595,7 @@ public class AppenderatorImpl implements Appenderator final boolean useUniquePath ) { - final Map theSinks = Maps.newHashMap(); + final Map theSinks = new HashMap<>(); for (final SegmentIdentifier identifier : identifiers) { final Sink sink = sinks.get(identifier); if (sink == null) { @@ -612,7 +612,7 @@ public class AppenderatorImpl implements Appenderator // segments. persistAll(committer), (Function) commitMetadata -> { - final List dataSegments = Lists.newArrayList(); + final List dataSegments = new ArrayList<>(); for (Map.Entry entry : theSinks.entrySet()) { if (droppingSinks.contains(entry.getKey())) { @@ -705,7 +705,7 @@ public class AppenderatorImpl implements Appenderator } final File mergedFile; - List indexes = Lists.newArrayList(); + List indexes = new ArrayList<>(); Closer closer = Closer.create(); try { for (FireHydrant fireHydrant : sink) { @@ -769,7 +769,7 @@ public class AppenderatorImpl implements Appenderator log.info("Shutting down..."); - final List> futures = Lists.newArrayList(); + final List> futures = new ArrayList<>(); for (Map.Entry entry : sinks.entrySet()) { futures.add(abandonSegment(entry.getKey(), entry.getValue(), false)); } @@ -1015,7 +1015,7 @@ public class AppenderatorImpl implements Appenderator (o1, o2) -> Ints.compare(Integer.parseInt(o1.getName()), Integer.parseInt(o2.getName())) ); - List hydrants = Lists.newArrayList(); + List hydrants = new ArrayList<>(); for (File hydrantDir : sinkFiles) { final int hydrantNumber = Integer.parseInt(hydrantDir.getName()); diff --git a/server/src/main/java/org/apache/druid/segment/realtime/appenderator/AppenderatorPlumber.java b/server/src/main/java/org/apache/druid/segment/realtime/appenderator/AppenderatorPlumber.java index d0e72e3d6d1..9755e6523f9 100644 --- a/server/src/main/java/org/apache/druid/segment/realtime/appenderator/AppenderatorPlumber.java +++ b/server/src/main/java/org/apache/druid/segment/realtime/appenderator/AppenderatorPlumber.java @@ -62,6 +62,7 @@ import org.joda.time.Interval; import org.joda.time.Period; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; @@ -396,7 +397,7 @@ 
public class AppenderatorPlumber implements Plumber long minTimestamp = minTimestampAsDate.getMillis(); final List appenderatorSegments = appenderator.getSegments(); - final List segmentsToPush = Lists.newArrayList(); + final List segmentsToPush = new ArrayList<>(); if (shuttingDown) { log.info("Found [%,d] segments. Attempting to hand off all of them.", appenderatorSegments.size()); diff --git a/server/src/main/java/org/apache/druid/segment/realtime/appenderator/Committed.java b/server/src/main/java/org/apache/druid/segment/realtime/appenderator/Committed.java index ba42670bd09..9b4bb98f8a1 100644 --- a/server/src/main/java/org/apache/druid/segment/realtime/appenderator/Committed.java +++ b/server/src/main/java/org/apache/druid/segment/realtime/appenderator/Committed.java @@ -22,8 +22,8 @@ package org.apache.druid.segment.realtime.appenderator; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Maps; +import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -77,7 +77,7 @@ public class Committed public Committed without(final String identifierAsString) { - final Map newHydrants = Maps.newHashMap(); + final Map newHydrants = new HashMap<>(); newHydrants.putAll(hydrants); newHydrants.remove(identifierAsString); return new Committed(newHydrants, metadata); @@ -85,7 +85,7 @@ public class Committed public Committed with(final Map hydrantsToAdd) { - final Map newHydrants = Maps.newHashMap(); + final Map newHydrants = new HashMap<>(); newHydrants.putAll(hydrants); newHydrants.putAll(hydrantsToAdd); return new Committed(newHydrants, metadata); diff --git a/server/src/main/java/org/apache/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java b/server/src/main/java/org/apache/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java index f3b7b8035ad..9939fad7dae 100644 --- a/server/src/main/java/org/apache/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java +++ b/server/src/main/java/org/apache/druid/segment/realtime/firehose/EventReceiverFirehoseFactory.java @@ -31,7 +31,6 @@ import com.google.common.base.Preconditions; import com.google.common.base.Stopwatch; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; import com.google.common.io.CountingInputStream; import org.apache.druid.data.input.Firehose; import org.apache.druid.data.input.FirehoseFactory; @@ -66,6 +65,7 @@ import javax.ws.rs.core.Response; import java.io.File; import java.io.IOException; import java.io.InputStream; +import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; @@ -259,7 +259,7 @@ public class EventReceiverFirehoseFactory implements FirehoseFactory rows = Lists.newArrayList(); + final List rows = new ArrayList<>(); for (final Map event : events) { // Might throw an exception. We'd like that to happen now, instead of while adding to the row buffer. 
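The change above in EventReceiverFirehoseFactory is representative of the whole patch: the guava static factories are dropped and the plain JDK constructors take their place, with the Java 7 diamond operator inferring the type arguments the factories used to infer. A minimal, self-contained sketch of the mapping follows; the class and variable names are illustrative only, not taken from the Druid codebase.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

class GuavaToJdkCollectionsSketch
{
  public static void main(String[] args)
  {
    // Before: List<InputRow> rows = Lists.newArrayList();
    List<String> rows = new ArrayList<>();

    // Before: Map<String, List<String>> rules = Maps.newHashMap();
    Map<String, List<String>> rules = new HashMap<>();

    // Before: Set<String> exclusions = Sets.newHashSet();
    Set<String> exclusions = new HashSet<>();

    rows.add("row-1");
    rules.put("someDataSource", rows);
    exclusions.add("count");
    System.out.println(rules + " " + exclusions);
  }
}
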
rows.addAll(parser.parseBatch(event)); diff --git a/server/src/main/java/org/apache/druid/segment/realtime/firehose/IngestSegmentFirehose.java b/server/src/main/java/org/apache/druid/segment/realtime/firehose/IngestSegmentFirehose.java index 7a39423f6fd..d81afe17e4b 100644 --- a/server/src/main/java/org/apache/druid/segment/realtime/firehose/IngestSegmentFirehose.java +++ b/server/src/main/java/org/apache/druid/segment/realtime/firehose/IngestSegmentFirehose.java @@ -49,6 +49,7 @@ import org.apache.druid.utils.Runnables; import javax.annotation.Nullable; import java.io.IOException; import java.util.ArrayList; +import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -95,7 +96,7 @@ public class IngestSegmentFirehose implements Firehose final BaseLongColumnValueSelector timestampColumnSelector = cursor.getColumnSelectorFactory().makeColumnValueSelector(ColumnHolder.TIME_COLUMN_NAME); - final Map dimSelectors = Maps.newHashMap(); + final Map dimSelectors = new HashMap<>(); for (String dim : dims) { final DimensionSelector dimSelector = cursor .getColumnSelectorFactory() @@ -106,7 +107,7 @@ public class IngestSegmentFirehose implements Firehose } } - final Map metSelectors = Maps.newHashMap(); + final Map metSelectors = new HashMap<>(); for (String metric : metrics) { final BaseObjectColumnValueSelector metricSelector = cursor.getColumnSelectorFactory().makeColumnValueSelector(metric); diff --git a/server/src/main/java/org/apache/druid/segment/realtime/plumber/FlushingPlumber.java b/server/src/main/java/org/apache/druid/segment/realtime/plumber/FlushingPlumber.java index ceecde5d040..d552b10170e 100644 --- a/server/src/main/java/org/apache/druid/segment/realtime/plumber/FlushingPlumber.java +++ b/server/src/main/java/org/apache/druid/segment/realtime/plumber/FlushingPlumber.java @@ -20,7 +20,6 @@ package org.apache.druid.segment.realtime.plumber; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.Lists; import org.apache.druid.client.cache.Cache; import org.apache.druid.client.cache.CacheConfig; import org.apache.druid.client.cache.CachePopulatorStats; @@ -42,6 +41,7 @@ import org.apache.druid.server.coordination.DataSegmentAnnouncer; import org.joda.time.DateTime; import org.joda.time.Duration; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.Callable; @@ -180,7 +180,7 @@ public class FlushingPlumber extends RealtimePlumber getRejectionPolicy().getCurrMaxTime().minus(windowMillis) ).getMillis(); - List> sinksToPush = Lists.newArrayList(); + List> sinksToPush = new ArrayList<>(); for (Map.Entry entry : getSinks().entrySet()) { final Long intervalStart = entry.getKey(); if (intervalStart < minTimestamp) { diff --git a/server/src/main/java/org/apache/druid/segment/realtime/plumber/RealtimePlumber.java b/server/src/main/java/org/apache/druid/segment/realtime/plumber/RealtimePlumber.java index 507e8f22efc..8d80f609cb2 100644 --- a/server/src/main/java/org/apache/druid/segment/realtime/plumber/RealtimePlumber.java +++ b/server/src/main/java/org/apache/druid/segment/realtime/plumber/RealtimePlumber.java @@ -28,7 +28,6 @@ import com.google.common.base.Supplier; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; import com.google.common.primitives.Ints; import org.apache.commons.io.FileUtils; import org.apache.druid.client.cache.Cache; @@ -86,6 +85,7 
@@ import java.io.Closeable; import java.io.File; import java.io.FilenameFilter; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.List; @@ -280,7 +280,7 @@ public class RealtimePlumber implements Plumber @Override public void persist(final Committer committer) { - final List> indexesToPersist = Lists.newArrayList(); + final List> indexesToPersist = new ArrayList<>(); for (Sink sink : sinks.values()) { if (sink.swappable()) { indexesToPersist.add(Pair.of(sink.swap(), sink.getInterval())); @@ -419,7 +419,7 @@ public class RealtimePlumber implements Plumber mergeStopwatch = Stopwatch.createStarted(); final File mergedFile; - List indexes = Lists.newArrayList(); + List indexes = new ArrayList<>(); Closer closer = Closer.create(); try { for (FireHydrant fireHydrant : sink) { @@ -657,7 +657,7 @@ public class RealtimePlumber implements Plumber } ); boolean isCorrupted = false; - List hydrants = Lists.newArrayList(); + List hydrants = new ArrayList<>(); for (File segmentDir : sinkFiles) { log.info("Loading previously persisted segment at [%s]", segmentDir); @@ -851,7 +851,7 @@ public class RealtimePlumber implements Plumber minTimestampAsDate ); - List> sinksToPush = Lists.newArrayList(); + List> sinksToPush = new ArrayList<>(); for (Map.Entry entry : sinks.entrySet()) { final Long intervalStart = entry.getKey(); if (intervalStart < minTimestamp) { diff --git a/server/src/main/java/org/apache/druid/segment/realtime/plumber/Sink.java b/server/src/main/java/org/apache/druid/segment/realtime/plumber/Sink.java index 90aced3ffe7..e9d66f7f40e 100644 --- a/server/src/main/java/org/apache/druid/segment/realtime/plumber/Sink.java +++ b/server/src/main/java/org/apache/druid/segment/realtime/plumber/Sink.java @@ -24,8 +24,6 @@ import com.google.common.base.Predicate; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterators; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; import org.apache.druid.data.input.InputRow; import org.apache.druid.java.util.common.IAE; import org.apache.druid.java.util.common.ISE; @@ -44,7 +42,9 @@ import org.apache.druid.timeline.partition.ShardSpec; import org.joda.time.Interval; import javax.annotation.Nullable; +import java.util.ArrayList; import java.util.Arrays; +import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; @@ -67,7 +67,7 @@ public class Sink implements Iterable private final long maxBytesInMemory; private final boolean reportParseExceptions; private final CopyOnWriteArrayList hydrants = new CopyOnWriteArrayList(); - private final LinkedHashSet dimOrder = Sets.newLinkedHashSet(); + private final LinkedHashSet dimOrder = new LinkedHashSet<>(); private final AtomicInteger numRowsExcludingCurrIndex = new AtomicInteger(); private volatile FireHydrant currHydrant; private volatile boolean writable = true; @@ -246,7 +246,7 @@ public class Sink implements Iterable interval, version, ImmutableMap.of(), - Lists.newArrayList(), + new ArrayList<>(), Lists.transform( Arrays.asList(schema.getAggregators()), new Function() { @@ -358,7 +358,7 @@ public class Sink implements Iterable if (!indexSchema.getDimensionsSpec().hasCustomDimensions()) { Map oldCapabilities; if (lastHydrant.hasSwapped()) { - oldCapabilities = Maps.newHashMap(); + oldCapabilities = new HashMap<>(); ReferenceCountingSegment segment = lastHydrant.getIncrementedSegment(); try { 
QueryableIndex oldIndex = segment.asQueryableIndex(); diff --git a/server/src/main/java/org/apache/druid/server/ClientInfoResource.java b/server/src/main/java/org/apache/druid/server/ClientInfoResource.java index b60cb8dabdd..00dde7790ad 100644 --- a/server/src/main/java/org/apache/druid/server/ClientInfoResource.java +++ b/server/src/main/java/org/apache/druid/server/ClientInfoResource.java @@ -22,9 +22,7 @@ package org.apache.druid.server; import com.google.common.base.Function; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.google.common.collect.Sets; import com.google.inject.Inject; import com.sun.jersey.spi.container.ResourceFilters; import org.apache.druid.client.DruidDataSource; @@ -61,8 +59,11 @@ import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; +import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -103,11 +104,11 @@ public class ClientInfoResource private Map> getSegmentsForDatasources() { - final Map> dataSourceMap = Maps.newHashMap(); + final Map> dataSourceMap = new HashMap<>(); for (DruidServer server : serverInventoryView.getInventory()) { for (DruidDataSource dataSource : server.getDataSources()) { if (!dataSourceMap.containsKey(dataSource.getName())) { - dataSourceMap.put(dataSource.getName(), Lists.newArrayList()); + dataSourceMap.put(dataSource.getName(), new ArrayList<>()); } List segments = dataSourceMap.get(dataSource.getName()); segments.addAll(dataSource.getSegments()); @@ -180,8 +181,8 @@ public class ClientInfoResource ); for (TimelineObjectHolder holder : serversLookup) { - final Set dimensions = Sets.newHashSet(); - final Set metrics = Sets.newHashSet(); + final Set dimensions = new HashSet<>(); + final Set metrics = new HashSet<>(); final PartitionHolder partitionHolder = holder.getObject(); if (partitionHolder.isComplete()) { for (ServerSelector server : partitionHolder.payloads()) { @@ -231,7 +232,7 @@ public class ClientInfoResource ) { final List segments = getSegmentsForDatasources().get(dataSourceName); - final Set dims = Sets.newHashSet(); + final Set dims = new HashSet<>(); if (segments == null || segments.isEmpty()) { return dims; @@ -265,7 +266,7 @@ public class ClientInfoResource ) { final List segments = getSegmentsForDatasources().get(dataSourceName); - final Set metrics = Sets.newHashSet(); + final Set metrics = new HashSet<>(); if (segments == null || segments.isEmpty()) { return metrics; @@ -299,7 +300,7 @@ public class ClientInfoResource @Context final HttpServletRequest req ) { - List intervalList = Lists.newArrayList(); + List intervalList = new ArrayList<>(); for (String interval : intervals.split(",")) { intervalList.add(Intervals.of(interval.trim())); } diff --git a/server/src/main/java/org/apache/druid/server/QueryResource.java b/server/src/main/java/org/apache/druid/server/QueryResource.java index 12416e40ba0..15b63729d1b 100644 --- a/server/src/main/java/org/apache/druid/server/QueryResource.java +++ b/server/src/main/java/org/apache/druid/server/QueryResource.java @@ -24,12 +24,10 @@ import com.fasterxml.jackson.databind.ObjectWriter; import com.fasterxml.jackson.databind.module.SimpleModule; import com.fasterxml.jackson.datatype.joda.ser.DateTimeSerializer; import 
com.fasterxml.jackson.jaxrs.smile.SmileMediaTypes; - import com.google.common.base.Strings; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; -import com.google.common.collect.Sets; import com.google.common.io.CountingOutputStream; import com.google.inject.Inject; import org.apache.druid.client.DirectDruidClient; @@ -71,6 +69,7 @@ import java.io.InputStream; import java.io.OutputStream; import java.util.Map; import java.util.Set; +import java.util.TreeSet; import java.util.concurrent.atomic.AtomicLong; /** @@ -135,7 +134,7 @@ public class QueryResource implements QueryCountStatsProvider Set datasources = queryManager.getQueryDatasources(queryId); if (datasources == null) { log.warn("QueryId [%s] not registered with QueryManager, cannot cancel", queryId); - datasources = Sets.newTreeSet(); + datasources = new TreeSet<>(); } Access authResult = AuthorizationUtils.authorizeAllResourceActions( diff --git a/server/src/main/java/org/apache/druid/server/coordination/BatchDataSegmentAnnouncer.java b/server/src/main/java/org/apache/druid/server/coordination/BatchDataSegmentAnnouncer.java index e8c50642cc5..bf282df849d 100644 --- a/server/src/main/java/org/apache/druid/server/coordination/BatchDataSegmentAnnouncer.java +++ b/server/src/main/java/org/apache/druid/server/coordination/BatchDataSegmentAnnouncer.java @@ -25,7 +25,6 @@ import com.google.common.base.Function; import com.google.common.base.Throwables; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.google.common.collect.Sets; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.SettableFuture; import com.google.inject.Inject; @@ -43,6 +42,7 @@ import org.apache.druid.timeline.DataSegment; import javax.annotation.Nullable; import java.io.IOException; import java.util.ArrayList; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -213,7 +213,7 @@ public class BatchDataSegmentAnnouncer implements DataSegmentAnnouncer public void announceSegments(Iterable segments) throws IOException { SegmentZNode segmentZNode = new SegmentZNode(makeServedSegmentPath()); - Set batch = Sets.newHashSet(); + Set batch = new HashSet<>(); List changesBatch = new ArrayList<>(); int byteSize = 0; @@ -247,7 +247,7 @@ public class BatchDataSegmentAnnouncer implements DataSegmentAnnouncer announcer.announce(segmentZNode.getPath(), segmentZNode.getBytes()); segmentZNode = new SegmentZNode(makeServedSegmentPath()); - batch = Sets.newHashSet(); + batch = new HashSet<>(); count = 0; byteSize = 0; } @@ -353,7 +353,7 @@ public class BatchDataSegmentAnnouncer implements DataSegmentAnnouncer public Set getSegments() { if (bytes.length == 0) { - return Sets.newHashSet(); + return new HashSet<>(); } try { return jsonMapper.readValue( diff --git a/server/src/main/java/org/apache/druid/server/coordination/SegmentLoadDropHandler.java b/server/src/main/java/org/apache/druid/server/coordination/SegmentLoadDropHandler.java index d7499ccca67..15d60a4fdf8 100644 --- a/server/src/main/java/org/apache/druid/server/coordination/SegmentLoadDropHandler.java +++ b/server/src/main/java/org/apache/druid/server/coordination/SegmentLoadDropHandler.java @@ -28,9 +28,7 @@ import com.google.common.base.Throwables; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import com.google.common.collect.ImmutableList; -import 
com.google.common.collect.Lists; import com.google.common.collect.Maps; -import com.google.common.collect.Queues; import com.google.common.util.concurrent.AbstractFuture; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.SettableFuture; @@ -199,7 +197,7 @@ public class SegmentLoadDropHandler implements DataSegmentChangeHandler } } - List cachedSegments = Lists.newArrayList(); + List cachedSegments = new ArrayList<>(); File[] segmentsToLoad = baseDir.listFiles(); int ignored = 0; for (int i = 0; i < segmentsToLoad.length; i++) { @@ -596,7 +594,7 @@ public class SegmentLoadDropHandler implements DataSegmentChangeHandler this.announcer = announcer; this.exec = exec; this.intervalMillis = intervalMillis; - this.queue = Queues.newLinkedBlockingQueue(); + this.queue = new LinkedBlockingQueue<>(); this.doneAnnouncing = SettableFuture.create(); } diff --git a/server/src/main/java/org/apache/druid/server/coordinator/CostBalancerStrategy.java b/server/src/main/java/org/apache/druid/server/coordinator/CostBalancerStrategy.java index c7e31a1a70f..066a5dc7076 100644 --- a/server/src/main/java/org/apache/druid/server/coordinator/CostBalancerStrategy.java +++ b/server/src/main/java/org/apache/druid/server/coordinator/CostBalancerStrategy.java @@ -21,7 +21,6 @@ package org.apache.druid.server.coordinator; import com.google.common.base.Predicates; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; @@ -221,7 +220,7 @@ public class CostBalancerStrategy implements BalancerStrategy @Override public Iterator pickServersToDrop(DataSegment toDrop, NavigableSet serverHolders) { - List>> futures = Lists.newArrayList(); + List>> futures = new ArrayList<>(); for (final ServerHolder server : serverHolders) { futures.add( @@ -368,7 +367,7 @@ public class CostBalancerStrategy implements BalancerStrategy { Pair bestServer = Pair.of(Double.POSITIVE_INFINITY, null); - List>> futures = Lists.newArrayList(); + List>> futures = new ArrayList<>(); for (final ServerHolder server : serverHolders) { futures.add( diff --git a/server/src/main/java/org/apache/druid/server/coordinator/CuratorLoadQueuePeon.java b/server/src/main/java/org/apache/druid/server/coordinator/CuratorLoadQueuePeon.java index 31edb69eb36..c790382c2ee 100644 --- a/server/src/main/java/org/apache/druid/server/coordinator/CuratorLoadQueuePeon.java +++ b/server/src/main/java/org/apache/druid/server/coordinator/CuratorLoadQueuePeon.java @@ -21,7 +21,6 @@ package org.apache.druid.server.coordinator; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.Lists; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.api.CuratorWatcher; import org.apache.curator.utils.ZKPaths; @@ -36,6 +35,7 @@ import org.apache.druid.timeline.DataSegment; import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.data.Stat; +import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; @@ -411,7 +411,7 @@ public class CuratorLoadQueuePeon extends LoadQueuePeon private final DataSegment segment; private final DataSegmentChangeRequest changeRequest; private final int type; - private final List callbacks = Lists.newArrayList(); + private final List callbacks 
= new ArrayList<>(); private SegmentHolder( DataSegment segment, diff --git a/server/src/main/java/org/apache/druid/server/coordinator/DatasourceWhitelist.java b/server/src/main/java/org/apache/druid/server/coordinator/DatasourceWhitelist.java index 6d89be3863f..754228ce60f 100644 --- a/server/src/main/java/org/apache/druid/server/coordinator/DatasourceWhitelist.java +++ b/server/src/main/java/org/apache/druid/server/coordinator/DatasourceWhitelist.java @@ -21,9 +21,9 @@ package org.apache.druid.server.coordinator; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonValue; -import com.google.common.collect.Sets; import java.util.Set; +import java.util.TreeSet; /** */ @@ -36,7 +36,7 @@ public class DatasourceWhitelist @JsonCreator public DatasourceWhitelist(Set dataSources) { - this.dataSources = Sets.newTreeSet(String.CASE_INSENSITIVE_ORDER); + this.dataSources = new TreeSet<>(String.CASE_INSENSITIVE_ORDER); this.dataSources.addAll(dataSources); } diff --git a/server/src/main/java/org/apache/druid/server/coordinator/DruidCoordinator.java b/server/src/main/java/org/apache/druid/server/coordinator/DruidCoordinator.java index 40a973950ce..8bf9ed5b448 100644 --- a/server/src/main/java/org/apache/druid/server/coordinator/DruidCoordinator.java +++ b/server/src/main/java/org/apache/druid/server/coordinator/DruidCoordinator.java @@ -22,8 +22,6 @@ package org.apache.druid.server.coordinator; import com.google.common.base.Function; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import com.google.common.collect.Sets; import com.google.common.util.concurrent.ListeningExecutorService; @@ -78,11 +76,14 @@ import org.joda.time.DateTime; import org.joda.time.Duration; import org.joda.time.Interval; +import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.TreeSet; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; @@ -229,7 +230,7 @@ public class DruidCoordinator public Map> getReplicationStatus() { - final Map> retVal = Maps.newHashMap(); + final Map> retVal = new HashMap<>(); if (segmentReplicantLookup == null) { return retVal; @@ -290,7 +291,7 @@ public class DruidCoordinator public Map getLoadStatus() { - Map loadStatus = Maps.newHashMap(); + Map loadStatus = new HashMap<>(); for (ImmutableDruidDataSource dataSource : metadataSegmentManager.getInventory()) { final Set segments = Sets.newHashSet(dataSource.getSegments()); final int availableSegmentSize = segments.size(); @@ -454,7 +455,7 @@ public class DruidCoordinator public Set getOrderedAvailableDataSegments() { - Set availableSegments = Sets.newTreeSet(SEGMENT_COMPARATOR); + Set availableSegments = new TreeSet<>(SEGMENT_COMPARATOR); Iterable dataSegments = getAvailableDataSegments(); @@ -538,7 +539,7 @@ public class DruidCoordinator serviceAnnouncer.announce(self); final int startingLeaderCounter = coordLeaderSelector.localTerm(); - final List> coordinatorRunnables = Lists.newArrayList(); + final List> coordinatorRunnables = new ArrayList<>(); coordinatorRunnables.add( Pair.of( new CoordinatorHistoricalManagerRunnable(startingLeaderCounter), @@ -606,7 +607,7 @@ public class DruidCoordinator private List 
makeIndexingServiceHelpers() { - List helpers = Lists.newArrayList(); + List helpers = new ArrayList<>(); helpers.add(new DruidCoordinatorSegmentInfoLoader(DruidCoordinator.this)); helpers.add(segmentCompactor); helpers.addAll(indexingServiceHelpers); diff --git a/server/src/main/java/org/apache/druid/server/coordinator/DruidCoordinatorRuntimeParams.java b/server/src/main/java/org/apache/druid/server/coordinator/DruidCoordinatorRuntimeParams.java index 8ad0b4d7e33..c99438cb300 100644 --- a/server/src/main/java/org/apache/druid/server/coordinator/DruidCoordinatorRuntimeParams.java +++ b/server/src/main/java/org/apache/druid/server/coordinator/DruidCoordinatorRuntimeParams.java @@ -19,8 +19,6 @@ package org.apache.druid.server.coordinator; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; import org.apache.druid.client.ImmutableDruidDataSource; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.emitter.service.ServiceEmitter; @@ -196,7 +194,7 @@ public class DruidCoordinatorRuntimeParams databaseRuleManager, segmentReplicantLookup, dataSources, - Sets.newTreeSet(DruidCoordinator.SEGMENT_COMPARATOR), + new TreeSet<>(DruidCoordinator.SEGMENT_COMPARATOR), loadManagementPeons, replicationManager, emitter, @@ -233,7 +231,7 @@ public class DruidCoordinatorRuntimeParams this.segmentReplicantLookup = null; this.dataSources = new HashMap<>(); this.availableSegments = new TreeSet<>(DruidCoordinator.SEGMENT_COMPARATOR); - this.loadManagementPeons = Maps.newHashMap(); + this.loadManagementPeons = new HashMap<>(); this.replicationManager = null; this.emitter = null; this.stats = new CoordinatorStats(); diff --git a/server/src/main/java/org/apache/druid/server/coordinator/HttpLoadQueuePeon.java b/server/src/main/java/org/apache/druid/server/coordinator/HttpLoadQueuePeon.java index a095a346830..ef86d1efc24 100644 --- a/server/src/main/java/org/apache/druid/server/coordinator/HttpLoadQueuePeon.java +++ b/server/src/main/java/org/apache/druid/server/coordinator/HttpLoadQueuePeon.java @@ -24,7 +24,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectWriter; import com.google.common.base.Throwables; import com.google.common.collect.Iterators; -import com.google.common.collect.Lists; import com.google.common.util.concurrent.FutureCallback; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; @@ -470,7 +469,7 @@ public class HttpLoadQueuePeon extends LoadQueuePeon { private final DataSegment segment; private final DataSegmentChangeRequest changeRequest; - private final List callbacks = Lists.newArrayList(); + private final List callbacks = new ArrayList<>(); // Time when this request was sent to target server the first time. 
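Where the removed guava factory carried a Comparator, as with Sets.newTreeSet(String.CASE_INSENSITIVE_ORDER) in DatasourceWhitelist and Sets.newTreeSet(SEGMENT_COMPARATOR) in DruidCoordinator above, the TreeSet and TreeMap constructors that accept a Comparator are the drop-in replacements. The sketch below illustrates that mapping; apart from String.CASE_INSENSITIVE_ORDER, the names are invented for the example.

import java.util.Comparator;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;

class ComparatorConstructorSketch
{
  public static void main(String[] args)
  {
    // Before: Set<String> dataSources = Sets.newTreeSet(String.CASE_INSENSITIVE_ORDER);
    Set<String> dataSources = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
    dataSources.add("Wikipedia");
    System.out.println(dataSources.contains("wikipedia")); // true: ordering ignores case

    // Before: Maps.newTreeMap(comparator) becomes new TreeMap<>(comparator).
    Comparator<String> descending = Comparator.reverseOrder();
    Map<String, Integer> ranked = new TreeMap<>(descending);
    ranked.put("a", 1);
    ranked.put("b", 2);
    System.out.println(ranked); // {b=2, a=1}
  }
}
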
private volatile long scheduleTime = -1; diff --git a/server/src/main/java/org/apache/druid/server/coordinator/ReplicationThrottler.java b/server/src/main/java/org/apache/druid/server/coordinator/ReplicationThrottler.java index f407d18a91c..f9b4d7bf748 100644 --- a/server/src/main/java/org/apache/druid/server/coordinator/ReplicationThrottler.java +++ b/server/src/main/java/org/apache/druid/server/coordinator/ReplicationThrottler.java @@ -19,11 +19,11 @@ package org.apache.druid.server.coordinator; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.emitter.EmittingLogger; +import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; @@ -35,7 +35,7 @@ public class ReplicationThrottler { private static final EmittingLogger log = new EmittingLogger(ReplicationThrottler.class); - private final Map replicatingLookup = Maps.newHashMap(); + private final Map replicatingLookup = new HashMap<>(); private final ReplicatorSegmentHolder currentlyReplicating = new ReplicatorSegmentHolder(); private volatile int maxReplicants; @@ -101,8 +101,8 @@ public class ReplicationThrottler private class ReplicatorSegmentHolder { - private final Map> currentlyProcessingSegments = Maps.newHashMap(); - private final Map lifetimes = Maps.newHashMap(); + private final Map> currentlyProcessingSegments = new HashMap<>(); + private final Map lifetimes = new HashMap<>(); public boolean isAtMaxReplicants(String tier) { @@ -165,7 +165,7 @@ public class ReplicationThrottler public List getCurrentlyProcessingSegmentsAndHosts(String tier) { Map segments = currentlyProcessingSegments.get(tier); - List retVal = Lists.newArrayList(); + List retVal = new ArrayList<>(); for (Map.Entry entry : segments.entrySet()) { retVal.add( StringUtils.format("%s ON %s", entry.getKey(), entry.getValue()) diff --git a/server/src/main/java/org/apache/druid/server/coordinator/SegmentReplicantLookup.java b/server/src/main/java/org/apache/druid/server/coordinator/SegmentReplicantLookup.java index b5e01565a70..145b69b57de 100644 --- a/server/src/main/java/org/apache/druid/server/coordinator/SegmentReplicantLookup.java +++ b/server/src/main/java/org/apache/druid/server/coordinator/SegmentReplicantLookup.java @@ -20,11 +20,11 @@ package org.apache.druid.server.coordinator; import com.google.common.collect.HashBasedTable; -import com.google.common.collect.Maps; import com.google.common.collect.Table; import org.apache.druid.client.ImmutableDruidServer; import org.apache.druid.timeline.DataSegment; +import java.util.HashMap; import java.util.Map; import java.util.SortedSet; @@ -77,7 +77,7 @@ public class SegmentReplicantLookup public Map getClusterTiers(String segmentId) { Map retVal = segmentsInCluster.row(segmentId); - return (retVal == null) ? Maps.newHashMap() : retVal; + return (retVal == null) ? 
new HashMap<>() : retVal; } public int getLoadedReplicants(String segmentId) diff --git a/server/src/main/java/org/apache/druid/server/coordinator/helper/DruidCoordinatorCleanupOvershadowed.java b/server/src/main/java/org/apache/druid/server/coordinator/helper/DruidCoordinatorCleanupOvershadowed.java index 6894934dbbf..cae9895d921 100644 --- a/server/src/main/java/org/apache/druid/server/coordinator/helper/DruidCoordinatorCleanupOvershadowed.java +++ b/server/src/main/java/org/apache/druid/server/coordinator/helper/DruidCoordinatorCleanupOvershadowed.java @@ -19,7 +19,6 @@ package org.apache.druid.server.coordinator.helper; -import com.google.common.collect.Maps; import org.apache.druid.client.ImmutableDruidDataSource; import org.apache.druid.client.ImmutableDruidServer; import org.apache.druid.java.util.common.guava.Comparators; @@ -31,6 +30,7 @@ import org.apache.druid.server.coordinator.ServerHolder; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.VersionedIntervalTimeline; +import java.util.HashMap; import java.util.Map; import java.util.SortedSet; @@ -52,7 +52,7 @@ public class DruidCoordinatorCleanupOvershadowed implements DruidCoordinatorHelp // Unservice old partitions if we've had enough time to make sure we aren't flapping with old data if (params.hasDeletionWaitTimeElapsed()) { DruidCluster cluster = params.getDruidCluster(); - Map> timelines = Maps.newHashMap(); + Map> timelines = new HashMap<>(); for (SortedSet serverHolders : cluster.getSortedHistoricalsByTier()) { for (ServerHolder serverHolder : serverHolders) { diff --git a/server/src/main/java/org/apache/druid/server/coordinator/helper/DruidCoordinatorSegmentMerger.java b/server/src/main/java/org/apache/druid/server/coordinator/helper/DruidCoordinatorSegmentMerger.java index 87e44ee87df..6d9072ecca4 100644 --- a/server/src/main/java/org/apache/druid/server/coordinator/helper/DruidCoordinatorSegmentMerger.java +++ b/server/src/main/java/org/apache/druid/server/coordinator/helper/DruidCoordinatorSegmentMerger.java @@ -25,7 +25,6 @@ import com.google.common.collect.HashMultiset; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.collect.Multiset; import com.google.common.collect.Ordering; import com.google.inject.Inject; @@ -48,6 +47,8 @@ import org.apache.druid.timeline.partition.PartitionChunk; import org.joda.time.DateTime; import org.joda.time.Interval; +import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicReference; @@ -77,7 +78,7 @@ public class DruidCoordinatorSegmentMerger implements DruidCoordinatorHelper DatasourceWhitelist whitelist = whiteListRef.get(); CoordinatorStats stats = new CoordinatorStats(); - Map> dataSources = Maps.newHashMap(); + Map> dataSources = new HashMap<>(); // Find serviced segments by using a timeline for (DataSegment dataSegment : params.getAvailableSegments()) { @@ -193,7 +194,7 @@ public class DruidCoordinatorSegmentMerger implements DruidCoordinatorHelper private SegmentsToMerge() { - this.timelineObjects = Lists.newArrayList(); + this.timelineObjects = new ArrayList<>(); this.segments = HashMultiset.create(); this.byteCount = 0; } diff --git a/server/src/main/java/org/apache/druid/server/coordinator/rules/RuleMap.java b/server/src/main/java/org/apache/druid/server/coordinator/rules/RuleMap.java index 58fcce380a8..0ecaafe88e9 
100644 --- a/server/src/main/java/org/apache/druid/server/coordinator/rules/RuleMap.java +++ b/server/src/main/java/org/apache/druid/server/coordinator/rules/RuleMap.java @@ -19,8 +19,7 @@ package org.apache.druid.server.coordinator.rules; -import com.google.common.collect.Lists; - +import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -39,7 +38,7 @@ public class RuleMap public List getRules(String dataSource) { - List retVal = Lists.newArrayList(); + List retVal = new ArrayList<>(); if (dataSource != null) { retVal.addAll(rules.get(dataSource)); } diff --git a/server/src/main/java/org/apache/druid/server/emitter/EmitterModule.java b/server/src/main/java/org/apache/druid/server/emitter/EmitterModule.java index 0b731a59fa3..95e4eb39eaf 100644 --- a/server/src/main/java/org/apache/druid/server/emitter/EmitterModule.java +++ b/server/src/main/java/org/apache/druid/server/emitter/EmitterModule.java @@ -21,7 +21,6 @@ package org.apache.druid.server.emitter; import com.google.common.base.Supplier; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; import com.google.inject.Binder; import com.google.inject.Binding; import com.google.inject.Inject; @@ -45,6 +44,7 @@ import org.apache.druid.java.util.emitter.service.ServiceEmitter; import org.apache.druid.server.DruidNode; import java.lang.annotation.Annotation; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Properties; @@ -137,7 +137,7 @@ public class EmitterModule implements Module } if (emitter == null) { - List knownTypes = Lists.newArrayList(); + List knownTypes = new ArrayList<>(); for (Binding binding : emitterBindings) { final Annotation annotation = binding.getKey().getAnnotation(); if (annotation != null) { diff --git a/server/src/main/java/org/apache/druid/server/http/DatasourcesResource.java b/server/src/main/java/org/apache/druid/server/http/DatasourcesResource.java index 2708871bb50..793e19d0add 100644 --- a/server/src/main/java/org/apache/druid/server/http/DatasourcesResource.java +++ b/server/src/main/java/org/apache/druid/server/http/DatasourcesResource.java @@ -21,9 +21,6 @@ package org.apache.druid.server.http; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; import com.google.inject.Inject; import com.sun.jersey.spi.container.ResourceFilters; import org.apache.druid.client.CoordinatorServerView; @@ -65,8 +62,10 @@ import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; +import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -74,6 +73,7 @@ import java.util.Objects; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; +import java.util.TreeSet; import java.util.stream.Collectors; /** @@ -279,11 +279,11 @@ public class DatasourcesResource final Comparator comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd()); if (full != null) { - final Map> retVal = Maps.newTreeMap(comparator); + final Map> retVal = new TreeMap<>(comparator); for (DataSegment dataSegment : dataSource.getSegments()) { Map segments = retVal.get(dataSegment.getInterval()); if (segments == null) { - segments = Maps.newHashMap(); + segments = new HashMap<>(); 
retVal.put(dataSegment.getInterval(), segments); } @@ -298,11 +298,11 @@ public class DatasourcesResource } if (simple != null) { - final Map> retVal = Maps.newTreeMap(comparator); + final Map> retVal = new TreeMap<>(comparator); for (DataSegment dataSegment : dataSource.getSegments()) { Map properties = retVal.get(dataSegment.getInterval()); if (properties == null) { - properties = Maps.newHashMap(); + properties = new HashMap<>(); properties.put("size", dataSegment.getSize()); properties.put("count", 1); @@ -316,7 +316,7 @@ public class DatasourcesResource return Response.ok(retVal).build(); } - final Set intervals = Sets.newTreeSet(comparator); + final Set intervals = new TreeSet<>(comparator); for (DataSegment dataSegment : dataSource.getSegments()) { intervals.add(dataSegment.getInterval()); } @@ -344,12 +344,12 @@ public class DatasourcesResource final Comparator comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd()); if (full != null) { - final Map> retVal = Maps.newTreeMap(comparator); + final Map> retVal = new TreeMap<>(comparator); for (DataSegment dataSegment : dataSource.getSegments()) { if (theInterval.contains(dataSegment.getInterval())) { Map segments = retVal.get(dataSegment.getInterval()); if (segments == null) { - segments = Maps.newHashMap(); + segments = new HashMap<>(); retVal.put(dataSegment.getInterval(), segments); } @@ -365,12 +365,12 @@ public class DatasourcesResource } if (simple != null) { - final Map> retVal = Maps.newHashMap(); + final Map> retVal = new HashMap<>(); for (DataSegment dataSegment : dataSource.getSegments()) { if (theInterval.contains(dataSegment.getInterval())) { Map properties = retVal.get(dataSegment.getInterval()); if (properties == null) { - properties = Maps.newHashMap(); + properties = new HashMap<>(); properties.put("size", dataSegment.getSize()); properties.put("count", 1); @@ -385,7 +385,7 @@ public class DatasourcesResource return Response.ok(retVal).build(); } - final Set retVal = Sets.newTreeSet(Comparators.inverse(String.CASE_INSENSITIVE_ORDER)); + final Set retVal = new TreeSet<>(Comparators.inverse(String.CASE_INSENSITIVE_ORDER)); for (DataSegment dataSegment : dataSource.getSegments()) { if (theInterval.contains(dataSegment.getInterval())) { retVal.add(dataSegment.getIdentifier()); @@ -486,7 +486,7 @@ public class DatasourcesResource @PathParam("dataSourceName") String dataSourceName ) { - Set retVal = Sets.newHashSet(); + Set retVal = new HashSet<>(); for (DruidServer druidServer : serverInventoryView.getInventory()) { if (druidServer.getDataSource(dataSourceName) != null) { retVal.add(druidServer.getTier()); @@ -525,7 +525,7 @@ public class DatasourcesResource private Pair> getSegment(String segmentId) { DataSegment theSegment = null; - Set servers = Sets.newHashSet(); + Set servers = new HashSet<>(); for (DruidServer druidServer : serverInventoryView.getInventory()) { DataSegment currSegment = druidServer.getSegments().get(segmentId); if (currSegment != null) { @@ -551,14 +551,14 @@ public class DatasourcesResource private Map> getSimpleDatasource(String dataSourceName) { - Map tiers = Maps.newHashMap(); - Map segments = Maps.newHashMap(); + Map tiers = new HashMap<>(); + Map segments = new HashMap<>(); Map> retVal = ImmutableMap.of( "tiers", tiers, "segments", segments ); - Set totalDistinctSegments = Sets.newHashSet(); - Map> tierDistinctSegments = Maps.newHashMap(); + Set totalDistinctSegments = new HashSet<>(); + Map> tierDistinctSegments = new HashMap<>(); long totalSegmentSize = 0; DateTime minTime = 
DateTimes.MAX; @@ -573,7 +573,7 @@ public class DatasourcesResource } if (!tierDistinctSegments.containsKey(tier)) { - tierDistinctSegments.put(tier, Sets.newHashSet()); + tierDistinctSegments.put(tier, new HashSet<>()); } long dataSourceSegmentSize = 0; @@ -596,7 +596,7 @@ public class DatasourcesResource // tier stats Map tierStats = (Map) tiers.get(tier); if (tierStats == null) { - tierStats = Maps.newHashMap(); + tierStats = new HashMap<>(); tiers.put(druidServer.getTier(), tierStats); } tierStats.put("segmentCount", tierDistinctSegments.get(tier).size()); @@ -632,7 +632,7 @@ public class DatasourcesResource final Interval theInterval = Intervals.of(interval.replace("_", "/")); if (timeline == null) { log.debug("No timeline found for datasource[%s]", dataSourceName); - return Response.ok(Lists.newArrayList()).build(); + return Response.ok(new ArrayList()).build(); } Iterable> lookup = timeline.lookupWithIncompletePartitions(theInterval); diff --git a/server/src/main/java/org/apache/druid/server/http/IntervalsResource.java b/server/src/main/java/org/apache/druid/server/http/IntervalsResource.java index a3c3c5275b2..29d92f09826 100644 --- a/server/src/main/java/org/apache/druid/server/http/IntervalsResource.java +++ b/server/src/main/java/org/apache/druid/server/http/IntervalsResource.java @@ -19,7 +19,6 @@ package org.apache.druid.server.http; -import com.google.common.collect.Maps; import com.google.inject.Inject; import org.apache.druid.client.ImmutableDruidDataSource; import org.apache.druid.client.InventoryView; @@ -41,8 +40,10 @@ import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import java.util.Comparator; +import java.util.HashMap; import java.util.Map; import java.util.Set; +import java.util.TreeMap; /** */ @@ -76,12 +77,12 @@ public class IntervalsResource authorizerMapper ); - final Map>> retVal = Maps.newTreeMap(comparator); + final Map>> retVal = new TreeMap<>(comparator); for (ImmutableDruidDataSource dataSource : datasources) { for (DataSegment dataSegment : dataSource.getSegments()) { Map> interval = retVal.get(dataSegment.getInterval()); if (interval == null) { - Map> tmp = Maps.newHashMap(); + Map> tmp = new HashMap<>(); retVal.put(dataSegment.getInterval(), tmp); } setProperties(retVal, dataSource, dataSegment); @@ -111,13 +112,13 @@ public class IntervalsResource final Comparator comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd()); if (full != null) { - final Map>> retVal = Maps.newTreeMap(comparator); + final Map>> retVal = new TreeMap<>(comparator); for (ImmutableDruidDataSource dataSource : datasources) { for (DataSegment dataSegment : dataSource.getSegments()) { if (theInterval.contains(dataSegment.getInterval())) { Map> dataSourceInterval = retVal.get(dataSegment.getInterval()); if (dataSourceInterval == null) { - Map> tmp = Maps.newHashMap(); + Map> tmp = new HashMap<>(); retVal.put(dataSegment.getInterval(), tmp); } setProperties(retVal, dataSource, dataSegment); @@ -129,13 +130,13 @@ public class IntervalsResource } if (simple != null) { - final Map> retVal = Maps.newHashMap(); + final Map> retVal = new HashMap<>(); for (ImmutableDruidDataSource dataSource : datasources) { for (DataSegment dataSegment : dataSource.getSegments()) { if (theInterval.contains(dataSegment.getInterval())) { Map properties = retVal.get(dataSegment.getInterval()); if (properties == null) { - properties = Maps.newHashMap(); + properties = new HashMap<>(); properties.put("size", dataSegment.getSize()); 
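DatasourcesResource and IntervalsResource repeat a look-up-then-populate idiom around these maps, and the patch deliberately swaps only the allocation call inside it rather than restructuring the logic. A sketch of the idiom follows, together with the JDK 8 computeIfAbsent form a behavioral rewrite could use instead; the keys and values are invented for the example.

import java.util.HashMap;
import java.util.Map;

class LazyMapInitSketch
{
  public static void main(String[] args)
  {
    Map<String, Map<String, Long>> retVal = new HashMap<>();

    // The idiom as the patch leaves it: look up, allocate on miss, put back.
    Map<String, Long> properties = retVal.get("2018-10-01/2018-10-02");
    if (properties == null) {
      properties = new HashMap<>(); // was Maps.newHashMap()
      retVal.put("2018-10-01/2018-10-02", properties);
    }
    properties.put("size", 1024L);
    properties.put("count", 1L);

    // Equivalent single call on JDK 8+, not applied by this mechanical change:
    retVal.computeIfAbsent("2018-10-02/2018-10-03", k -> new HashMap<>()).put("count", 1L);

    System.out.println(retVal);
  }
}
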
properties.put("count", 1); @@ -151,7 +152,7 @@ public class IntervalsResource return Response.ok(retVal).build(); } - final Map retVal = Maps.newHashMap(); + final Map retVal = new HashMap<>(); for (ImmutableDruidDataSource dataSource : datasources) { for (DataSegment dataSegment : dataSource.getSegments()) { if (theInterval.contains(dataSegment.getInterval())) { @@ -171,7 +172,7 @@ public class IntervalsResource { Map properties = retVal.get(dataSegment.getInterval()).get(dataSource.getName()); if (properties == null) { - properties = Maps.newHashMap(); + properties = new HashMap<>(); properties.put("size", dataSegment.getSize()); properties.put("count", 1); diff --git a/server/src/main/java/org/apache/druid/server/http/MetadataResource.java b/server/src/main/java/org/apache/druid/server/http/MetadataResource.java index 043e1b39709..5626662e4aa 100644 --- a/server/src/main/java/org/apache/druid/server/http/MetadataResource.java +++ b/server/src/main/java/org/apache/druid/server/http/MetadataResource.java @@ -55,6 +55,7 @@ import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Set; +import java.util.TreeSet; import java.util.stream.Stream; /** @@ -96,14 +97,14 @@ public class MetadataResource final Collection druidDataSources = metadataSegmentManager.getInventory(); final Set dataSourceNamesPreAuth; if (includeDisabled != null) { - dataSourceNamesPreAuth = Sets.newTreeSet(metadataSegmentManager.getAllDatasourceNames()); + dataSourceNamesPreAuth = new TreeSet<>(metadataSegmentManager.getAllDatasourceNames()); } else { dataSourceNamesPreAuth = Sets.newTreeSet( Iterables.transform(druidDataSources, ImmutableDruidDataSource::getName) ); } - final Set dataSourceNamesPostAuth = Sets.newTreeSet(); + final Set dataSourceNamesPostAuth = new TreeSet<>(); Function> raGenerator = datasourceName -> { return Collections.singletonList(AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR.apply(datasourceName)); }; diff --git a/server/src/main/java/org/apache/druid/server/http/TiersResource.java b/server/src/main/java/org/apache/druid/server/http/TiersResource.java index a3722e805c4..dfbb85ff618 100644 --- a/server/src/main/java/org/apache/druid/server/http/TiersResource.java +++ b/server/src/main/java/org/apache/druid/server/http/TiersResource.java @@ -23,8 +23,6 @@ import com.google.common.base.Function; import com.google.common.collect.HashBasedTable; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; import com.google.common.collect.Table; import com.google.inject.Inject; import com.sun.jersey.spi.container.ResourceFilters; @@ -43,6 +41,8 @@ import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; +import java.util.HashMap; +import java.util.HashSet; import java.util.Map; import java.util.Set; @@ -71,12 +71,12 @@ public class TiersResource Response.ResponseBuilder builder = Response.status(Response.Status.OK); if (simple != null) { - Map> metadata = Maps.newHashMap(); + Map> metadata = new HashMap<>(); for (DruidServer druidServer : serverInventoryView.getInventory()) { Map tierMetadata = metadata.get(druidServer.getTier()); if (tierMetadata == null) { - tierMetadata = Maps.newHashMap(); + tierMetadata = new HashMap<>(); metadata.put(druidServer.getTier(), tierMetadata); } @@ -89,7 +89,7 @@ public class TiersResource return builder.entity(metadata).build(); } - Set tiers = 
Sets.newHashSet(); + Set tiers = new HashSet<>(); for (DruidServer server : serverInventoryView.getInventory()) { tiers.add(server.getTier()); } @@ -112,7 +112,7 @@ public class TiersResource for (DataSegment dataSegment : druidServer.getSegments().values()) { Map properties = retVal.get(dataSegment.getDataSource(), dataSegment.getInterval()); if (properties == null) { - properties = Maps.newHashMap(); + properties = new HashMap<>(); retVal.put(dataSegment.getDataSource(), dataSegment.getInterval(), properties); } properties.put("size", MapUtils.getLong(properties, "size", 0L) + dataSegment.getSize()); @@ -124,7 +124,7 @@ public class TiersResource return Response.ok(retVal.rowMap()).build(); } - Set retVal = Sets.newHashSet(); + Set retVal = new HashSet<>(); for (DruidServer druidServer : serverInventoryView.getInventory()) { if (druidServer.getTier().equalsIgnoreCase(tierName)) { retVal.addAll( diff --git a/server/src/main/java/org/apache/druid/server/initialization/AuthorizerMapperModule.java b/server/src/main/java/org/apache/druid/server/initialization/AuthorizerMapperModule.java index 89e4e286743..bd2d7cbf7ce 100644 --- a/server/src/main/java/org/apache/druid/server/initialization/AuthorizerMapperModule.java +++ b/server/src/main/java/org/apache/druid/server/initialization/AuthorizerMapperModule.java @@ -21,7 +21,6 @@ package org.apache.druid.server.initialization; import com.fasterxml.jackson.databind.Module; import com.google.common.base.Supplier; -import com.google.common.collect.Maps; import com.google.inject.Binder; import com.google.inject.Inject; import com.google.inject.Injector; @@ -41,6 +40,7 @@ import org.apache.druid.server.security.Authorizer; import org.apache.druid.server.security.AuthorizerMapper; import java.util.Collections; +import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -88,7 +88,7 @@ public class AuthorizerMapperModule implements DruidModule @Override public AuthorizerMapper get() { - Map authorizerMap = Maps.newHashMap(); + Map authorizerMap = new HashMap<>(); List authorizers = authConfig.getAuthorizers(); validateAuthorizers(authorizers); diff --git a/server/src/main/java/org/apache/druid/server/log/ComposingRequestLoggerProvider.java b/server/src/main/java/org/apache/druid/server/log/ComposingRequestLoggerProvider.java index c066935bc01..0366ae9589e 100644 --- a/server/src/main/java/org/apache/druid/server/log/ComposingRequestLoggerProvider.java +++ b/server/src/main/java/org/apache/druid/server/log/ComposingRequestLoggerProvider.java @@ -22,7 +22,6 @@ package org.apache.druid.server.log; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonTypeName; import com.google.common.base.Throwables; -import com.google.common.collect.Lists; import org.apache.druid.java.util.common.lifecycle.LifecycleStart; import org.apache.druid.java.util.common.lifecycle.LifecycleStop; import org.apache.druid.java.util.common.logger.Logger; @@ -42,7 +41,7 @@ public class ComposingRequestLoggerProvider implements RequestLoggerProvider @JsonProperty @NotNull - private final List loggerProviders = Lists.newArrayList(); + private final List loggerProviders = new ArrayList<>(); @Override public RequestLogger get() diff --git a/server/src/main/java/org/apache/druid/server/metrics/MetricsModule.java b/server/src/main/java/org/apache/druid/server/metrics/MetricsModule.java index c91ad3daf7a..8f0f0db6f83 100644 --- a/server/src/main/java/org/apache/druid/server/metrics/MetricsModule.java +++ 
b/server/src/main/java/org/apache/druid/server/metrics/MetricsModule.java @@ -21,7 +21,6 @@ package org.apache.druid.server.metrics; import com.google.common.base.Supplier; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; import com.google.inject.Binder; import com.google.inject.Injector; import com.google.inject.Key; @@ -43,6 +42,7 @@ import org.apache.druid.java.util.metrics.MonitorScheduler; import org.apache.druid.java.util.metrics.SysMonitor; import org.apache.druid.query.ExecutorServiceMonitor; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; @@ -89,7 +89,7 @@ public class MetricsModule implements Module Injector injector ) { - List monitors = Lists.newArrayList(); + List monitors = new ArrayList<>(); for (Class monitorClass : Iterables.concat(monitorsConfig.getMonitors(), monitorSet)) { final Monitor monitor = injector.getInstance(monitorClass); diff --git a/server/src/main/java/org/apache/druid/server/metrics/MonitorsConfig.java b/server/src/main/java/org/apache/druid/server/metrics/MonitorsConfig.java index 4381895b0bc..71300704ce4 100644 --- a/server/src/main/java/org/apache/druid/server/metrics/MonitorsConfig.java +++ b/server/src/main/java/org/apache/druid/server/metrics/MonitorsConfig.java @@ -21,12 +21,12 @@ package org.apache.druid.server.metrics; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; import org.apache.druid.java.util.common.logger.Logger; import org.apache.druid.java.util.metrics.Monitor; import org.apache.druid.query.DruidMetrics; import javax.validation.constraints.NotNull; +import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -109,7 +109,7 @@ public class MonitorsConfig private static List> getMonitorsFromNames(List monitorNames) { - List> monitors = Lists.newArrayList(); + List> monitors = new ArrayList<>(); if (monitorNames == null) { return monitors; } diff --git a/server/src/main/java/org/apache/druid/server/router/CoordinatorRuleManager.java b/server/src/main/java/org/apache/druid/server/router/CoordinatorRuleManager.java index e865112133b..afc1a6416d2 100644 --- a/server/src/main/java/org/apache/druid/server/router/CoordinatorRuleManager.java +++ b/server/src/main/java/org/apache/druid/server/router/CoordinatorRuleManager.java @@ -22,7 +22,6 @@ package org.apache.druid.server.router; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Supplier; -import com.google.common.collect.Lists; import com.google.inject.Inject; import org.apache.druid.discovery.DruidLeaderClient; import org.apache.druid.guice.ManageLifecycle; @@ -39,6 +38,7 @@ import org.jboss.netty.handler.codec.http.HttpMethod; import org.jboss.netty.handler.codec.http.HttpResponseStatus; import org.joda.time.Duration; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; @@ -165,7 +165,7 @@ public class CoordinatorRuleManager public List getRulesWithDefault(final String dataSource) { - List retVal = Lists.newArrayList(); + List retVal = new ArrayList<>(); Map> theRules = rules.get(); if (theRules.get(dataSource) != null) { retVal.addAll(theRules.get(dataSource)); diff --git a/server/src/main/java/org/apache/druid/server/security/AuthTestUtils.java b/server/src/main/java/org/apache/druid/server/security/AuthTestUtils.java index 
7b73c65422e..5922a5d234c 100644 --- a/server/src/main/java/org/apache/druid/server/security/AuthTestUtils.java +++ b/server/src/main/java/org/apache/druid/server/security/AuthTestUtils.java @@ -19,8 +19,7 @@ package org.apache.druid.server.security; -import com.google.common.collect.Maps; - +import java.util.HashMap; import java.util.Map; public class AuthTestUtils @@ -29,7 +28,7 @@ public class AuthTestUtils public static final AuthorizerMapper TEST_AUTHORIZER_MAPPER; static { - final Map defaultMap = Maps.newHashMap(); + final Map defaultMap = new HashMap<>(); defaultMap.put(AuthConfig.ALLOW_ALL_NAME, new AllowAllAuthenticator()); TEST_AUTHENTICATOR_MAPPER = new AuthenticatorMapper(defaultMap); diff --git a/server/src/main/java/org/apache/druid/server/security/AuthorizationUtils.java b/server/src/main/java/org/apache/druid/server/security/AuthorizationUtils.java index 651bed8eb5a..56a6f532531 100644 --- a/server/src/main/java/org/apache/druid/server/security/AuthorizationUtils.java +++ b/server/src/main/java/org/apache/druid/server/security/AuthorizationUtils.java @@ -22,12 +22,12 @@ package org.apache.druid.server.security; import com.google.common.base.Function; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; import org.apache.druid.java.util.common.ISE; import javax.servlet.http.HttpServletRequest; import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -112,7 +112,7 @@ public class AuthorizationUtils } // this method returns on first failure, so only successful Access results are kept in the cache - final Set resultCache = Sets.newHashSet(); + final Set resultCache = new HashSet<>(); for (ResourceAction resourceAction : resourceActions) { if (resultCache.contains(resourceAction)) { @@ -263,7 +263,7 @@ public class AuthorizationUtils throw new ISE("No authorizer found with name: [%s].", authenticationResult.getAuthorizerName()); } - final Map resultCache = Maps.newHashMap(); + final Map resultCache = new HashMap<>(); final Iterable filteredResources = Iterables.filter( resources, resource -> { @@ -329,7 +329,7 @@ public class AuthorizationUtils final AuthenticationResult authenticationResult = AuthorizationUtils.authenticationResultFromRequest(request); - Map> filteredResources = Maps.newHashMap(); + Map> filteredResources = new HashMap<>(); for (Map.Entry> entry : unfilteredResources.entrySet()) { if (entry.getValue() == null) { continue; diff --git a/server/src/main/java/org/apache/druid/server/security/PreResponseAuthorizationCheckFilter.java b/server/src/main/java/org/apache/druid/server/security/PreResponseAuthorizationCheckFilter.java index 415ac811d20..f88edada1e1 100644 --- a/server/src/main/java/org/apache/druid/server/security/PreResponseAuthorizationCheckFilter.java +++ b/server/src/main/java/org/apache/druid/server/security/PreResponseAuthorizationCheckFilter.java @@ -20,7 +20,6 @@ package org.apache.druid.server.security; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.Sets; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.emitter.EmittingLogger; import org.apache.druid.query.QueryInterruptedException; @@ -38,6 +37,7 @@ import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.io.OutputStream; import java.nio.charset.StandardCharsets; +import java.util.HashSet; 
import java.util.List; import java.util.Set; @@ -116,7 +116,7 @@ public class PreResponseAuthorizationCheckFilter implements Filter // Since this is the last filter in the chain, some previous authentication filter // should have placed an authentication result in the request. // If not, send an authentication challenge. - Set supportedAuthSchemes = Sets.newHashSet(); + Set supportedAuthSchemes = new HashSet<>(); for (Authenticator authenticator : authenticators) { String challengeHeader = authenticator.getAuthChallengeHeader(); if (challengeHeader != null) { diff --git a/server/src/test/java/org/apache/druid/client/CachingClusteredClientTest.java b/server/src/test/java/org/apache/druid/client/CachingClusteredClientTest.java index 1f9b4fc2051..d5bd504191a 100644 --- a/server/src/test/java/org/apache/druid/client/CachingClusteredClientTest.java +++ b/server/src/test/java/org/apache/druid/client/CachingClusteredClientTest.java @@ -31,7 +31,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import com.google.common.collect.Sets; import com.google.common.hash.HashFunction; @@ -651,7 +650,7 @@ public class CachingClusteredClientTest DateTimes.of("2011-01-09T00"), 18, 521, DateTimes.of("2011-01-09T02"), 181, 52 ), - runner.run(QueryPlus.wrap(query), Maps.newHashMap()) + runner.run(QueryPlus.wrap(query), new HashMap<>()) ); } @@ -1847,7 +1846,7 @@ public class CachingClusteredClientTest parseResults(queryIntervals, expectedResults, args); for (int i = 0; i < queryIntervals.size(); ++i) { - List mocks = Lists.newArrayList(); + List mocks = new ArrayList<>(); mocks.add(serverView); final Interval actualQueryInterval = new Interval( @@ -1877,8 +1876,8 @@ public class CachingClusteredClientTest QueryRunner queryable = expectations.getQueryRunner(); if (query instanceof TimeseriesQuery) { - final List segmentIds = Lists.newArrayList(); - final List>> results = Lists.newArrayList(); + final List segmentIds = new ArrayList<>(); + final List>> results = new ArrayList<>(); for (ServerExpectation expectation : expectations) { segmentIds.add(expectation.getSegmentId()); results.add(expectation.getResults()); @@ -1945,7 +1944,7 @@ public class CachingClusteredClientTest ) { MultipleSpecificSegmentSpec spec = (MultipleSpecificSegmentSpec) query.getQuerySegmentSpec(); - List> ret = Lists.newArrayList(); + List> ret = new ArrayList<>(); for (SegmentDescriptor descriptor : spec.getDescriptors()) { String id = StringUtils.format("%s_%s", queryIntervals.indexOf(descriptor.getInterval()), descriptor.getPartitionNumber()); int index = segmentIds.indexOf(id); @@ -1986,7 +1985,7 @@ public class CachingClusteredClientTest parseResults(queryIntervals, expectedResults, args); for (int i = 0; i < queryIntervals.size(); ++i) { - List mocks = Lists.newArrayList(); + List mocks = new ArrayList<>(); mocks.add(serverView); final Interval actualQueryInterval = new Interval( @@ -2000,7 +1999,7 @@ public class CachingClusteredClientTest mocks ); - List queryCaptures = Lists.newArrayList(); + List queryCaptures = new ArrayList<>(); final Map finalExpectation = serverExpectationList.get( serverExpectationList.size() - 1 ); @@ -2019,9 +2018,9 @@ public class CachingClusteredClientTest QueryRunner queryable = expectations.getQueryRunner(); if (query instanceof TimeseriesQuery) { - List segmentIds = Lists.newArrayList(); - 
List intervals = Lists.newArrayList(); - List>> results = Lists.newArrayList(); + List segmentIds = new ArrayList<>(); + List intervals = new ArrayList<>(); + List>> results = new ArrayList<>(); for (ServerExpectation expectation : expectations) { segmentIds.add(expectation.getSegmentId()); intervals.add(expectation.getInterval()); @@ -2032,9 +2031,9 @@ public class CachingClusteredClientTest .once(); } else if (query instanceof TopNQuery) { - List segmentIds = Lists.newArrayList(); - List intervals = Lists.newArrayList(); - List>> results = Lists.newArrayList(); + List segmentIds = new ArrayList<>(); + List intervals = new ArrayList<>(); + List>> results = new ArrayList<>(); for (ServerExpectation expectation : expectations) { segmentIds.add(expectation.getSegmentId()); intervals.add(expectation.getInterval()); @@ -2044,9 +2043,9 @@ public class CachingClusteredClientTest .andReturn(toQueryableTopNResults(segmentIds, intervals, results)) .once(); } else if (query instanceof SearchQuery) { - List segmentIds = Lists.newArrayList(); - List intervals = Lists.newArrayList(); - List>> results = Lists.newArrayList(); + List segmentIds = new ArrayList<>(); + List intervals = new ArrayList<>(); + List>> results = new ArrayList<>(); for (ServerExpectation expectation : expectations) { segmentIds.add(expectation.getSegmentId()); intervals.add(expectation.getInterval()); @@ -2056,9 +2055,9 @@ public class CachingClusteredClientTest .andReturn(toQueryableSearchResults(segmentIds, intervals, results)) .once(); } else if (query instanceof SelectQuery) { - List segmentIds = Lists.newArrayList(); - List intervals = Lists.newArrayList(); - List>> results = Lists.newArrayList(); + List segmentIds = new ArrayList<>(); + List intervals = new ArrayList<>(); + List>> results = new ArrayList<>(); for (ServerExpectation expectation : expectations) { segmentIds.add(expectation.getSegmentId()); intervals.add(expectation.getInterval()); @@ -2068,9 +2067,9 @@ public class CachingClusteredClientTest .andReturn(toQueryableSelectResults(segmentIds, intervals, results)) .once(); } else if (query instanceof GroupByQuery) { - List segmentIds = Lists.newArrayList(); - List intervals = Lists.newArrayList(); - List> results = Lists.newArrayList(); + List segmentIds = new ArrayList<>(); + List intervals = new ArrayList<>(); + List> results = new ArrayList<>(); for (ServerExpectation expectation : expectations) { segmentIds.add(expectation.getSegmentId()); intervals.add(expectation.getInterval()); @@ -2080,9 +2079,9 @@ public class CachingClusteredClientTest .andReturn(toQueryableGroupByResults(segmentIds, intervals, results)) .once(); } else if (query instanceof TimeBoundaryQuery) { - List segmentIds = Lists.newArrayList(); - List intervals = Lists.newArrayList(); - List>> results = Lists.newArrayList(); + List segmentIds = new ArrayList<>(); + List intervals = new ArrayList<>(); + List>> results = new ArrayList<>(); for (ServerExpectation expectation : expectations) { segmentIds.add(expectation.getSegmentId()); intervals.add(expectation.getInterval()); @@ -2125,7 +2124,7 @@ public class CachingClusteredClientTest @Override public Iterable>> apply(@Nullable Integer input) { - List>> retVal = Lists.newArrayList(); + List>> retVal = new ArrayList<>(); final Map exps = serverExpectationList.get(input); for (ServerExpectations expectations : exps.values()) { @@ -2182,11 +2181,11 @@ public class CachingClusteredClientTest { timeline = new VersionedIntervalTimeline<>(Ordering.natural()); - final List> serverExpectationList = 
Lists.newArrayList(); + final List> serverExpectationList = new ArrayList<>(); for (int k = 0; k < numQueryIntervals + 1; ++k) { final int numChunks = expectedResults.get(k).size(); - final TreeMap serverExpectations = Maps.newTreeMap(); + final TreeMap serverExpectations = new TreeMap<>(); serverExpectationList.add(serverExpectations); for (int j = 0; j < numChunks; ++j) { DruidServer lastServer = servers[random.nextInt(servers.length)]; @@ -2524,12 +2523,12 @@ public class CachingClusteredClientTest private Iterable> makeTopNResults(List names, Object... objects) { Preconditions.checkArgument(names.size() == 7); - List> retVal = Lists.newArrayList(); + List> retVal = new ArrayList<>(); int index = 0; while (index < objects.length) { DateTime timestamp = (DateTime) objects[index++]; - List> values = Lists.newArrayList(); + List> values = new ArrayList<>(); while (index < objects.length && !(objects[index] instanceof DateTime)) { if (objects.length - index < 3) { throw new ISE( @@ -2575,12 +2574,12 @@ public class CachingClusteredClientTest private Iterable> makeSearchResults(String dim, Object... objects) { - List> retVal = Lists.newArrayList(); + List> retVal = new ArrayList<>(); int index = 0; while (index < objects.length) { DateTime timestamp = (DateTime) objects[index++]; - List values = Lists.newArrayList(); + List values = new ArrayList<>(); while (index < objects.length && !(objects[index] instanceof DateTime)) { values.add(new SearchHit(dim, objects[index++].toString(), (Integer) objects[index++])); } @@ -2596,12 +2595,12 @@ public class CachingClusteredClientTest Object... objects ) { - List> retVal = Lists.newArrayList(); + List> retVal = new ArrayList<>(); int index = 0; while (index < objects.length) { DateTime timestamp = (DateTime) objects[index++]; - List values = Lists.newArrayList(); + List values = new ArrayList<>(); while (index < objects.length && !(objects[index] instanceof DateTime)) { values.add(new EventHolder(null, 0, (Map) objects[index++])); @@ -2618,7 +2617,7 @@ public class CachingClusteredClientTest private Iterable makeGroupByResults(Object... objects) { - List retVal = Lists.newArrayList(); + List retVal = new ArrayList<>(); int index = 0; while (index < objects.length) { DateTime timestamp = (DateTime) objects[index++]; @@ -2897,7 +2896,7 @@ public class CachingClusteredClientTest { private final DruidServer server; private final QueryRunner queryRunner; - private final List expectations = Lists.newArrayList(); + private final List expectations = new ArrayList<>(); public ServerExpectations( DruidServer server, diff --git a/server/src/test/java/org/apache/druid/client/CachingQueryRunnerTest.java b/server/src/test/java/org/apache/druid/client/CachingQueryRunnerTest.java index b20edd804a0..12c653a3c52 100644 --- a/server/src/test/java/org/apache/druid/client/CachingQueryRunnerTest.java +++ b/server/src/test/java/org/apache/druid/client/CachingQueryRunnerTest.java @@ -72,6 +72,7 @@ import org.junit.runners.Parameterized; import java.io.ByteArrayOutputStream; import java.io.Closeable; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; @@ -400,12 +401,12 @@ public class CachingQueryRunnerTest private List makeTopNResults(boolean cachedResults, Object... 
objects) { - List retVal = Lists.newArrayList(); + List retVal = new ArrayList<>(); int index = 0; while (index < objects.length) { DateTime timestamp = (DateTime) objects[index++]; - List> values = Lists.newArrayList(); + List> values = new ArrayList<>(); while (index < objects.length && !(objects[index] instanceof DateTime)) { if (objects.length - index < 3) { throw new ISE( diff --git a/server/src/test/java/org/apache/druid/client/DirectDruidClientTest.java b/server/src/test/java/org/apache/druid/client/DirectDruidClientTest.java index 06174b4896f..65a8cd62c44 100644 --- a/server/src/test/java/org/apache/druid/client/DirectDruidClientTest.java +++ b/server/src/test/java/org/apache/druid/client/DirectDruidClientTest.java @@ -20,8 +20,6 @@ package org.apache.druid.client; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.SettableFuture; @@ -61,6 +59,7 @@ import org.junit.Test; import java.io.ByteArrayInputStream; import java.io.InputStream; import java.net.URL; +import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -123,9 +122,9 @@ public class DirectDruidClientTest "test", Intervals.of("2013-01-01/2013-01-02"), DateTimes.of("2013-01-01").toString(), - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + new HashMap<>(), + new ArrayList<>(), + new ArrayList<>(), NoneShardSpec.instance(), 0, 0L @@ -241,9 +240,9 @@ public class DirectDruidClientTest "test", Intervals.of("2013-01-01/2013-01-02"), DateTimes.of("2013-01-01").toString(), - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + new HashMap<>(), + new ArrayList<>(), + new ArrayList<>(), NoneShardSpec.instance(), 0, 0L @@ -310,9 +309,9 @@ public class DirectDruidClientTest "test", Intervals.of("2013-01-01/2013-01-02"), DateTimes.of("2013-01-01").toString(), - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + new HashMap<>(), + new ArrayList<>(), + new ArrayList<>(), NoneShardSpec.instance(), 0, 0L diff --git a/server/src/test/java/org/apache/druid/client/cache/CacheDistributionTest.java b/server/src/test/java/org/apache/druid/client/cache/CacheDistributionTest.java index 79de6ac6e68..f578be5028a 100644 --- a/server/src/test/java/org/apache/druid/client/cache/CacheDistributionTest.java +++ b/server/src/test/java/org/apache/druid/client/cache/CacheDistributionTest.java @@ -21,7 +21,6 @@ package org.apache.druid.client.cache; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; -import com.google.common.collect.Maps; import com.google.common.collect.Sets; import net.spy.memcached.DefaultHashAlgorithm; import net.spy.memcached.HashAlgorithm; @@ -39,6 +38,7 @@ import org.junit.runners.Parameterized; import java.net.InetSocketAddress; import java.net.SocketAddress; import java.util.Arrays; +import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; @@ -112,7 +112,7 @@ public class CacheDistributionTest } ); - Map counter = Maps.newHashMap(); + Map counter = new HashMap<>(); long t = 0; for (int i = 0; i < KEY_COUNT; ++i) { final String k = DigestUtils.sha1Hex("abc" + i) + ":" + DigestUtils.sha1Hex("xyz" + i); diff --git a/server/src/test/java/org/apache/druid/client/cache/CaffeineCacheTest.java 
b/server/src/test/java/org/apache/druid/client/cache/CaffeineCacheTest.java index 67f93012f80..1c3853b0f5b 100644 --- a/server/src/test/java/org/apache/druid/client/cache/CaffeineCacheTest.java +++ b/server/src/test/java/org/apache/druid/client/cache/CaffeineCacheTest.java @@ -38,6 +38,7 @@ import org.junit.Test; import java.lang.reflect.Field; import java.lang.reflect.Method; +import java.util.ArrayList; import java.util.Collections; import java.util.Map; import java.util.Properties; @@ -170,7 +171,7 @@ public class CaffeineCacheTest result = cache.getBulk(Collections.singletonList(missingKey)); Assert.assertEquals(result.size(), 0); - result = cache.getBulk(Lists.newArrayList()); + result = cache.getBulk(new ArrayList<>()); Assert.assertEquals(result.size(), 0); } diff --git a/server/src/test/java/org/apache/druid/client/cache/MemcachedCacheBenchmark.java b/server/src/test/java/org/apache/druid/client/cache/MemcachedCacheBenchmark.java index 147ae6a488d..313ecdfd35a 100644 --- a/server/src/test/java/org/apache/druid/client/cache/MemcachedCacheBenchmark.java +++ b/server/src/test/java/org/apache/druid/client/cache/MemcachedCacheBenchmark.java @@ -23,7 +23,6 @@ import com.google.caliper.Param; import com.google.caliper.Runner; import com.google.caliper.SimpleBenchmark; import com.google.common.base.Suppliers; -import com.google.common.collect.Lists; import net.spy.memcached.AddrUtil; import net.spy.memcached.ConnectionFactoryBuilder; import net.spy.memcached.DefaultHashAlgorithm; @@ -34,6 +33,7 @@ import net.spy.memcached.transcoders.SerializingTranscoder; import org.apache.druid.collections.StupidResourceHolder; import org.apache.druid.java.util.common.StringUtils; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Random; @@ -147,7 +147,7 @@ public class MemcachedCacheBenchmark extends SimpleBenchmark { long count = 0; for (int i = 0; i < reps; i++) { - List keys = Lists.newArrayList(); + List keys = new ArrayList<>(); for (int k = 0; k < objectCount; ++k) { String key = BASE_KEY + k; keys.add(new Cache.NamedKey(NAMESPACE, StringUtils.toUtf8(key))); diff --git a/server/src/test/java/org/apache/druid/client/cache/MemcachedCacheTest.java b/server/src/test/java/org/apache/druid/client/cache/MemcachedCacheTest.java index ac13395c11d..4326e0e9098 100644 --- a/server/src/test/java/org/apache/druid/client/cache/MemcachedCacheTest.java +++ b/server/src/test/java/org/apache/druid/client/cache/MemcachedCacheTest.java @@ -23,7 +23,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.primitives.Ints; import com.google.inject.Binder; import com.google.inject.Inject; @@ -65,6 +64,7 @@ import org.junit.Test; import java.net.SocketAddress; import java.util.ArrayList; import java.util.Collection; +import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Set; @@ -674,7 +674,7 @@ class MockMemcachedClient implements MemcachedClientIF @Override public Map get() { - Map retVal = Maps.newHashMap(); + Map retVal = new HashMap<>(); while (keys.hasNext()) { String key = keys.next(); diff --git a/server/src/test/java/org/apache/druid/client/indexing/ClientMergeQueryTest.java b/server/src/test/java/org/apache/druid/client/indexing/ClientMergeQueryTest.java index 61fd885eb1d..7983f5efaa1 100644 --- 
a/server/src/test/java/org/apache/druid/client/indexing/ClientMergeQueryTest.java +++ b/server/src/test/java/org/apache/druid/client/indexing/ClientMergeQueryTest.java @@ -19,7 +19,6 @@ package org.apache.druid.client.indexing; -import com.google.common.collect.Lists; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.timeline.DataSegment; @@ -28,6 +27,7 @@ import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; +import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -39,7 +39,7 @@ public class ClientMergeQueryTest private static final DataSegment DATA_SEGMENT = new DataSegment(DATA_SOURCE, INTERVAL, START.toString(), null, null, null, null, 0, 0); private static final List SEGMENT_LIST = Collections.singletonList(DATA_SEGMENT); - private static final List AGGREGATOR_LIST = Lists.newArrayList(); + private static final List AGGREGATOR_LIST = new ArrayList<>(); private static final ClientMergeQuery CLIENT_MERGE_QUERY = new ClientMergeQuery(DATA_SOURCE, SEGMENT_LIST, AGGREGATOR_LIST); diff --git a/server/src/test/java/org/apache/druid/client/selector/TierSelectorStrategyTest.java b/server/src/test/java/org/apache/druid/client/selector/TierSelectorStrategyTest.java index 7a8dbff00de..ce7fe8aac86 100644 --- a/server/src/test/java/org/apache/druid/client/selector/TierSelectorStrategyTest.java +++ b/server/src/test/java/org/apache/druid/client/selector/TierSelectorStrategyTest.java @@ -19,8 +19,6 @@ package org.apache.druid.client.selector; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import org.apache.druid.client.DirectDruidClient; import org.apache.druid.client.DruidServer; import org.apache.druid.java.util.common.DateTimes; @@ -36,6 +34,7 @@ import org.junit.Test; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; public class TierSelectorStrategyTest @@ -122,9 +121,9 @@ public class TierSelectorStrategyTest "test", Intervals.of("2013-01-01/2013-01-02"), DateTimes.of("2013-01-01").toString(), - Maps.newHashMap(), - Lists.newArrayList(), - Lists.newArrayList(), + new HashMap<>(), + new ArrayList<>(), + new ArrayList<>(), NoneShardSpec.instance(), 0, 0L @@ -134,7 +133,7 @@ public class TierSelectorStrategyTest List servers = new ArrayList<>(Arrays.asList(expectedSelection)); - List expectedCandidates = Lists.newArrayList(); + List expectedCandidates = new ArrayList<>(); for (QueryableDruidServer server : servers) { expectedCandidates.add(server.getServer().getMetadata()); } diff --git a/server/src/test/java/org/apache/druid/curator/inventory/CuratorInventoryManagerTest.java b/server/src/test/java/org/apache/druid/curator/inventory/CuratorInventoryManagerTest.java index 120db3a4258..4190b10744d 100644 --- a/server/src/test/java/org/apache/druid/curator/inventory/CuratorInventoryManagerTest.java +++ b/server/src/test/java/org/apache/druid/curator/inventory/CuratorInventoryManagerTest.java @@ -20,7 +20,6 @@ package org.apache.druid.curator.inventory; import com.google.common.collect.Iterables; -import com.google.common.collect.Maps; import com.google.common.primitives.Ints; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.api.CuratorEvent; @@ -36,6 +35,7 @@ import org.junit.Before; import org.junit.Test; import java.util.Map; +import java.util.TreeMap; import java.util.concurrent.CountDownLatch; import 
java.util.concurrent.ExecutorService; @@ -185,7 +185,7 @@ public class CuratorInventoryManagerTest extends CuratorTestBase @Override public Map deserializeContainer(byte[] bytes) { - return Maps.newTreeMap(); + return new TreeMap<>(); } @Override diff --git a/server/src/test/java/org/apache/druid/initialization/InitializationTest.java b/server/src/test/java/org/apache/druid/initialization/InitializationTest.java index 820eed5318f..3c3d4fe51a8 100644 --- a/server/src/test/java/org/apache/druid/initialization/InitializationTest.java +++ b/server/src/test/java/org/apache/druid/initialization/InitializationTest.java @@ -120,7 +120,7 @@ public class InitializationTest Collection modules = Initialization.getFromExtensions(new ExtensionsConfig(), DruidModule.class); - Set loadedModuleNames = Sets.newHashSet(); + Set loadedModuleNames = new HashSet<>(); for (DruidModule module : modules) { Assert.assertFalse("Duplicate extensions are loaded", loadedModuleNames.contains(module.getClass().getName())); loadedModuleNames.add(module.getClass().getName()); diff --git a/server/src/test/java/org/apache/druid/realtime/firehose/CombiningFirehoseFactoryTest.java b/server/src/test/java/org/apache/druid/realtime/firehose/CombiningFirehoseFactoryTest.java index 2f3729f30dc..c81d309cd8a 100644 --- a/server/src/test/java/org/apache/druid/realtime/firehose/CombiningFirehoseFactoryTest.java +++ b/server/src/test/java/org/apache/druid/realtime/firehose/CombiningFirehoseFactoryTest.java @@ -19,7 +19,6 @@ package org.apache.druid.realtime.firehose; -import com.google.common.collect.Lists; import org.apache.druid.data.input.Firehose; import org.apache.druid.data.input.FirehoseFactory; import org.apache.druid.data.input.InputRow; @@ -36,6 +35,7 @@ import org.junit.Test; import javax.annotation.Nullable; import java.io.File; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; @@ -90,7 +90,7 @@ public class CombiningFirehoseFactoryTest @Override public List getDimension(String dimension) { - return Lists.newArrayList(); + return new ArrayList<>(); } @Override diff --git a/server/src/test/java/org/apache/druid/segment/loading/SegmentLoaderLocalCacheManagerTest.java b/server/src/test/java/org/apache/druid/segment/loading/SegmentLoaderLocalCacheManagerTest.java index 844eba701e1..5a54f8ad4d6 100644 --- a/server/src/test/java/org/apache/druid/segment/loading/SegmentLoaderLocalCacheManagerTest.java +++ b/server/src/test/java/org/apache/druid/segment/loading/SegmentLoaderLocalCacheManagerTest.java @@ -24,7 +24,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.jsontype.NamedType; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.java.util.emitter.EmittingLogger; @@ -44,6 +43,7 @@ import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.io.File; +import java.util.ArrayList; import java.util.Collection; import java.util.List; @@ -87,7 +87,7 @@ public class SegmentLoaderLocalCacheManagerTest EmittingLogger.registerEmitter(new NoopServiceEmitter()); localSegmentCacheFolder = tmpFolder.newFolder("segment_cache_folder"); - final List locations = Lists.newArrayList(); + final List locations = new ArrayList<>(); final StorageLocationConfig locationConfig = new 
StorageLocationConfig(); locationConfig.setPath(localSegmentCacheFolder); locationConfig.setMaxSize(10000000000L); @@ -156,7 +156,7 @@ public class SegmentLoaderLocalCacheManagerTest { final File localStorageFolder = tmpFolder.newFolder("local_storage_folder"); - final List locations = Lists.newArrayList(); + final List locations = new ArrayList<>(); final StorageLocationConfig locationConfig = new StorageLocationConfig(); locationConfig.setPath(localStorageFolder); locationConfig.setMaxSize(10000000000L); @@ -206,7 +206,7 @@ public class SegmentLoaderLocalCacheManagerTest @Test public void testRetrySuccessAtSecondLocation() throws Exception { - final List locations = Lists.newArrayList(); + final List locations = new ArrayList<>(); final StorageLocationConfig locationConfig = new StorageLocationConfig(); final File localStorageFolder = tmpFolder.newFolder("local_storage_folder"); // mock can't write in first location @@ -259,7 +259,7 @@ public class SegmentLoaderLocalCacheManagerTest @Test public void testRetryAllFail() throws Exception { - final List locations = Lists.newArrayList(); + final List locations = new ArrayList<>(); final StorageLocationConfig locationConfig = new StorageLocationConfig(); final File localStorageFolder = tmpFolder.newFolder("local_storage_folder"); // mock can't write in first location @@ -315,7 +315,7 @@ public class SegmentLoaderLocalCacheManagerTest @Test public void testEmptyToFullOrder() throws Exception { - final List locations = Lists.newArrayList(); + final List locations = new ArrayList<>(); final StorageLocationConfig locationConfig = new StorageLocationConfig(); final File localStorageFolder = tmpFolder.newFolder("local_storage_folder"); localStorageFolder.setWritable(true); diff --git a/server/src/test/java/org/apache/druid/segment/realtime/RealtimeManagerTest.java b/server/src/test/java/org/apache/druid/segment/realtime/RealtimeManagerTest.java index 8d2a45cada2..247d2146eab 100644 --- a/server/src/test/java/org/apache/druid/segment/realtime/RealtimeManagerTest.java +++ b/server/src/test/java/org/apache/druid/segment/realtime/RealtimeManagerTest.java @@ -27,7 +27,6 @@ import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; -import com.google.common.collect.Lists; import com.google.common.util.concurrent.MoreExecutors; import org.apache.druid.data.input.Committer; import org.apache.druid.data.input.Firehose; @@ -92,6 +91,7 @@ import org.junit.Test; import javax.annotation.Nullable; import java.io.File; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; @@ -793,7 +793,7 @@ public class RealtimeManagerTest @Override public List getDimension(String dimension) { - return Lists.newArrayList(); + return new ArrayList<>(); } @Override diff --git a/server/src/test/java/org/apache/druid/segment/realtime/appenderator/CommittedTest.java b/server/src/test/java/org/apache/druid/segment/realtime/appenderator/CommittedTest.java index 3aeeb78b0b4..4d3528de33b 100644 --- a/server/src/test/java/org/apache/druid/segment/realtime/appenderator/CommittedTest.java +++ b/server/src/test/java/org/apache/druid/segment/realtime/appenderator/CommittedTest.java @@ -21,13 +21,13 @@ package org.apache.druid.segment.realtime.appenderator; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; -import 
com.google.common.collect.Maps; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.timeline.partition.LinearShardSpec; import org.junit.Assert; import org.junit.Test; +import java.util.HashMap; import java.util.Map; public class CommittedTest @@ -61,7 +61,7 @@ public class CommittedTest private static Committed fixedInstance() { - final Map hydrants = Maps.newHashMap(); + final Map hydrants = new HashMap<>(); hydrants.put(IDENTIFIER1, 3); hydrants.put(IDENTIFIER2, 2); return new Committed(hydrants, ImmutableMap.of("metadata", "foo")); diff --git a/server/src/test/java/org/apache/druid/segment/realtime/appenderator/StreamAppenderatorDriverTest.java b/server/src/test/java/org/apache/druid/segment/realtime/appenderator/StreamAppenderatorDriverTest.java index 421cc570084..34d0e6f96ee 100644 --- a/server/src/test/java/org/apache/druid/segment/realtime/appenderator/StreamAppenderatorDriverTest.java +++ b/server/src/test/java/org/apache/druid/segment/realtime/appenderator/StreamAppenderatorDriverTest.java @@ -25,7 +25,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; -import com.google.common.collect.Maps; import com.google.common.util.concurrent.ListenableFuture; import org.apache.druid.data.input.Committer; import org.apache.druid.data.input.InputRow; @@ -55,6 +54,7 @@ import org.junit.Test; import java.io.IOException; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -408,7 +408,7 @@ public class StreamAppenderatorDriverTest extends EasyMockSupport { private final String dataSource; private final Granularity granularity; - private final Map counters = Maps.newHashMap(); + private final Map counters = new HashMap<>(); public TestSegmentAllocator(String dataSource, Granularity granularity) { diff --git a/server/src/test/java/org/apache/druid/segment/realtime/appenderator/TestUsedSegmentChecker.java b/server/src/test/java/org/apache/druid/segment/realtime/appenderator/TestUsedSegmentChecker.java index de53f890e9c..d6c1e7ee234 100644 --- a/server/src/test/java/org/apache/druid/segment/realtime/appenderator/TestUsedSegmentChecker.java +++ b/server/src/test/java/org/apache/druid/segment/realtime/appenderator/TestUsedSegmentChecker.java @@ -20,12 +20,12 @@ package org.apache.druid.segment.realtime.appenderator; import com.google.common.collect.Ordering; -import com.google.common.collect.Sets; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.TimelineObjectHolder; import org.apache.druid.timeline.VersionedIntervalTimeline; import org.apache.druid.timeline.partition.PartitionChunk; +import java.util.HashSet; import java.util.Set; public class TestUsedSegmentChecker implements UsedSegmentChecker @@ -49,7 +49,7 @@ public class TestUsedSegmentChecker implements UsedSegmentChecker ); } - final Set retVal = Sets.newHashSet(); + final Set retVal = new HashSet<>(); for (SegmentIdentifier identifier : identifiers) { for (TimelineObjectHolder holder : timeline.lookup(identifier.getInterval())) { for (PartitionChunk chunk : holder.getObject()) { diff --git a/server/src/test/java/org/apache/druid/segment/realtime/firehose/LocalFirehoseFactoryTest.java b/server/src/test/java/org/apache/druid/segment/realtime/firehose/LocalFirehoseFactoryTest.java index 
1d836630880..9930e8426af 100644 --- a/server/src/test/java/org/apache/druid/segment/realtime/firehose/LocalFirehoseFactoryTest.java +++ b/server/src/test/java/org/apache/druid/segment/realtime/firehose/LocalFirehoseFactoryTest.java @@ -19,7 +19,6 @@ package org.apache.druid.segment.realtime.firehose; -import com.google.common.collect.Lists; import org.apache.druid.data.input.Firehose; import org.apache.druid.data.input.Row; import org.apache.druid.data.input.impl.CSVParseSpec; @@ -80,8 +79,8 @@ public class LocalFirehoseFactoryTest ), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(Arrays.asList("timestamp", "a")), - Lists.newArrayList(), - Lists.newArrayList() + new ArrayList<>(), + new ArrayList<>() ), ",", Arrays.asList("timestamp", "a"), diff --git a/server/src/test/java/org/apache/druid/segment/realtime/firehose/SqlFirehoseFactoryTest.java b/server/src/test/java/org/apache/druid/segment/realtime/firehose/SqlFirehoseFactoryTest.java index 59321b1765c..6626c867a99 100644 --- a/server/src/test/java/org/apache/druid/segment/realtime/firehose/SqlFirehoseFactoryTest.java +++ b/server/src/test/java/org/apache/druid/segment/realtime/firehose/SqlFirehoseFactoryTest.java @@ -21,7 +21,6 @@ package org.apache.druid.segment.realtime.firehose; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; import org.apache.commons.dbcp2.BasicDataSource; import org.apache.commons.io.FileUtils; import org.apache.druid.data.input.Firehose; @@ -73,8 +72,8 @@ public class SqlFirehoseFactoryTest new TimestampSpec("timestamp", "auto", null), new DimensionsSpec( DimensionsSpec.getDefaultSchemas(Arrays.asList("timestamp", "a", "b")), - Lists.newArrayList(), - Lists.newArrayList() + new ArrayList<>(), + new ArrayList<>() ) ) ); diff --git a/server/src/test/java/org/apache/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java b/server/src/test/java/org/apache/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java index ca26db2ce79..a3a9b5ba8c8 100644 --- a/server/src/test/java/org/apache/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java +++ b/server/src/test/java/org/apache/druid/segment/realtime/plumber/RealtimePlumberSchoolTest.java @@ -23,7 +23,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Suppliers; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.io.Files; import com.google.common.util.concurrent.MoreExecutors; import org.apache.commons.io.FileUtils; @@ -73,6 +72,7 @@ import org.junit.runners.Parameterized; import java.io.File; import java.util.ArrayList; import java.util.Collection; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; @@ -91,7 +91,7 @@ public class RealtimePlumberSchoolTest new MessageTimeRejectionPolicyFactory() }; - final List constructors = Lists.newArrayList(); + final List constructors = new ArrayList<>(); for (RejectionPolicyFactory rejectionPolicy : rejectionPolicies) { constructors.add(new Object[]{rejectionPolicy, OffHeapMemorySegmentWriteOutMediumFactory.instance()}); constructors.add(new Object[]{rejectionPolicy, TmpFileSegmentWriteOutMediumFactory.instance()}); @@ -214,7 +214,7 @@ public class RealtimePlumberSchoolTest realtimePlumberSchool = new RealtimePlumberSchool( emitter, - new DefaultQueryRunnerFactoryConglomerate(Maps.newHashMap()), + new 
DefaultQueryRunnerFactoryConglomerate(new HashMap<>()), dataSegmentPusher, announcer, segmentPublisher, @@ -588,7 +588,7 @@ public class RealtimePlumberSchoolTest @Override public List getDimensions() { - return Lists.newArrayList(); + return new ArrayList<>(); } @Override @@ -606,7 +606,7 @@ public class RealtimePlumberSchoolTest @Override public List getDimension(String dimension) { - return Lists.newArrayList(); + return new ArrayList<>(); } @Override diff --git a/server/src/test/java/org/apache/druid/segment/realtime/plumber/SinkTest.java b/server/src/test/java/org/apache/druid/segment/realtime/plumber/SinkTest.java index b0ca66836af..de55360a61d 100644 --- a/server/src/test/java/org/apache/druid/segment/realtime/plumber/SinkTest.java +++ b/server/src/test/java/org/apache/druid/segment/realtime/plumber/SinkTest.java @@ -22,7 +22,6 @@ package org.apache.druid.segment.realtime.plumber; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterators; -import com.google.common.collect.Lists; import org.apache.druid.data.input.InputRow; import org.apache.druid.data.input.MapBasedInputRow; import org.apache.druid.data.input.Row; @@ -43,6 +42,7 @@ import org.joda.time.Period; import org.junit.Assert; import org.junit.Test; +import java.util.ArrayList; import java.util.List; /** @@ -100,7 +100,7 @@ public class SinkTest @Override public List getDimensions() { - return Lists.newArrayList(); + return new ArrayList<>(); } @Override @@ -118,7 +118,7 @@ public class SinkTest @Override public List getDimension(String dimension) { - return Lists.newArrayList(); + return new ArrayList<>(); } @Override @@ -154,7 +154,7 @@ public class SinkTest @Override public List getDimensions() { - return Lists.newArrayList(); + return new ArrayList<>(); } @Override @@ -172,7 +172,7 @@ public class SinkTest @Override public List getDimension(String dimension) { - return Lists.newArrayList(); + return new ArrayList<>(); } @Override diff --git a/server/src/test/java/org/apache/druid/server/SegmentManagerTest.java b/server/src/test/java/org/apache/druid/server/SegmentManagerTest.java index aaa1b04a89f..966dc0a284b 100644 --- a/server/src/test/java/org/apache/druid/server/SegmentManagerTest.java +++ b/server/src/test/java/org/apache/druid/server/SegmentManagerTest.java @@ -21,7 +21,6 @@ package org.apache.druid.server; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; import com.google.common.collect.Ordering; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.java.util.common.MapUtils; @@ -43,6 +42,7 @@ import org.junit.Before; import org.junit.Test; import java.io.File; +import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -144,8 +144,8 @@ public class SegmentManagerTest Intervals.of("0/1000"), "0", ImmutableMap.of("interval", Intervals.of("0/1000"), "version", 0), - Lists.newArrayList(), - Lists.newArrayList(), + new ArrayList<>(), + new ArrayList<>(), NoneShardSpec.instance(), 0, 10 @@ -155,8 +155,8 @@ public class SegmentManagerTest Intervals.of("1000/2000"), "0", ImmutableMap.of("interval", Intervals.of("1000/2000"), "version", 0), - Lists.newArrayList(), - Lists.newArrayList(), + new ArrayList<>(), + new ArrayList<>(), NoneShardSpec.instance(), 0, 10 @@ -166,8 +166,8 @@ public class SegmentManagerTest Intervals.of("0/1000"), "0", ImmutableMap.of("interval", Intervals.of("0/1000"), 
"version", 0), - Lists.newArrayList(), - Lists.newArrayList(), + new ArrayList<>(), + new ArrayList<>(), NoneShardSpec.instance(), 0, 100 @@ -177,8 +177,8 @@ public class SegmentManagerTest Intervals.of("1000/2000"), "0", ImmutableMap.of("interval", Intervals.of("1000/2000"), "version", 0), - Lists.newArrayList(), - Lists.newArrayList(), + new ArrayList<>(), + new ArrayList<>(), NoneShardSpec.instance(), 0, 100 @@ -189,8 +189,8 @@ public class SegmentManagerTest Intervals.of("1000/2000"), "1", ImmutableMap.of("interval", Intervals.of("1000/2000"), "version", 1), - Lists.newArrayList(), - Lists.newArrayList(), + new ArrayList<>(), + new ArrayList<>(), NoneShardSpec.instance(), 1, 100 diff --git a/server/src/test/java/org/apache/druid/server/StatusResourceTest.java b/server/src/test/java/org/apache/druid/server/StatusResourceTest.java index b5db403bb78..8b184aa47eb 100644 --- a/server/src/test/java/org/apache/druid/server/StatusResourceTest.java +++ b/server/src/test/java/org/apache/druid/server/StatusResourceTest.java @@ -21,7 +21,6 @@ package org.apache.druid.server; import com.google.common.base.Splitter; import com.google.common.collect.ImmutableList; -import com.google.common.collect.Sets; import com.google.inject.Guice; import com.google.inject.Injector; import org.apache.druid.guice.PropertiesModule; @@ -32,6 +31,7 @@ import org.junit.Test; import java.util.Collection; import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -70,7 +70,7 @@ public class StatusResourceTest Injector injector = Guice.createInjector(Collections.singletonList(new PropertiesModule(Collections.singletonList( "status.resource.test.runtime.properties")))); Map returnedProperties = injector.getInstance(StatusResource.class).getProperties(); - Set hiddenProperties = Sets.newHashSet(); + Set hiddenProperties = new HashSet<>(); Splitter.on(",").split(returnedProperties.get("druid.server.hiddenProperties")).forEach(hiddenProperties::add); hiddenProperties.forEach((property) -> Assert.assertNull(returnedProperties.get(property))); } diff --git a/server/src/test/java/org/apache/druid/server/coordination/ServerManagerTest.java b/server/src/test/java/org/apache/druid/server/coordination/ServerManagerTest.java index 7c28658173f..d607e0e609d 100644 --- a/server/src/test/java/org/apache/druid/server/coordination/ServerManagerTest.java +++ b/server/src/test/java/org/apache/druid/server/coordination/ServerManagerTest.java @@ -25,7 +25,6 @@ import com.google.common.base.Functions; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; import org.apache.druid.client.cache.CacheConfig; import org.apache.druid.client.cache.CachePopulatorStats; import org.apache.druid.client.cache.ForegroundCachePopulator; @@ -78,6 +77,7 @@ import org.junit.Test; import java.io.File; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; @@ -510,8 +510,8 @@ public class ServerManagerTest private final CountDownLatch waitLatch; private final CountDownLatch waitYieldLatch; private final CountDownLatch notifyLatch; - private List adapters = Lists.newArrayList(); - private List segmentReferences = Lists.newArrayList(); + private List adapters = new ArrayList<>(); + private List segmentReferences = new ArrayList<>(); public MyQueryRunnerFactory( diff --git 
a/server/src/test/java/org/apache/druid/server/coordination/coordination/BatchDataSegmentAnnouncerTest.java b/server/src/test/java/org/apache/druid/server/coordination/coordination/BatchDataSegmentAnnouncerTest.java index ecf4c4ccac3..e52c2783fac 100644 --- a/server/src/test/java/org/apache/druid/server/coordination/coordination/BatchDataSegmentAnnouncerTest.java +++ b/server/src/test/java/org/apache/druid/server/coordination/coordination/BatchDataSegmentAnnouncerTest.java @@ -51,6 +51,7 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; @@ -151,7 +152,7 @@ public class BatchDataSegmentAnnouncerTest jsonMapper ); - testSegments = Sets.newHashSet(); + testSegments = new HashSet<>(); for (int i = 0; i < 100; i++) { testSegments.add(makeSegment(i)); } @@ -305,7 +306,7 @@ public class BatchDataSegmentAnnouncerTest Assert.assertEquals(2, zNodes.size()); - Set allSegments = Sets.newHashSet(); + Set allSegments = new HashSet<>(); for (String zNode : zNodes) { allSegments.addAll(segmentReader.read(joiner.join(testSegmentsPath, zNode))); } @@ -391,7 +392,7 @@ public class BatchDataSegmentAnnouncerTest throw Throwables.propagate(e); } - return Sets.newHashSet(); + return new HashSet<>(); } } } diff --git a/server/src/test/java/org/apache/druid/server/coordinator/CoordinatorStatsTest.java b/server/src/test/java/org/apache/druid/server/coordinator/CoordinatorStatsTest.java index 3b1ddaccea9..a2c9b51ed2e 100644 --- a/server/src/test/java/org/apache/druid/server/coordinator/CoordinatorStatsTest.java +++ b/server/src/test/java/org/apache/druid/server/coordinator/CoordinatorStatsTest.java @@ -20,13 +20,13 @@ package org.apache.druid.server.coordinator; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Maps; import com.google.common.collect.Sets; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; +import java.util.HashMap; import java.util.Map; public class CoordinatorStatsTest @@ -95,7 +95,7 @@ public class CoordinatorStatsTest "tier2", 2L, "tier3", 3L ); - final Map actual = Maps.newHashMap(); + final Map actual = new HashMap<>(); expected.forEach( (tier, count) -> stats.addToTieredStat("stat", tier, count) diff --git a/server/src/test/java/org/apache/druid/server/coordinator/CostBalancerStrategyTest.java b/server/src/test/java/org/apache/druid/server/coordinator/CostBalancerStrategyTest.java index 4f8b2934092..f4e71cb9d11 100644 --- a/server/src/test/java/org/apache/druid/server/coordinator/CostBalancerStrategyTest.java +++ b/server/src/test/java/org/apache/druid/server/coordinator/CostBalancerStrategyTest.java @@ -20,8 +20,6 @@ package org.apache.druid.server.coordinator; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.util.concurrent.MoreExecutors; import org.apache.druid.client.ImmutableDruidDataSource; import org.apache.druid.client.ImmutableDruidServer; @@ -36,6 +34,8 @@ import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; +import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; @@ -51,12 +51,12 @@ public class CostBalancerStrategyTest */ public static List setupDummyCluster(int serverCount, int maxSegments) { - List serverHolderList = Lists.newArrayList(); + List serverHolderList = 
new ArrayList<>(); // Create 10 servers with current size being 3K & max size being 10K // Each having 100 segments for (int i = 0; i < serverCount; i++) { LoadQueuePeonTester fromPeon = new LoadQueuePeonTester(); - Map segments = Maps.newHashMap(); + Map segments = new HashMap<>(); for (int j = 0; j < maxSegments; j++) { DataSegment segment = getSegment(j); segments.put(segment.getIdentifier(), segment); @@ -82,7 +82,7 @@ public class CostBalancerStrategyTest EasyMock.expect(druidServer.getMaxSize()).andReturn(10000000L).anyTimes(); EasyMock.expect(druidServer.getSegment(EasyMock.anyObject())).andReturn(null).anyTimes(); - Map segments = Maps.newHashMap(); + Map segments = new HashMap<>(); for (int j = 0; j < (maxSegments - 2); j++) { DataSegment segment = getSegment(j); segments.put(segment.getIdentifier(), segment); @@ -115,8 +115,8 @@ public class CostBalancerStrategyTest interval, String.valueOf(index), new ConcurrentHashMap<>(), - Lists.newArrayList(), - Lists.newArrayList(), + new ArrayList<>(), + new ArrayList<>(), null, 0, index * 100L diff --git a/server/src/test/java/org/apache/druid/server/coordinator/CuratorDruidCoordinatorTest.java b/server/src/test/java/org/apache/druid/server/coordinator/CuratorDruidCoordinatorTest.java index 81f73bea5e3..9312e4bed00 100644 --- a/server/src/test/java/org/apache/druid/server/coordinator/CuratorDruidCoordinatorTest.java +++ b/server/src/test/java/org/apache/druid/server/coordinator/CuratorDruidCoordinatorTest.java @@ -62,6 +62,7 @@ import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestRule; +import java.util.ArrayList; import java.util.List; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; @@ -305,8 +306,8 @@ public class CuratorDruidCoordinatorTest extends CuratorTestBase DataSegment segmentToMove = sourceSegments.get(2); - List sourceSegKeys = Lists.newArrayList(); - List destSegKeys = Lists.newArrayList(); + List sourceSegKeys = new ArrayList<>(); + List destSegKeys = new ArrayList<>(); for (DataSegment segment : sourceSegments) { sourceSegKeys.add(announceBatchSegmentsForServer(source, ImmutableSet.of(segment), zkPathsConfig, jsonMapper)); diff --git a/server/src/test/java/org/apache/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java b/server/src/test/java/org/apache/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java index f7b519d37c6..03855374ec1 100644 --- a/server/src/test/java/org/apache/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java +++ b/server/src/test/java/org/apache/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java @@ -20,8 +20,6 @@ package org.apache.druid.server.coordinator; import com.google.common.collect.ImmutableMap; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; import com.google.common.util.concurrent.MoreExecutors; import org.apache.druid.client.ImmutableDruidDataSource; import org.apache.druid.client.ImmutableDruidServer; @@ -34,6 +32,8 @@ import org.joda.time.Interval; import org.junit.Assert; import org.junit.Test; +import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; @@ -49,12 +49,12 @@ public class DiskNormalizedCostBalancerStrategyTest */ public static List setupDummyCluster(int serverCount, int maxSegments) { - List serverHolderList = Lists.newArrayList(); + List serverHolderList = new ArrayList<>(); // Create 10 servers with current size being 3K 
    // Create 10 servers with current size being 3K & max size being 10K
    // Each having 100 segments
    for (int i = 0; i < serverCount; i++) {
      LoadQueuePeonTester fromPeon = new LoadQueuePeonTester();
-      Map segments = Maps.newHashMap();
+      Map segments = new HashMap<>();
       for (int j = 0; j < maxSegments; j++) {
         DataSegment segment = getSegment(j);
         segments.put(segment.getIdentifier(), segment);
@@ -80,7 +80,7 @@ public class DiskNormalizedCostBalancerStrategyTest
     EasyMock.expect(druidServer.getMaxSize()).andReturn(100000000L).anyTimes();
     EasyMock.expect(druidServer.getSegment(EasyMock.anyObject())).andReturn(null).anyTimes();
-    Map segments = Maps.newHashMap();
+    Map segments = new HashMap<>();
     for (int j = 0; j < maxSegments; j++) {
       DataSegment segment = getSegment(j);
       segments.put(segment.getIdentifier(), segment);
@@ -113,8 +113,8 @@ public class DiskNormalizedCostBalancerStrategyTest
         interval,
         String.valueOf(index),
         new ConcurrentHashMap<>(),
-        Lists.newArrayList(),
-        Lists.newArrayList(),
+        new ArrayList<>(),
+        new ArrayList<>(),
         null,
         0,
         index * 100L
diff --git a/server/src/test/java/org/apache/druid/server/coordinator/DruidCoordinatorBalancerProfiler.java b/server/src/test/java/org/apache/druid/server/coordinator/DruidCoordinatorBalancerProfiler.java
index c505657c9af..e65a9391fdb 100644
--- a/server/src/test/java/org/apache/druid/server/coordinator/DruidCoordinatorBalancerProfiler.java
+++ b/server/src/test/java/org/apache/druid/server/coordinator/DruidCoordinatorBalancerProfiler.java
@@ -22,8 +22,6 @@ package org.apache.druid.server.coordinator;
 import com.google.common.base.Stopwatch;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import org.apache.druid.client.DruidServer;
 import org.apache.druid.client.ImmutableDruidServer;
 import org.apache.druid.java.util.common.DateTimes;
@@ -41,6 +39,7 @@ import org.joda.time.Interval;
 import org.joda.time.Period;
 import org.junit.Before;
+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -54,7 +53,7 @@ public class DruidCoordinatorBalancerProfiler
   private DruidCoordinator coordinator;
   private ImmutableDruidServer druidServer1;
   private ImmutableDruidServer druidServer2;
-  Map segments = Maps.newHashMap();
+  Map segments = new HashMap<>();
   ServiceEmitter emitter;
   MetadataRuleManager manager;
   PeriodLoadRule loadRule = new PeriodLoadRule(new Period("P5000Y"), ImmutableMap.of("normal", 3));
@@ -90,10 +89,10 @@
     EasyMock.expectLastCall().anyTimes();
     EasyMock.replay(coordinator);
-    List serverList = Lists.newArrayList();
-    Map peonMap = Maps.newHashMap();
-    List serverHolderList = Lists.newArrayList();
-    Map segmentMap = Maps.newHashMap();
+    List serverList = new ArrayList<>();
+    Map peonMap = new HashMap<>();
+    List serverHolderList = new ArrayList<>();
+    Map segmentMap = new HashMap<>();
     for (int i = 0; i < numSegments; i++) {
       segmentMap.put(
           "segment" + i,
@@ -101,9 +100,9 @@
               "datasource" + i,
               new Interval(DateTimes.of("2012-01-01"), (DateTimes.of("2012-01-01")).plusHours(1)),
               (DateTimes.of("2012-03-01")).toString(),
-              Maps.newHashMap(),
-              Lists.newArrayList(),
-              Lists.newArrayList(),
+              new HashMap<>(),
+              new ArrayList<>(),
+              new ArrayList<>(),
               NoneShardSpec.instance(),
               0,
               4L
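
Many hunks above pass fresh empty collections straight into a constructor (the DataSegment argument lists). A sketch of that shape, using a hypothetical stand-in class rather than the real DataSegment signature:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // SegmentSketch is illustrative only; the point is that fresh empty
    // collections can be passed inline with the diamond operator, exactly
    // as the converted constructor calls above do.
    public class SegmentSketch
    {
      private final Map<String, Object> loadSpec;
      private final List<String> dimensions;
      private final List<String> metrics;

      public SegmentSketch(Map<String, Object> loadSpec, List<String> dimensions, List<String> metrics)
      {
        this.loadSpec = loadSpec;
        this.dimensions = dimensions;
        this.metrics = metrics;
      }

      public static void main(String[] args)
      {
        SegmentSketch s = new SegmentSketch(new HashMap<>(), new ArrayList<>(), new ArrayList<>());
        System.out.println(s.loadSpec + " " + s.dimensions + " " + s.metrics);
      }
    }
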
diff --git a/server/src/test/java/org/apache/druid/server/coordinator/DruidCoordinatorBalancerTest.java b/server/src/test/java/org/apache/druid/server/coordinator/DruidCoordinatorBalancerTest.java
index 52d4812f357..f4125391062 100644
--- a/server/src/test/java/org/apache/druid/server/coordinator/DruidCoordinatorBalancerTest.java
+++ b/server/src/test/java/org/apache/druid/server/coordinator/DruidCoordinatorBalancerTest.java
@@ -21,8 +21,6 @@ package org.apache.druid.server.coordinator;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import com.google.common.util.concurrent.ListeningExecutorService;
 import com.google.common.util.concurrent.MoreExecutors;
 import org.apache.druid.client.ImmutableDruidServer;
@@ -38,6 +36,7 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -92,9 +91,9 @@ public class DruidCoordinatorBalancerTest
         "datasource1",
         new Interval(start1, start1.plusHours(1)),
         version.toString(),
-        Maps.newHashMap(),
-        Lists.newArrayList(),
-        Lists.newArrayList(),
+        new HashMap<>(),
+        new ArrayList<>(),
+        new ArrayList<>(),
         NoneShardSpec.instance(),
         0,
         11L
@@ -103,9 +102,9 @@ public class DruidCoordinatorBalancerTest
         "datasource1",
         new Interval(start2, start2.plusHours(1)),
         version.toString(),
-        Maps.newHashMap(),
-        Lists.newArrayList(),
-        Lists.newArrayList(),
+        new HashMap<>(),
+        new ArrayList<>(),
+        new ArrayList<>(),
         NoneShardSpec.instance(),
         0,
         7L
@@ -114,9 +113,9 @@ public class DruidCoordinatorBalancerTest
         "datasource2",
         new Interval(start1, start1.plusHours(1)),
         version.toString(),
-        Maps.newHashMap(),
-        Lists.newArrayList(),
-        Lists.newArrayList(),
+        new HashMap<>(),
+        new ArrayList<>(),
+        new ArrayList<>(),
         NoneShardSpec.instance(),
         0,
         4L
@@ -125,9 +124,9 @@ public class DruidCoordinatorBalancerTest
         "datasource2",
         new Interval(start2, start2.plusHours(1)),
         version.toString(),
-        Maps.newHashMap(),
-        Lists.newArrayList(),
-        Lists.newArrayList(),
+        new HashMap<>(),
+        new ArrayList<>(),
+        new ArrayList<>(),
         NoneShardSpec.instance(),
         0,
         8L
diff --git a/server/src/test/java/org/apache/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java b/server/src/test/java/org/apache/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java
index c16f4b3f20d..0996058b4ad 100644
--- a/server/src/test/java/org/apache/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java
+++ b/server/src/test/java/org/apache/druid/server/coordinator/DruidCoordinatorRuleRunnerTest.java
@@ -21,8 +21,6 @@ package org.apache.druid.server.coordinator;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
 import com.google.common.util.concurrent.ListeningExecutorService;
 import com.google.common.util.concurrent.MoreExecutors;
 import org.apache.druid.client.DruidServer;
@@ -48,7 +46,9 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import java.util.ArrayList;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
@@ -78,16 +78,16 @@ public class DruidCoordinatorRuleRunnerTest
     databaseRuleManager = EasyMock.createMock(MetadataRuleManager.class);
     DateTime start = DateTimes.of("2012-01-01");
-    availableSegments = Lists.newArrayList();
+    availableSegments = new ArrayList<>();
     for (int i = 0; i < 24; i++) {
       availableSegments.add(
          new DataSegment(
              "test",
              new Interval(start, start.plusHours(1)),
              DateTimes.nowUtc().toString(),
-              Maps.newHashMap(),
-              Lists.newArrayList(),
-              Lists.newArrayList(),
+              new HashMap<>(),
+              new ArrayList<>(),
+              new ArrayList<>(),
              NoneShardSpec.instance(),
              IndexIO.CURRENT_VERSION_ID,
              1
@@ -937,7 +937,7 @@ public class DruidCoordinatorRuleRunnerTest
     LoadQueuePeon anotherMockPeon = EasyMock.createMock(LoadQueuePeon.class);
     EasyMock.expect(anotherMockPeon.getLoadQueueSize()).andReturn(10L).atLeastOnce();
-    EasyMock.expect(anotherMockPeon.getSegmentsToLoad()).andReturn(Sets.newHashSet()).anyTimes();
+    EasyMock.expect(anotherMockPeon.getSegmentsToLoad()).andReturn(new HashSet<>()).anyTimes();
     EasyMock.replay(anotherMockPeon);
@@ -1065,9 +1065,9 @@
         "test",
         Intervals.of("2012-02-01/2012-02-02"),
         DateTimes.nowUtc().toString(),
-        Maps.newHashMap(),
-        Lists.newArrayList(),
-        Lists.newArrayList(),
+        new HashMap<>(),
+        new ArrayList<>(),
+        new ArrayList<>(),
         NoneShardSpec.instance(),
         1,
         0
@@ -1224,9 +1224,9 @@
         "test",
         Intervals.of("2012-02-01/2012-02-02"),
         DateTimes.nowUtc().toString(),
-        Maps.newHashMap(),
-        Lists.newArrayList(),
-        Lists.newArrayList(),
+        new HashMap<>(),
+        new ArrayList<>(),
+        new ArrayList<>(),
         NoneShardSpec.instance(),
         1,
         0
@@ -1310,9 +1310,9 @@
         "test",
         Intervals.of("2012-01-01/2012-01-02"),
         "1",
-        Maps.newHashMap(),
-        Lists.newArrayList(),
-        Lists.newArrayList(),
+        new HashMap<>(),
+        new ArrayList<>(),
+        new ArrayList<>(),
         NoneShardSpec.instance(),
         IndexIO.CURRENT_VERSION_ID,
         1
@@ -1321,9 +1321,9 @@
         "test",
         Intervals.of("2012-01-01/2012-01-02"),
         "2",
-        Maps.newHashMap(),
-        Lists.newArrayList(),
-        Lists.newArrayList(),
+        new HashMap<>(),
+        new ArrayList<>(),
+        new ArrayList<>(),
         NoneShardSpec.instance(),
         IndexIO.CURRENT_VERSION_ID,
         1
diff --git a/server/src/test/java/org/apache/druid/server/coordinator/DruidCoordinatorSegmentMergerTest.java b/server/src/test/java/org/apache/druid/server/coordinator/DruidCoordinatorSegmentMergerTest.java
index 976493f1a43..2f710411377 100644
--- a/server/src/test/java/org/apache/druid/server/coordinator/DruidCoordinatorSegmentMergerTest.java
+++ b/server/src/test/java/org/apache/druid/server/coordinator/DruidCoordinatorSegmentMergerTest.java
@@ -21,7 +21,6 @@ package org.apache.druid.server.coordinator;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Lists;
 import org.apache.druid.client.indexing.IndexingServiceClient;
 import org.apache.druid.client.indexing.NoopIndexingServiceClient;
 import org.apache.druid.common.config.JacksonConfigManager;
@@ -34,6 +33,7 @@ import org.easymock.EasyMock;
 import org.junit.Assert;
 import org.junit.Test;
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicReference;
@@ -455,7 +455,7 @@ public class DruidCoordinatorSegmentMergerTest
         .andReturn(new AtomicReference(null)).anyTimes();
     EasyMock.replay(configManager);
-    final List> retVal = Lists.newArrayList();
+    final List> retVal = new ArrayList<>();
     final IndexingServiceClient indexingServiceClient = new NoopIndexingServiceClient()
     {
       @Override
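
Several mock-based tests in this patch (DruidCoordinatorRuleRunnerTest above, LoadRuleTest further down) stub peon getters to hand back fresh empty sets; Sets.newHashSet() and new HashSet<>() are interchangeable there because EasyMock only needs some empty Set instance. A self-contained sketch with a hypothetical interface standing in for LoadQueuePeon:

    import java.util.HashSet;
    import java.util.Set;
    import org.easymock.EasyMock;

    public class MockPeonSketch
    {
      // Illustrative interface, not Druid's real LoadQueuePeon.
      interface Peon
      {
        Set<String> getSegmentsToLoad();
      }

      public static void main(String[] args)
      {
        Peon peon = EasyMock.createMock(Peon.class);
        // Record phase: any call returns a fresh empty JDK set.
        EasyMock.expect(peon.getSegmentsToLoad()).andReturn(new HashSet<>()).anyTimes();
        EasyMock.replay(peon);
        System.out.println(peon.getSegmentsToLoad().isEmpty()); // true
      }
    }
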
diff --git a/server/src/test/java/org/apache/druid/server/coordinator/DruidCoordinatorTest.java b/server/src/test/java/org/apache/druid/server/coordinator/DruidCoordinatorTest.java
index b2692258bc1..0ad3b7fd2aa 100644
--- a/server/src/test/java/org/apache/druid/server/coordinator/DruidCoordinatorTest.java
+++ b/server/src/test/java/org/apache/druid/server/coordinator/DruidCoordinatorTest.java
@@ -23,7 +23,6 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Lists;
 import it.unimi.dsi.fastutil.objects.Object2LongMap;
 import org.apache.curator.framework.CuratorFramework;
 import org.apache.curator.framework.recipes.cache.PathChildrenCache;
@@ -65,6 +64,7 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -583,8 +583,8 @@ public class DruidCoordinatorTest extends CuratorTestBase
         interval,
         "dummy_version",
         new ConcurrentHashMap<>(),
-        Lists.newArrayList(),
-        Lists.newArrayList(),
+        new ArrayList<>(),
+        new ArrayList<>(),
         null,
         0,
         0L
diff --git a/server/src/test/java/org/apache/druid/server/coordinator/ReservoirSegmentSamplerTest.java b/server/src/test/java/org/apache/druid/server/coordinator/ReservoirSegmentSamplerTest.java
index 323104bde46..815fd45187d 100644
--- a/server/src/test/java/org/apache/druid/server/coordinator/ReservoirSegmentSamplerTest.java
+++ b/server/src/test/java/org/apache/druid/server/coordinator/ReservoirSegmentSamplerTest.java
@@ -21,7 +21,6 @@ package org.apache.druid.server.coordinator;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import org.apache.druid.client.ImmutableDruidServer;
 import org.apache.druid.java.util.common.DateTimes;
 import org.apache.druid.timeline.DataSegment;
@@ -33,6 +32,8 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -81,9 +82,9 @@ public class ReservoirSegmentSamplerTest
         "datasource1",
         new Interval(start1, start1.plusHours(1)),
         version.toString(),
-        Maps.newHashMap(),
-        Lists.newArrayList(),
-        Lists.newArrayList(),
+        new HashMap<>(),
+        new ArrayList<>(),
+        new ArrayList<>(),
         NoneShardSpec.instance(),
         0,
         11L
@@ -92,9 +93,9 @@
         "datasource1",
         new Interval(start2, start2.plusHours(1)),
         version.toString(),
-        Maps.newHashMap(),
-        Lists.newArrayList(),
-        Lists.newArrayList(),
+        new HashMap<>(),
+        new ArrayList<>(),
+        new ArrayList<>(),
         NoneShardSpec.instance(),
         0,
         7L
@@ -103,9 +104,9 @@
         "datasource2",
         new Interval(start1, start1.plusHours(1)),
         version.toString(),
-        Maps.newHashMap(),
-        Lists.newArrayList(),
-        Lists.newArrayList(),
+        new HashMap<>(),
+        new ArrayList<>(),
+        new ArrayList<>(),
         NoneShardSpec.instance(),
         0,
         4L
@@ -114,9 +115,9 @@
         "datasource2",
         new Interval(start2, start2.plusHours(1)),
         version.toString(),
-        Maps.newHashMap(),
-        Lists.newArrayList(),
-        Lists.newArrayList(),
+        new HashMap<>(),
+        new ArrayList<>(),
+        new ArrayList<>(),
         NoneShardSpec.instance(),
         0,
         8L
@@ -188,13 +189,13 @@
     EasyMock.expect(holder4.getServer()).andReturn(druidServer4).anyTimes();
     EasyMock.replay(holder4);
-    List holderList = Lists.newArrayList();
+    List holderList = new ArrayList<>();
     holderList.add(holder1);
     holderList.add(holder2);
     holderList.add(holder3);
     holderList.add(holder4);
-    Map segmentCountMap = Maps.newHashMap();
+    Map segmentCountMap = new HashMap<>();
     for (int i = 0; i < 5000; i++) {
       segmentCountMap.put(ReservoirSegmentSampler.getRandomBalancerSegmentHolder(holderList).getSegment(), 1);
     }
diff --git a/server/src/test/java/org/apache/druid/server/coordinator/cost/SegmentsCostCacheTest.java b/server/src/test/java/org/apache/druid/server/coordinator/cost/SegmentsCostCacheTest.java
index 6fd96ed64bb..4a647f39821 100644
--- a/server/src/test/java/org/apache/druid/server/coordinator/cost/SegmentsCostCacheTest.java
+++ b/server/src/test/java/org/apache/druid/server/coordinator/cost/SegmentsCostCacheTest.java
@@ -19,7 +19,6 @@ package org.apache.druid.server.coordinator.cost;
-import com.google.common.collect.Lists;
 import org.apache.druid.java.util.common.DateTimes;
 import org.apache.druid.timeline.DataSegment;
 import org.joda.time.DateTime;
@@ -186,8 +185,8 @@ public class SegmentsCostCacheTest
         interval,
         UUID.randomUUID().toString(),
         new ConcurrentHashMap<>(),
-        Lists.newArrayList(),
-        Lists.newArrayList(),
+        new ArrayList<>(),
+        new ArrayList<>(),
         null,
         0,
         size
diff --git a/server/src/test/java/org/apache/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java b/server/src/test/java/org/apache/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java
index 77eaf1318e6..045b160ebe8 100644
--- a/server/src/test/java/org/apache/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java
+++ b/server/src/test/java/org/apache/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java
@@ -22,7 +22,6 @@ package org.apache.druid.server.coordinator.rules;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import org.apache.druid.client.DruidServer;
 import org.apache.druid.java.util.common.DateTimes;
 import org.apache.druid.java.util.common.Intervals;
@@ -38,7 +37,9 @@ import org.apache.druid.timeline.partition.NoneShardSpec;
 import org.junit.Before;
 import org.junit.Test;
+import java.util.ArrayList;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
 import java.util.TreeSet;
 import java.util.stream.Collectors;
@@ -52,10 +53,10 @@ public class BroadcastDistributionRuleTest
 {
   private DruidCluster druidCluster;
   private ServerHolder holderOfSmallSegment;
-  private List holdersOfLargeSegments = Lists.newArrayList();
-  private List holdersOfLargeSegments2 = Lists.newArrayList();
-  private final List largeSegments = Lists.newArrayList();
-  private final List largeSegments2 = Lists.newArrayList();
+  private List holdersOfLargeSegments = new ArrayList<>();
+  private List holdersOfLargeSegments2 = new ArrayList<>();
+  private final List largeSegments = new ArrayList<>();
+  private final List largeSegments2 = new ArrayList<>();
   private DataSegment smallSegment;
   @Before
@@ -65,9 +66,9 @@
         "small_source",
         Intervals.of("0/1000"),
         DateTimes.nowUtc().toString(),
-        Maps.newHashMap(),
-        Lists.newArrayList(),
-        Lists.newArrayList(),
+        new HashMap<>(),
+        new ArrayList<>(),
+        new ArrayList<>(),
         NoneShardSpec.instance(),
         0,
         0
@@ -79,9 +80,9 @@
         "large_source",
         Intervals.of((i * 1000) + "/" + ((i + 1) * 1000)),
         DateTimes.nowUtc().toString(),
-        Maps.newHashMap(),
-        Lists.newArrayList(),
-        Lists.newArrayList(),
+        new HashMap<>(),
+        new ArrayList<>(),
+        new ArrayList<>(),
         NoneShardSpec.instance(),
         0,
         100
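
The field initializers just above (BroadcastDistributionRuleTest) show that the migration applies to fields as well as locals, and that final fields stay final: only the list's contents change later, never the reference. A small sketch under those assumptions, with hypothetical names:

    import java.util.ArrayList;
    import java.util.List;

    public class FixtureSketch
    {
      // Was: = Lists.newArrayList(); filled in by a @Before setup method.
      private final List<String> largeSegments = new ArrayList<>();

      public void setUp()
      {
        largeSegments.add("large_source/0-1000");
      }

      public static void main(String[] args)
      {
        FixtureSketch f = new FixtureSketch();
        f.setUp();
        System.out.println(f.largeSegments);
      }
    }
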
@@ -95,9 +96,9 @@ public class BroadcastDistributionRuleTest
         "large_source2",
         Intervals.of((i * 1000) + "/" + ((i + 1) * 1000)),
         DateTimes.nowUtc().toString(),
-        Maps.newHashMap(),
-        Lists.newArrayList(),
-        Lists.newArrayList(),
+        new HashMap<>(),
+        new ArrayList<>(),
+        new ArrayList<>(),
         NoneShardSpec.instance(),
         0,
         100
diff --git a/server/src/test/java/org/apache/druid/server/coordinator/rules/LoadRuleTest.java b/server/src/test/java/org/apache/druid/server/coordinator/rules/LoadRuleTest.java
index 86c1b73d5a2..1980b9799b6 100644
--- a/server/src/test/java/org/apache/druid/server/coordinator/rules/LoadRuleTest.java
+++ b/server/src/test/java/org/apache/druid/server/coordinator/rules/LoadRuleTest.java
@@ -23,9 +23,6 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
 import com.google.common.util.concurrent.ListeningExecutorService;
 import com.google.common.util.concurrent.MoreExecutors;
 import org.apache.druid.client.DruidServer;
@@ -58,8 +55,11 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -691,9 +691,9 @@ public class LoadRuleTest
         dataSource,
         Intervals.of("0/3000"),
         DateTimes.nowUtc().toString(),
-        Maps.newHashMap(),
-        Lists.newArrayList(),
-        Lists.newArrayList(),
+        new HashMap<>(),
+        new ArrayList<>(),
+        new ArrayList<>(),
         NoneShardSpec.instance(),
         0,
         0
@@ -739,8 +739,8 @@
   private static LoadQueuePeon createEmptyPeon()
   {
     final LoadQueuePeon mockPeon = EasyMock.createMock(LoadQueuePeon.class);
-    EasyMock.expect(mockPeon.getSegmentsToLoad()).andReturn(Sets.newHashSet()).anyTimes();
-    EasyMock.expect(mockPeon.getSegmentsMarkedToDrop()).andReturn(Sets.newHashSet()).anyTimes();
+    EasyMock.expect(mockPeon.getSegmentsToLoad()).andReturn(new HashSet<>()).anyTimes();
+    EasyMock.expect(mockPeon.getSegmentsMarkedToDrop()).andReturn(new HashSet<>()).anyTimes();
     EasyMock.expect(mockPeon.getLoadQueueSize()).andReturn(0L).anyTimes();
     EasyMock.expect(mockPeon.getNumberOfSegmentsInQueue()).andReturn(0).anyTimes();
@@ -754,7 +754,7 @@
     final LoadQueuePeon mockPeon = EasyMock.createMock(LoadQueuePeon.class);
     EasyMock.expect(mockPeon.getSegmentsToLoad()).andReturn(segs).anyTimes();
-    EasyMock.expect(mockPeon.getSegmentsMarkedToDrop()).andReturn(Sets.newHashSet()).anyTimes();
+    EasyMock.expect(mockPeon.getSegmentsMarkedToDrop()).andReturn(new HashSet<>()).anyTimes();
     EasyMock.expect(mockPeon.getLoadQueueSize()).andReturn(loadingSize).anyTimes();
     EasyMock.expect(mockPeon.getNumberOfSegmentsInQueue()).andReturn(segs.size()).anyTimes();
diff --git a/server/src/test/java/org/apache/druid/server/shard/SingleDimensionShardSpecTest.java b/server/src/test/java/org/apache/druid/server/shard/SingleDimensionShardSpecTest.java
index 0111c3a9b00..2722179973e 100644
--- a/server/src/test/java/org/apache/druid/server/shard/SingleDimensionShardSpecTest.java
+++ b/server/src/test/java/org/apache/druid/server/shard/SingleDimensionShardSpecTest.java
@@ -23,7 +23,6 @@ import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableRangeSet;
-import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Range;
 import com.google.common.collect.RangeSet;
@@ -171,7 +170,7 @@ public class SingleDimensionShardSpecTest
   {
     Preconditions.checkState(arguments.length % 2 == 0);
-    final ArrayList>> retVal = Lists.newArrayList();
+    final ArrayList>> retVal = new ArrayList<>();
     for (int i = 0; i < arguments.length; i += 2) {
       retVal.add(Pair.of((Boolean) arguments[i], makeMap((String) arguments[i + 1])));
diff --git a/server/src/test/java/org/apache/druid/timeline/DataSegmentTest.java b/server/src/test/java/org/apache/druid/timeline/DataSegmentTest.java
index 6eaf103995d..6c9c88fcd6c 100644
--- a/server/src/test/java/org/apache/druid/timeline/DataSegmentTest.java
+++ b/server/src/test/java/org/apache/druid/timeline/DataSegmentTest.java
@@ -23,7 +23,6 @@ import com.fasterxml.jackson.databind.InjectableValues;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Sets;
 import org.apache.druid.jackson.DefaultObjectMapper;
 import org.apache.druid.java.util.common.DateTimes;
 import org.apache.druid.java.util.common.Intervals;
@@ -42,6 +41,7 @@ import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.TreeSet;
 /**
  */
@@ -195,7 +195,7 @@ public class DataSegmentTest
     List shuffled = new ArrayList<>(Arrays.asList(sortedOrder));
     Collections.shuffle(shuffled);
-    Set theSet = Sets.newTreeSet(DataSegment.bucketMonthComparator());
+    Set theSet = new TreeSet<>(DataSegment.bucketMonthComparator());
     theSet.addAll(shuffled);
     int index = 0;
diff --git a/services/src/main/java/org/apache/druid/cli/CliHadoopIndexer.java b/services/src/main/java/org/apache/druid/cli/CliHadoopIndexer.java
index fe256b5380f..6ebcdddc0d0 100644
--- a/services/src/main/java/org/apache/druid/cli/CliHadoopIndexer.java
+++ b/services/src/main/java/org/apache/druid/cli/CliHadoopIndexer.java
@@ -20,7 +20,6 @@ package org.apache.druid.cli;
 import com.google.common.base.Joiner;
-import com.google.common.collect.Lists;
 import com.google.inject.Inject;
 import io.airlift.airline.Arguments;
 import io.airlift.airline.Command;
@@ -34,6 +33,7 @@ import java.io.File;
 import java.lang.reflect.Method;
 import java.net.URL;
 import java.net.URLClassLoader;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -70,7 +70,7 @@ public class CliHadoopIndexer implements Runnable
   public void run()
   {
     try {
-      final List allCoordinates = Lists.newArrayList();
+      final List allCoordinates = new ArrayList<>();
       if (coordinates != null) {
         allCoordinates.addAll(coordinates);
       }
@@ -78,16 +78,16 @@ public class CliHadoopIndexer implements Runnable
         allCoordinates.addAll(DEFAULT_HADOOP_COORDINATES);
       }
-      final List extensionURLs = Lists.newArrayList();
+      final List extensionURLs = new ArrayList<>();
       for (final File extension : Initialization.getExtensionFilesToLoad(extensionsConfig)) {
         final ClassLoader extensionLoader = Initialization.getClassLoaderForExtension(extension, false);
         extensionURLs.addAll(Arrays.asList(((URLClassLoader) extensionLoader).getURLs()));
       }
-      final List nonHadoopURLs = Lists.newArrayList();
+      final List nonHadoopURLs = new ArrayList<>();
       nonHadoopURLs.addAll(Arrays.asList(((URLClassLoader) CliHadoopIndexer.class.getClassLoader()).getURLs()));
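
A notable variant in the DataSegmentTest hunk above is Sets.newTreeSet(comparator): it too has a direct JDK replacement, because TreeSet's constructor accepts a Comparator. A sketch with an illustrative comparator (not the patch's bucketMonthComparator):

    import java.util.Comparator;
    import java.util.Set;
    import java.util.TreeSet;

    public class TreeSetSketch
    {
      public static void main(String[] args)
      {
        // Was: Sets.newTreeSet(someComparator); the JDK constructor is equivalent.
        Set<String> theSet = new TreeSet<>(Comparator.comparing(String::length));
        theSet.add("bb");
        theSet.add("a");
        theSet.add("ccc");
        System.out.println(theSet); // ordered by length: [a, bb, ccc]
      }
    }
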
-      final List driverURLs = Lists.newArrayList();
+      final List driverURLs = new ArrayList<>();
       driverURLs.addAll(nonHadoopURLs);
       // put hadoop dependencies last to avoid jets3t & apache.httpcore version conflicts
       for (File hadoopDependency : Initialization.getHadoopDependencyFilesToLoad(allCoordinates, extensionsConfig)) {
@@ -98,7 +98,7 @@ public class CliHadoopIndexer implements Runnable
       final URLClassLoader loader = new URLClassLoader(driverURLs.toArray(new URL[0]), null);
       Thread.currentThread().setContextClassLoader(loader);
-      final List jobUrls = Lists.newArrayList();
+      final List jobUrls = new ArrayList<>();
       jobUrls.addAll(nonHadoopURLs);
       jobUrls.addAll(extensionURLs);
diff --git a/services/src/main/java/org/apache/druid/cli/CliInternalHadoopIndexer.java b/services/src/main/java/org/apache/druid/cli/CliInternalHadoopIndexer.java
index 188e5add483..7a0d9bb5f34 100644
--- a/services/src/main/java/org/apache/druid/cli/CliInternalHadoopIndexer.java
+++ b/services/src/main/java/org/apache/druid/cli/CliInternalHadoopIndexer.java
@@ -23,7 +23,6 @@ import com.google.common.base.Preconditions;
 import com.google.common.base.Supplier;
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
 import com.google.inject.Injector;
 import com.google.inject.Module;
 import com.google.inject.TypeLiteral;
@@ -49,6 +48,7 @@ import org.apache.druid.metadata.MetadataStorageTablesConfig;
 import java.io.File;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Properties;
@@ -119,7 +119,7 @@ public class CliInternalHadoopIndexer extends GuiceRunnable
         )
     );
-    List jobs = Lists.newArrayList();
+    List jobs = new ArrayList<>();
     jobs.add(new HadoopDruidDetermineConfigurationJob(config));
     jobs.add(new HadoopDruidIndexerJob(config, injector.getInstance(MetadataStorageUpdaterJobHandler.class)));
     JobHelper.runJobs(jobs, config);
diff --git a/services/src/main/java/org/apache/druid/cli/DumpSegment.java b/services/src/main/java/org/apache/druid/cli/DumpSegment.java
index 84b178c6e3a..06f90adef4e 100644
--- a/services/src/main/java/org/apache/druid/cli/DumpSegment.java
+++ b/services/src/main/java/org/apache/druid/cli/DumpSegment.java
@@ -25,7 +25,6 @@ import com.google.common.base.Function;
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 import com.google.common.util.concurrent.MoreExecutors;
@@ -90,7 +89,9 @@ import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
+import java.util.ArrayList;
 import java.util.EnumSet;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -142,7 +143,7 @@ public class DumpSegment extends GuiceRunnable
       title = "column",
       description = "Column to include, specify multiple times for multiple columns, or omit to include all columns.",
       required = false)
-  public List columnNamesFromCli = Lists.newArrayList();
+  public List columnNamesFromCli = new ArrayList<>();
   @Option(
       name = "--time-iso8601",
@@ -486,7 +487,7 @@ public class DumpSegment extends GuiceRunnable
     return factory
         .getToolchest()
         .mergeResults(factory.mergeRunners(MoreExecutors.sameThreadExecutor(), ImmutableList.of(runner)))
-        .run(QueryPlus.wrap(query), Maps.newHashMap());
+        .run(QueryPlus.wrap(query), new HashMap<>());
   }
 private static void evaluateSequenceForSideEffects(final Sequence sequence)
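
The PullDependencies changes below follow the same recipe for CLI option fields: airline-style options need a mutable, non-null default so parsed values can be appended, and an empty ArrayList serves exactly as Lists.newArrayList() did. A sketch with the annotations omitted and names loosely mirroring the file:

    import java.util.ArrayList;
    import java.util.List;

    public class OptionDefaultsSketch
    {
      // Was: = Lists.newArrayList(); the @Option annotation is elided here.
      public List<String> coordinates = new ArrayList<>();

      public static void main(String[] args)
      {
        OptionDefaultsSketch cli = new OptionDefaultsSketch();
        cli.coordinates.add("org.apache.druid.extensions:mysql-metadata-storage");
        System.out.println(cli.coordinates);
      }
    }
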
diff --git a/services/src/main/java/org/apache/druid/cli/PullDependencies.java b/services/src/main/java/org/apache/druid/cli/PullDependencies.java
index cc835b076a5..4df077b745b 100644
--- a/services/src/main/java/org/apache/druid/cli/PullDependencies.java
+++ b/services/src/main/java/org/apache/druid/cli/PullDependencies.java
@@ -22,8 +22,6 @@ package org.apache.druid.cli;
 import com.google.common.base.Strings;
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
 import com.google.inject.Inject;
 import io.airlift.airline.Command;
 import io.airlift.airline.Option;
@@ -60,6 +58,8 @@ import java.io.PrintStream;
 import java.io.UnsupportedEncodingException;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.stream.Collectors;
@@ -73,7 +73,7 @@ public class PullDependencies implements Runnable
 {
   private static final Logger log = new Logger(PullDependencies.class);
-  private static final Set exclusions = Sets.newHashSet(
+  private static final Set exclusions = new HashSet<>(
       /*
       // It is possible that extensions will pull down a lot of jars that are either
@@ -157,14 +157,14 @@
       title = "coordinate",
       description = "Extension coordinate to pull down, followed by a maven coordinate, e.g. org.apache.druid.extensions:mysql-metadata-storage",
       required = false)
-  public List coordinates = Lists.newArrayList();
+  public List coordinates = new ArrayList<>();
   @Option(
       name = {"-h", "--hadoop-coordinate"},
       title = "hadoop coordinate",
       description = "Hadoop dependency to pull down, followed by a maven coordinate, e.g. org.apache.hadoop:hadoop-client:2.4.0",
       required = false)
-  public List hadoopCoordinates = Lists.newArrayList();
+  public List hadoopCoordinates = new ArrayList<>();
   @Option(
       name = "--no-default-hadoop",
@@ -190,7 +190,7 @@
       title = "Add a remote repository. Unless --no-default-remote-repositories is provided, these will be used after https://repo1.maven.org/maven2/",
       required = false
   )
-  List remoteRepositories = Lists.newArrayList();
+  List remoteRepositories = new ArrayList<>();
   @Option(
       name = "--no-default-remote-repositories",
@@ -430,13 +430,13 @@
       alongside anything else that's grabbing System.out. But who knows.
      */
-    final List remoteUriList = Lists.newArrayList();
+    final List remoteUriList = new ArrayList<>();
     if (!noDefaultRemoteRepositories) {
       remoteUriList.addAll(DEFAULT_REMOTE_REPOSITORIES);
     }
     remoteUriList.addAll(remoteRepositories);
-    List remoteRepositories = Lists.newArrayList();
+    List remoteRepositories = new ArrayList<>();
     for (String uri : remoteUriList) {
       try {
         URI u = new URI(uri);
diff --git a/services/src/main/java/org/apache/druid/cli/convert/DataSegmentPusherDefaultConverter.java b/services/src/main/java/org/apache/druid/cli/convert/DataSegmentPusherDefaultConverter.java
index b7f3569339e..051c96c7075 100644
--- a/services/src/main/java/org/apache/druid/cli/convert/DataSegmentPusherDefaultConverter.java
+++ b/services/src/main/java/org/apache/druid/cli/convert/DataSegmentPusherDefaultConverter.java
@@ -20,9 +20,9 @@ package org.apache.druid.cli.convert;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
+import java.util.HashMap;
 import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
@@ -56,7 +56,7 @@ public class DataSegmentPusherDefaultConverter implements PropertyConverter
     }
     // It's an s3 property, which means we need to set the type and convert the other values.
-    Map retVal = Maps.newHashMap();
+    Map retVal = new HashMap<>();
     retVal.put("druid.pusher.type", type);
     retVal.putAll(new Rename("druid.pusher.s3.bucket", "druid.storage.bucket").convert(props));
diff --git a/services/src/test/java/org/apache/druid/cli/PullDependenciesTest.java b/services/src/test/java/org/apache/druid/cli/PullDependenciesTest.java
index 7a6150a190a..7e538391c31 100644
--- a/services/src/test/java/org/apache/druid/cli/PullDependenciesTest.java
+++ b/services/src/test/java/org/apache/druid/cli/PullDependenciesTest.java
@@ -21,7 +21,6 @@ package org.apache.druid.cli;
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
 import io.tesla.aether.internal.DefaultTeslaAether;
 import org.apache.druid.guice.ExtensionsConfig;
 import org.apache.druid.java.util.common.StringUtils;
@@ -36,6 +35,7 @@ import org.junit.rules.TemporaryFolder;
 import java.io.File;
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
@@ -115,7 +115,7 @@ public class PullDependenciesTest
   private List getArtifactsForExtension(Artifact artifact)
   {
     final List jarNames = extensionToJars.get(artifact);
-    final List artifacts = Lists.newArrayList();
+    final List artifacts = new ArrayList<>();
     for (String jarName : jarNames) {
       final File jarFile = new File(localRepo, jarName);
       try {
diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java
index 4e1c5f434ec..c6d17f1efa0 100644
--- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java
+++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidConnection.java
@@ -85,7 +85,7 @@ public class DruidConnection
     // remove sensitive fields from the context, only the connection's context needs to have authentication
     // credentials
-    Map sanitizedContext = Maps.newHashMap();
+    Map sanitizedContext = new HashMap<>();
     sanitizedContext = Maps.filterEntries(
         context,
         new Predicate>()
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/Aggregation.java b/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/Aggregation.java
index 6ad0caf7801..559af3eeaad 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/Aggregation.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/aggregation/Aggregation.java
@@ -22,8 +22,6 @@ package org.apache.druid.sql.calcite.aggregation;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
 import org.apache.druid.java.util.common.IAE;
 import org.apache.druid.java.util.common.ISE;
 import org.apache.druid.query.aggregation.AggregatorFactory;
@@ -36,7 +34,9 @@ import org.apache.druid.sql.calcite.filtration.Filtration;
 import org.apache.druid.sql.calcite.table.RowSignature;
 import javax.annotation.Nullable;
+import java.util.ArrayList;
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Objects;
 import java.util.Set;
@@ -166,7 +166,7 @@ public class Aggregation
     // Verify that this Aggregation contains all input to its postAggregator.
     // If not, this "filter" call won't work right.
     final Set dependentFields = postAggregator.getDependentFields();
-    final Set aggregatorNames = Sets.newHashSet();
+    final Set aggregatorNames = new HashSet<>();
     for (AggregatorFactory aggregatorFactory : aggregatorFactories) {
       aggregatorNames.add(aggregatorFactory.getName());
     }
@@ -181,7 +181,7 @@
         .optimizeFilterOnly(sourceRowSignature)
         .getDimFilter();
-    final List newAggregators = Lists.newArrayList();
+    final List newAggregators = new ArrayList<>();
     for (AggregatorFactory agg : aggregatorFactories) {
       if (agg instanceof FilteredAggregatorFactory) {
         final FilteredAggregatorFactory filteredAgg = (FilteredAggregatorFactory) agg;
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/expression/Expressions.java b/sql/src/main/java/org/apache/druid/sql/calcite/expression/Expressions.java
index e8b0ffb9c4f..7d32108f643 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/expression/Expressions.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/expression/Expressions.java
@@ -21,7 +21,6 @@ package org.apache.druid.sql.calcite.expression;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
 import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
 import org.apache.calcite.rel.core.Project;
 import org.apache.calcite.rex.RexCall;
@@ -233,7 +232,7 @@
     } else if (kind == SqlKind.AND
                || kind == SqlKind.OR
                || kind == SqlKind.NOT) {
-      final List filters = Lists.newArrayList();
+      final List filters = new ArrayList<>();
       for (final RexNode rexNode : ((RexCall) expression).getOperands()) {
         final DimFilter nextFilter = toFilter(
             plannerContext,
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/expression/ExtractionFns.java b/sql/src/main/java/org/apache/druid/sql/calcite/expression/ExtractionFns.java
index 8d5e91053bf..985a3f409ef 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/expression/ExtractionFns.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/expression/ExtractionFns.java
@@ -19,12 +19,12 @@ package org.apache.druid.sql.calcite.expression;
-import com.google.common.collect.Lists;
 import org.apache.druid.java.util.common.granularity.Granularity;
 import org.apache.druid.query.extraction.CascadeExtractionFn;
 import org.apache.druid.query.extraction.ExtractionFn;
 import org.apache.druid.query.extraction.TimeFormatExtractionFn;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -84,7 +84,7 @@ public class ExtractionFns
     } else if (g == null) {
       return f;
     } else {
-      final List extractionFns = Lists.newArrayList();
+      final List extractionFns = new ArrayList<>();
       // Apply g, then f, unwrapping if they are already cascades.
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/filtration/BottomUpTransform.java b/sql/src/main/java/org/apache/druid/sql/calcite/filtration/BottomUpTransform.java
index 10d7b007eb0..9aacde425ab 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/filtration/BottomUpTransform.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/filtration/BottomUpTransform.java
@@ -21,12 +21,12 @@ package org.apache.druid.sql.calcite.filtration;
 import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
 import org.apache.druid.query.filter.AndDimFilter;
 import org.apache.druid.query.filter.DimFilter;
 import org.apache.druid.query.filter.NotDimFilter;
 import org.apache.druid.query.filter.OrDimFilter;
+import java.util.ArrayList;
 import java.util.List;
 public abstract class BottomUpTransform implements Function
@@ -54,7 +54,7 @@ public abstract class BottomUpTransform implements Function
       oldFilters = ((AndDimFilter) filter).getFields();
-      final List newFilters = Lists.newArrayList();
+      final List newFilters = new ArrayList<>();
       for (DimFilter oldFilter : oldFilters) {
         final DimFilter newFilter = apply0(oldFilter);
         if (newFilter != null) {
@@ -68,7 +68,7 @@ public abstract class BottomUpTransform implements Function
       oldFilters = ((OrDimFilter) filter).getFields();
-      final List newFilters = Lists.newArrayList();
+      final List newFilters = new ArrayList<>();
       for (DimFilter oldFilter : oldFilters) {
         final DimFilter newFilter = apply0(oldFilter);
         if (newFilter != null) {
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/filtration/CombineAndSimplifyBounds.java b/sql/src/main/java/org/apache/druid/sql/calcite/filtration/CombineAndSimplifyBounds.java
index 96d1c0d48a8..a8eb0a59e98 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/filtration/CombineAndSimplifyBounds.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/filtration/CombineAndSimplifyBounds.java
@@ -21,7 +21,6 @@ package org.apache.druid.sql.calcite.filtration;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import com.google.common.collect.Range;
 import com.google.common.collect.RangeSet;
 import org.apache.druid.java.util.common.ISE;
@@ -32,6 +31,7 @@ import org.apache.druid.query.filter.NotDimFilter;
 import org.apache.druid.query.filter.OrDimFilter;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -128,7 +128,7 @@ public class CombineAndSimplifyBounds extends BottomUpTransform
     final List newChildren = Lists.newArrayList(children);
     // Group Bound filters by dimension, extractionFn, and comparator and compute a RangeSet for each one.
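
Note that the CombineAndSimplifyBounds hunk above keeps Lists.newArrayList(children) while converting only the zero-argument factories, presumably because that overload is the copy-style factory: Guava accepts any Iterable (or varargs), whereas the JDK copy constructor requires a Collection. A sketch of the Collection case, which is the one with a direct JDK equivalent:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class CopyFactorySketch
    {
      public static void main(String[] args)
      {
        List<String> children = Arrays.asList("boundA", "boundB");
        // Mutable copy; equivalent to Lists.newArrayList(children) when the
        // argument is a Collection, as it is here.
        List<String> newChildren = new ArrayList<>(children);
        newChildren.add("boundC");
        System.out.println(newChildren);
      }
    }
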
-      final Map> bounds = Maps.newHashMap();
+      final Map> bounds = new HashMap<>();
       final Iterator iterator = newChildren.iterator();
       while (iterator.hasNext()) {
@@ -157,7 +157,7 @@
           final BoundRefKey boundRefKey = BoundRefKey.from(bound);
           List filterList = bounds.get(boundRefKey);
           if (filterList == null) {
-            filterList = Lists.newArrayList();
+            filterList = new ArrayList<>();
             bounds.put(boundRefKey, filterList);
           }
           filterList.add(bound);
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/filtration/ConvertSelectorsToIns.java b/sql/src/main/java/org/apache/druid/sql/calcite/filtration/ConvertSelectorsToIns.java
index a85d7c5e789..a594bbf0b6d 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/filtration/ConvertSelectorsToIns.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/filtration/ConvertSelectorsToIns.java
@@ -20,7 +20,6 @@ package org.apache.druid.sql.calcite.filtration;
 import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import org.apache.druid.java.util.common.ISE;
 import org.apache.druid.query.filter.DimFilter;
 import org.apache.druid.query.filter.InDimFilter;
@@ -29,6 +28,8 @@ import org.apache.druid.query.filter.SelectorDimFilter;
 import org.apache.druid.sql.calcite.expression.SimpleExtraction;
 import org.apache.druid.sql.calcite.table.RowSignature;
+import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -54,7 +55,7 @@ public class ConvertSelectorsToIns extends BottomUpTransform
     final List children = Lists.newArrayList(((OrDimFilter) filter).getFields());
     // Group filters by dimension and extractionFn.
-    final Map> selectors = Maps.newHashMap();
+    final Map> selectors = new HashMap<>();
     for (DimFilter child : children) {
       if (child instanceof SelectorDimFilter) {
@@ -67,7 +68,7 @@
         );
         List filterList = selectors.get(boundRefKey);
         if (filterList == null) {
-          filterList = Lists.newArrayList();
+          filterList = new ArrayList<>();
           selectors.put(boundRefKey, filterList);
         }
         filterList.add(selector);
@@ -79,7 +80,7 @@
       final List filterList = entry.getValue();
       if (filterList.size() > 1) {
         // We found a simplification. Remove the old filters and add new ones.
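
Both grouping hunks above keep the explicit null-check (get, test, put) and only swap the factory call, which keeps the diff minimal. Since Java 8, the same get-or-create step could also be written with Map.computeIfAbsent; this is shown only as the idiomatic alternative, on illustrative types, not as what the patch does:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class GroupingSketch
    {
      public static void main(String[] args)
      {
        Map<String, List<String>> selectors = new HashMap<>();
        for (String dim : new String[]{"dim1", "dim1", "dim2"}) {
          // Creates the list on first sight of the key, then appends.
          selectors.computeIfAbsent(dim, k -> new ArrayList<>()).add("value-for-" + dim);
        }
        System.out.println(selectors); // {dim1=[.., ..], dim2=[..]}
      }
    }
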
-        final List values = Lists.newArrayList();
+        final List values = new ArrayList<>();
         for (final SelectorDimFilter selector : filterList) {
           values.add(selector.getValue());
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/filtration/MoveTimeFiltersToIntervals.java b/sql/src/main/java/org/apache/druid/sql/calcite/filtration/MoveTimeFiltersToIntervals.java
index 6ea3784bcbe..b374d252374 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/filtration/MoveTimeFiltersToIntervals.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/filtration/MoveTimeFiltersToIntervals.java
@@ -21,7 +21,6 @@ package org.apache.druid.sql.calcite.filtration;
 import com.google.common.base.Function;
 import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
 import com.google.common.collect.Range;
 import com.google.common.collect.RangeSet;
 import org.apache.druid.java.util.common.Pair;
@@ -33,6 +32,7 @@ import org.apache.druid.query.filter.OrDimFilter;
 import org.apache.druid.query.ordering.StringComparators;
 import org.apache.druid.segment.column.ColumnHolder;
+import java.util.ArrayList;
 import java.util.List;
 public class MoveTimeFiltersToIntervals implements Function
@@ -85,8 +85,8 @@ public class MoveTimeFiltersToIntervals implements Function
       children = ((AndDimFilter) filter).getFields();
-      final List newChildren = Lists.newArrayList();
-      final List> rangeSets = Lists.newArrayList();
+      final List newChildren = new ArrayList<>();
+      final List> rangeSets = new ArrayList<>();
       for (DimFilter child : children) {
         final Pair> pair = extractConvertibleTimeBounds(child);
@@ -113,7 +113,7 @@ public class MoveTimeFiltersToIntervals implements Function
       children = ((OrDimFilter) filter).getFields();
-      final List> rangeSets = Lists.newArrayList();
+      final List> rangeSets = new ArrayList<>();
       boolean allCompletelyConverted = true;
       boolean allHadIntervals = true;
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/filtration/RangeSets.java b/sql/src/main/java/org/apache/druid/sql/calcite/filtration/RangeSets.java
index 257441dbe5a..17bae150834 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/filtration/RangeSets.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/filtration/RangeSets.java
@@ -21,13 +21,13 @@ package org.apache.druid.sql.calcite.filtration;
 import com.google.common.collect.BoundType;
 import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
 import com.google.common.collect.Range;
 import com.google.common.collect.RangeSet;
 import com.google.common.collect.TreeRangeSet;
 import org.apache.druid.java.util.common.Intervals;
 import org.joda.time.Interval;
+import java.util.ArrayList;
 import java.util.List;
 public class RangeSets
@@ -109,7 +109,7 @@ public class RangeSets
   public static List toIntervals(final RangeSet rangeSet)
   {
-    final List retVal = Lists.newArrayList();
+    final List retVal = new ArrayList<>();
     for (Range range : rangeSet.asRanges()) {
       final long start;
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidOperatorTable.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidOperatorTable.java
index 1028d8d004b..13790a93f3a 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidOperatorTable.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidOperatorTable.java
@@ -21,7 +21,6 @@ package org.apache.druid.sql.calcite.planner;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Maps;
 import com.google.inject.Inject;
 import org.apache.calcite.sql.SqlAggFunction;
 import org.apache.calcite.sql.SqlFunctionCategory;
@@ -73,6 +72,7 @@ import org.apache.druid.sql.calcite.expression.builtin.TrimOperatorConversion;
 import org.apache.druid.sql.calcite.expression.builtin.TruncateOperatorConversion;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -183,8 +183,8 @@ public class DruidOperatorTable implements SqlOperatorTable
       final Set operatorConversions
   )
   {
-    this.aggregators = Maps.newHashMap();
-    this.operatorConversions = Maps.newHashMap();
+    this.aggregators = new HashMap<>();
+    this.operatorConversions = new HashMap<>();
     for (SqlAggregator aggregator : aggregators) {
       final OperatorKey operatorKey = OperatorKey.of(aggregator.calciteFunction());
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java
index 1d1c1f65b1d..96a091f26ec 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidPlanner.java
@@ -25,7 +25,6 @@ import com.google.common.base.Supplier;
 import com.google.common.base.Suppliers;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterables;
-import com.google.common.collect.Sets;
 import org.apache.calcite.DataContext;
 import org.apache.calcite.adapter.java.JavaTypeFactory;
 import org.apache.calcite.interpreter.BindableConvention;
@@ -68,6 +67,7 @@ import javax.annotation.Nullable;
 import javax.servlet.http.HttpServletRequest;
 import java.io.Closeable;
 import java.util.ArrayList;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Set;
@@ -232,7 +232,7 @@ public class DruidPlanner implements Closeable
       final AuthenticationResult authenticationResult
   )
   {
-    Set datasourceNames = Sets.newHashSet();
+    Set datasourceNames = new HashSet<>();
     rel.childrenAccept(
         new RelVisitor()
         {
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java
index 2a10ac7314b..6f0c51bbf4a 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/PlannerContext.java
@@ -21,7 +21,6 @@ package org.apache.druid.sql.calcite.planner;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Maps;
 import org.apache.calcite.DataContext;
 import org.apache.calcite.adapter.java.JavaTypeFactory;
 import org.apache.calcite.linq4j.QueryProvider;
@@ -33,6 +32,7 @@ import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.joda.time.Interval;
+import java.util.HashMap;
 import java.util.Map;
 /**
@@ -67,7 +67,7 @@ public class PlannerContext
     this.operatorTable = operatorTable;
     this.macroTable = macroTable;
     this.plannerConfig = Preconditions.checkNotNull(plannerConfig, "plannerConfig");
-    this.queryContext = queryContext != null ? Maps.newHashMap(queryContext) : Maps.newHashMap();
+    this.queryContext = queryContext != null ? new HashMap<>(queryContext) : new HashMap<>();
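
The PlannerContext change just above uses HashMap's copy constructor: new HashMap<>(source) is the one-for-one replacement for Maps.newHashMap(source), with the null branch still needed because the constructor rejects a null argument. A sketch of that defensive-copy shape, with a hypothetical helper name:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    public class DefensiveCopySketch
    {
      static Map<String, Object> copyContext(Map<String, Object> queryContext)
      {
        // Copy when present, start empty otherwise; mirrors the hunk above.
        return queryContext != null ? new HashMap<>(queryContext) : new HashMap<>();
      }

      public static void main(String[] args)
      {
        Map<String, Object> ctx = Collections.singletonMap("timeout", 30_000);
        System.out.println(copyContext(null)); // {}
        System.out.println(copyContext(ctx));  // {timeout=30000}
      }
    }
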
     this.localNow = Preconditions.checkNotNull(localNow, "localNow");
   }
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/rel/DruidQuery.java b/sql/src/main/java/org/apache/druid/sql/calcite/rel/DruidQuery.java
index bbba989857d..09cc938c385 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/rel/DruidQuery.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/rel/DruidQuery.java
@@ -25,7 +25,6 @@ import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.ImmutableSortedMap;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Iterators;
-import com.google.common.collect.Maps;
 import com.google.common.collect.Ordering;
 import org.apache.calcite.plan.RelOptUtil;
 import org.apache.calcite.rel.RelFieldCollation;
@@ -87,6 +86,7 @@ import org.apache.druid.sql.calcite.table.RowSignature;
 import javax.annotation.Nullable;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.TreeSet;
@@ -801,7 +801,7 @@ public class DruidQuery
       postAggregators.addAll(sortProject.getPostAggregators());
     }
-    final Map theContext = Maps.newHashMap();
+    final Map theContext = new HashMap<>();
     theContext.put("skipEmptyBuckets", true);
     theContext.putAll(plannerContext.getQueryContext());
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/rel/Grouping.java b/sql/src/main/java/org/apache/druid/sql/calcite/rel/Grouping.java
index d9b6d9d0e1a..5a8dd3813a0 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/rel/Grouping.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/rel/Grouping.java
@@ -20,7 +20,6 @@ package org.apache.druid.sql.calcite.rel;
 import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Sets;
 import org.apache.druid.java.util.common.ISE;
 import org.apache.druid.query.aggregation.AggregatorFactory;
 import org.apache.druid.query.aggregation.PostAggregator;
@@ -31,6 +30,7 @@ import org.apache.druid.sql.calcite.aggregation.DimensionExpression;
 import org.apache.druid.sql.calcite.table.RowSignature;
 import javax.annotation.Nullable;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Objects;
 import java.util.Set;
@@ -56,7 +56,7 @@ public class Grouping
     this.outputRowSignature = outputRowSignature;
     // Verify no collisions.
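
The Grouping hunk below is a duplicate-detection idiom worth noting: it exploits Set.add's boolean return, so a plain HashSet is all the JDK machinery required. A sketch on illustrative data:

    import java.util.HashSet;
    import java.util.Set;

    public class CollisionCheckSketch
    {
      public static void main(String[] args)
      {
        Set<String> seen = new HashSet<>(); // was Sets.newHashSet()
        for (String name : new String[]{"d0", "a0", "d0"}) {
          // add() returns false when the element was already present.
          if (!seen.add(name)) {
            System.out.println("Duplicate field name: " + name);
          }
        }
      }
    }
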
-    final Set seen = Sets.newHashSet();
+    final Set seen = new HashSet<>();
     for (DimensionExpression dimensionExpression : dimensions) {
       if (!seen.add(dimensionExpression.getOutputName())) {
         throw new ISE("Duplicate field name: %s", dimensionExpression.getOutputName());
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/rel/QueryMaker.java b/sql/src/main/java/org/apache/druid/sql/calcite/rel/QueryMaker.java
index a04280e30c9..54a1aa1d7d8 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/rel/QueryMaker.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/rel/QueryMaker.java
@@ -23,7 +23,6 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Iterables;
-import com.google.common.collect.Maps;
 import com.google.common.primitives.Ints;
 import org.apache.calcite.avatica.ColumnMetaData;
 import org.apache.calcite.rel.type.RelDataTypeField;
@@ -64,6 +63,7 @@ import org.joda.time.DateTime;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -145,7 +145,7 @@ public class QueryMaker
     // SQL row column index -> Scan query column index
     final int[] columnMapping = new int[outputRowSignature.getRowOrder().size()];
-    final Map scanColumnOrder = Maps.newHashMap();
+    final Map scanColumnOrder = new HashMap<>();
     for (int i = 0; i < query.getColumns().size(); i++) {
       scanColumnOrder.put(query.getColumns().get(i), i);
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/table/RowSignature.java b/sql/src/main/java/org/apache/druid/sql/calcite/table/RowSignature.java
index 53feeca1c90..c6051c8d784 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/table/RowSignature.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/table/RowSignature.java
@@ -22,8 +22,6 @@ package org.apache.druid.sql.calcite.table;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import org.apache.calcite.rel.type.RelDataType;
 import org.apache.calcite.rel.type.RelDataTypeFactory;
 import org.apache.calcite.rel.type.RelDataTypeField;
@@ -39,6 +37,8 @@ import org.apache.druid.sql.calcite.expression.SimpleExtraction;
 import org.apache.druid.sql.calcite.planner.Calcites;
 import javax.annotation.Nonnull;
+import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -54,7 +54,7 @@ public class RowSignature
   private RowSignature(final List> columnTypeList)
   {
-    final Map columnTypes0 = Maps.newHashMap();
+    final Map columnTypes0 = new HashMap<>();
     final ImmutableList.Builder columnNamesBuilder = ImmutableList.builder();
     int i = 0;
@@ -228,7 +228,7 @@
   private Builder()
   {
-    this.columnTypeList = Lists.newArrayList();
+    this.columnTypeList = new ArrayList<>();
   }
   public Builder add(String columnName, ValueType columnType)
diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java
index ff8dd2a5877..1b92d0db142 100644
--- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java
@@ -24,8 +24,6 @@ import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
 import com.google.common.util.concurrent.ListeningExecutorService;
@@ -87,6 +85,7 @@ import java.sql.Statement;
 import java.sql.Timestamp;
 import java.sql.Types;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -840,9 +839,9 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase
   {
     try {
       final ResultSetMetaData metaData = resultSet.getMetaData();
-      final List> rows = Lists.newArrayList();
+      final List> rows = new ArrayList<>();
       while (resultSet.next()) {
-        final Map row = Maps.newHashMap();
+        final Map row = new HashMap<>();
         for (int i = 0; i < metaData.getColumnCount(); i++) {
           if (returnKeys == null || returnKeys.contains(metaData.getColumnLabel(i + 1))) {
             row.put(metaData.getColumnLabel(i + 1), resultSet.getObject(i + 1));
@@ -859,7 +858,7 @@
   private static Map ROW(final Pair... entries)
   {
-    final Map m = Maps.newHashMap();
+    final Map m = new HashMap<>();
     for (Pair entry : entries) {
       m.put(entry.lhs, entry.rhs);
     }
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java
index 4316ec05608..9261e71679c 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java
@@ -22,7 +22,6 @@ package org.apache.druid.sql.calcite;
 import com.google.common.base.Joiner;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Maps;
 import org.apache.calcite.plan.RelOptPlanner;
 import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.hll.VersionOneHyperLogLogCollector;
@@ -134,6 +133,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -248,7 +248,7 @@ public class CalciteQueryTest extends CalciteTestBase
   );
   // Matches QUERY_CONTEXT_LOS_ANGELES
-  public static final Map TIMESERIES_CONTEXT_LOS_ANGELES = Maps.newHashMap();
+  public static final Map TIMESERIES_CONTEXT_LOS_ANGELES = new HashMap<>();
   {
     TIMESERIES_CONTEXT_LOS_ANGELES.put(PlannerContext.CTX_SQL_CURRENT_TIMESTAMP, "2000-01-01T00:00:00Z");
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java
index 0bea3a5bcee..2b78904c5c4 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/CalciteTests.java
@@ -26,7 +26,6 @@ import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Maps;
 import com.google.inject.Binder;
 import com.google.inject.Guice;
 import com.google.inject.Injector;
@@ -133,6 +132,7 @@ import org.joda.time.chrono.ISOChronology;
 import java.io.File;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
@@ -174,7 +174,7 @@ public class CalciteTests
   public static final AuthenticatorMapper TEST_AUTHENTICATOR_MAPPER;
   static {
-    final Map defaultMap = Maps.newHashMap();
+    final Map defaultMap = new HashMap<>();
     defaultMap.put(
         AuthConfig.ALLOW_ALL_NAME,
         new AllowAllAuthenticator()
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/util/SpecificSegmentsQuerySegmentWalker.java b/sql/src/test/java/org/apache/druid/sql/calcite/util/SpecificSegmentsQuerySegmentWalker.java
index d5ac4ceac26..55fff600da8 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/util/SpecificSegmentsQuerySegmentWalker.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/SpecificSegmentsQuerySegmentWalker.java
@@ -21,8 +21,6 @@ package org.apache.druid.sql.calcite.util;
 import com.google.common.base.Function;
 import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import com.google.common.collect.Ordering;
 import com.google.common.io.Closeables;
 import com.google.common.util.concurrent.MoreExecutors;
@@ -55,15 +53,17 @@ import org.joda.time.Interval;
 import java.io.Closeable;
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 public class SpecificSegmentsQuerySegmentWalker implements QuerySegmentWalker, Closeable
 {
   private final QueryRunnerFactoryConglomerate conglomerate;
-  private final Map> timelines = Maps.newHashMap();
-  private final List closeables = Lists.newArrayList();
-  private final List segments = Lists.newArrayList();
+  private final Map> timelines = new HashMap<>();
+  private final List closeables = new ArrayList<>();
+  private final List segments = new ArrayList<>();
   public SpecificSegmentsQuerySegmentWalker(QueryRunnerFactoryConglomerate conglomerate)
   {