From c0beb78ffdb5807ceea8a340838eab01d6a18d1d Mon Sep 17 00:00:00 2001 From: Roman Leventov Date: Fri, 21 Jul 2017 18:26:59 +0300 Subject: [PATCH] Enforce brace formatting with Checkstyle (#4564) --- .../java/io/druid/data/input/MapBasedRow.java | 9 +- .../data/input/impl/DimensionSchema.java | 5 +- .../PrefetchableTextFilesFirehoseFactory.java | 10 +- .../druid/data/input/impl/TimestampSpec.java | 3 +- .../io/druid/guice/JsonConfigProvider.java | 6 +- .../java/io/druid/guice/JsonConfigurator.java | 15 ++- .../java/io/druid/guice/LifecycleModule.java | 3 +- .../java/io/druid/guice/LifecycleScope.java | 3 +- .../main/java/io/druid/guice/PolyBind.java | 12 +-- .../segment/loading/DataSegmentPusher.java | 11 ++- .../io/druid/segment/loading/LoadSpec.java | 11 ++- .../io/druid/timeline/DataSegmentUtils.java | 3 +- .../timeline/partition/NoneShardSpec.java | 14 ++- .../main/java/io/druid/utils/Runnables.java | 8 +- .../data/input/impl/JSONParseSpecTest.java | 13 ++- .../common/aws/AWSCredentialsConfig.java | 5 +- .../druid/common/aws/AWSCredentialsUtils.java | 3 +- ...figDrivenAwsCredentialsConfigProvider.java | 12 ++- .../aws/FileSessionCredentialsProvider.java | 37 ++++--- .../LazyFileSessionCredentialsProvider.java | 9 +- .../benchmark/BitmapIterationBenchmark.java | 3 +- .../druid/benchmark/FlattenJSONBenchmark.java | 3 +- .../datagen/BenchmarkColumnSchema.java | 20 ++-- .../benchmark/query/QueryBenchmarkUtil.java | 4 +- .../benchmark/BenchmarkDataGeneratorTest.java | 30 ++++-- .../FlattenJSONBenchmarkUtilTest.java | 9 +- codestyle/checkstyle-suppressions.xml | 2 + codestyle/checkstyle.xml | 17 ++++ .../collections/DefaultBlockingPool.java | 24 +++-- .../collections/OrderedMergeSequence.java | 15 +-- .../java/io/druid/collections/StupidPool.java | 3 +- .../io/druid/common/config/Log4jShutdown.java | 3 +- .../druid/common/guava/CombiningSequence.java | 3 +- .../java/io/druid/common/utils/PropUtils.java | 7 +- .../io/druid/common/utils/SocketUtil.java | 7 +- 
.../io/druid/guice/DruidSecondaryModule.java | 3 +- .../io/druid/guice/GuiceInjectableValues.java | 10 +- .../io/druid/guice/JacksonConfigProvider.java | 3 +- .../metadata/DefaultPasswordProvider.java | 3 +- .../ReferenceCountingResourceHolderTest.java | 20 ++-- .../io/druid/common/utils/VMUtilsTest.java | 6 +- .../metadata/DefaultPasswordProviderTest.java | 12 ++- .../druid/extendedset/intset/ConciseSet.java | 82 ++++++--------- .../intset/ImmutableConciseSet.java | 7 +- .../metrics/AmbariMetricsEmitterModule.java | 3 +- .../io/druid/firehose/azure/AzureBlob.java | 15 ++- .../storage/azure/AzureAccountConfig.java | 25 ++++- .../io/druid/storage/azure/AzureStorage.java | 10 +- .../io/druid/storage/azure/AzureTaskLogs.java | 39 +++++--- .../storage/azure/AzureTaskLogsConfig.java | 19 ++-- .../io/druid/storage/azure/AzureUtils.java | 3 +- .../storage/azure/AzureTaskLogsTest.java | 21 ++-- .../cassandra/CassandraDataSegmentPuller.java | 27 ++--- .../cassandra/CassandraDataSegmentPusher.java | 7 +- .../io/druid/firehose/google/GoogleBlob.java | 12 ++- .../StaticGoogleBlobStoreFirehoseFactory.java | 3 +- .../google/GoogleDataSegmentPusher.java | 3 +- .../druid/storage/google/GoogleStorage.java | 3 +- .../druid/storage/google/GoogleTaskLogs.java | 27 +++-- .../storage/google/GoogleTaskLogsConfig.java | 12 ++- .../storage/google/GoogleTaskLogsTest.java | 21 ++-- .../graphite/GraphiteEmitterModule.java | 3 +- .../data/input/orc/OrcExtensionsModule.java | 7 +- .../input/orc/DruidOrcInputFormatTest.java | 3 +- .../input/orc/OrcIndexGeneratorJobTest.java | 3 +- .../query/scan/ScanQueryDruidModule.java | 9 +- .../io/druid/query/scan/ScanQueryEngine.java | 3 +- .../druid/query/scan/ScanQueryRunnerTest.java | 3 +- .../storage/sqlserver/SQLServerConnector.java | 3 +- .../CustomStatementRewriterTest.java | 5 +- .../aggregation/TimestampAggregator.java | 3 +- .../TimestampMaxAggregatorFactory.java | 10 +- .../TimestampMinAggregatorFactory.java | 10 +- 
.../avro/SchemaRepoBasedAvroBytesDecoder.java | 6 +- .../input/AvroStreamInputRowParserTest.java | 19 ++-- .../security/kerberos/DruidKerberosUtil.java | 3 +- .../storage/hdfs/HdfsKerberosConfig.java | 3 +- .../loading/HdfsDataSegmentFinderTest.java | 3 +- .../histogram/ApproximateHistogram.java | 46 ++++++--- .../histogram/ApproximateHistogramTest.java | 4 +- ...kaLookupExtractorIntrospectionHandler.java | 5 +- .../KafkaLookupExtractorFactoryTest.java | 3 +- .../kafka/KafkaIndexTaskClientTest.java | 4 +- .../namespace/cache/CacheScheduler.java | 4 +- .../NamespaceExtractionCacheManager.java | 3 +- .../io/druid/server/lookup/LoadingLookup.java | 18 ++-- .../io/druid/server/lookup/PollingLookup.java | 5 +- .../lookup/PollingLookupFactoryTest.java | 3 +- .../lookup/PollingLookupSerDeserTest.java | 9 +- .../server/lookup/PollingLookupTest.java | 11 ++- .../cache/loading/LoadingCacheTest.java | 5 +- .../storage/mysql/MySQLConnector.java | 8 +- .../postgresql/PostgreSQLConnector.java | 11 ++- .../input/protobuf/ProtoTestEventWrapper.java | 10 +- .../s3/AWSSessionCredentialsAdapter.java | 23 +++-- .../druid/storage/s3/S3DataSegmentFinder.java | 9 +- .../storage/s3/S3DataSegmentFinderTest.java | 3 +- .../storage/s3/S3DataSegmentMoverTest.java | 9 +- .../storage/s3/S3DataSegmentPusherTest.java | 15 ++- .../TestFileSessionCredentialsProvider.java | 6 +- .../variance/VarianceAggregatorCollector.java | 3 +- .../io/druid/hll/HyperLogLogCollector.java | 3 +- .../java/io/druid/hll/HyperLogLogHash.java | 6 +- .../hll/HyperLogLogCollectorBenchmark.java | 18 ++-- .../druid/hll/HyperLogLogCollectorTest.java | 6 +- .../indexer/HadoopDruidIndexerConfig.java | 3 +- .../druid/indexer/HadoopKerberosConfig.java | 3 +- .../java/io/druid/indexer/InputRowSerde.java | 11 ++- .../druid/indexer/hadoop/SegmentInputRow.java | 3 +- .../indexer/path/GranularityPathSpec.java | 7 +- .../java/io/druid/indexer/JobHelperTest.java | 5 +- .../hadoop/DatasourceInputFormatTest.java | 3 +- 
.../path/HadoopGlobPathSplitterTest.java | 6 +- .../druid/indexing/common/TaskLocation.java | 3 +- .../io/druid/indexing/common/TaskToolbox.java | 3 +- .../actions/RemoteTaskActionClient.java | 3 +- .../indexing/common/task/MergeTaskBase.java | 3 +- .../tasklogs/TaskRunnerTaskLogStreamer.java | 5 +- .../overlord/HeapMemoryTaskStorage.java | 35 ++++--- .../overlord/ImmutableWorkerInfo.java | 3 +- .../indexing/overlord/RemoteTaskRunner.java | 5 +- .../druid/indexing/overlord/TaskLockbox.java | 20 ++-- .../io/druid/indexing/overlord/TaskQueue.java | 9 +- .../io/druid/indexing/overlord/ZkWorker.java | 9 +- .../config/RemoteTaskRunnerConfig.java | 27 +++-- .../helpers/TaskLogAutoCleanerConfig.java | 3 +- .../executor/ExecutorLifecycleConfig.java | 3 +- .../indexing/overlord/TaskLockboxTest.java | 9 +- .../src/main/java/org/testng/TestNG.java | 26 +++-- .../tests/indexer/AbstractIndexerTest.java | 3 +- .../druid/java/util/common/collect/Utils.java | 15 ++- .../util/common/granularity/Granularity.java | 3 +- .../common/granularity/GranularityType.java | 18 ++-- .../common/guava/FilteringAccumulator.java | 6 +- .../guava/FilteringYieldingAccumulator.java | 6 +- .../util/common/guava/MappingAccumulator.java | 6 +- .../guava/MappingYieldingAccumulator.java | 3 +- .../io/druid/java/util/common/io/Closer.java | 32 +++--- .../common/io/smoosh/SmooshedFileMapper.java | 3 +- .../java/util/common/GranularityTest.java | 9 +- .../common/guava/WithEffectSequenceTest.java | 6 +- .../common/parsers/TimestampParserTest.java | 12 ++- pom.xml | 2 +- .../DruidDefaultSerializersModule.java | 6 +- .../main/java/io/druid/jackson/JodaStuff.java | 3 +- .../io/druid/jackson/SegmentizerModule.java | 3 +- .../java/io/druid/query/AsyncQueryRunner.java | 12 ++- .../druid/query/CPUTimeMetricQueryRunner.java | 3 +- .../query/ChainedExecutionQueryRunner.java | 18 ++-- .../io/druid/query/ConcatQueryRunner.java | 5 +- .../io/druid/query/DruidProcessingConfig.java | 3 +- 
.../druid/query/FluentQueryRunnerBuilder.java | 6 +- .../java/io/druid/query/QueryDataSource.java | 5 +- .../io/druid/query/QueryRunnerHelper.java | 3 +- .../ResultGranularTimestampComparator.java | 3 +- .../java/io/druid/query/RetryQueryRunner.java | 3 +- .../java/io/druid/query/TableDataSource.java | 8 +- .../java/io/druid/query/TimewarpOperator.java | 6 +- .../io/druid/query/aggregation/Histogram.java | 41 +++++--- .../JavaScriptBufferAggregator.java | 5 +- .../post/ArithmeticPostAggregator.java | 90 ++++++++--------- .../post/ExpressionPostAggregator.java | 3 +- .../query/extraction/TimeDimExtractionFn.java | 24 ++--- .../io/druid/query/filter/BoundDimFilter.java | 3 +- .../io/druid/query/filter/InDimFilter.java | 3 +- .../io/druid/query/filter/NotDimFilter.java | 5 +- .../epinephelinae/ByteBufferIntList.java | 3 +- .../ByteBufferMinMaxOffsetHeap.java | 54 ++++++---- .../query/groupby/epinephelinae/Groupers.java | 3 +- .../epinephelinae/LimitedBufferGrouper.java | 3 +- .../groupby/orderby/DefaultLimitSpec.java | 3 +- .../query/groupby/orderby/NoopLimitSpec.java | 7 +- .../groupby/orderby/OrderByColumnSpec.java | 14 +-- .../query/lookup/LookupExtractionFn.java | 10 +- .../LookupExtractorFactoryContainer.java | 3 +- .../query/lookup/LookupReferencesManager.java | 3 +- .../metadata/SegmentMetadataQueryConfig.java | 5 +- .../query/ordering/StringComparators.java | 99 +++++++------------ .../search/SearchQueryQueryToolChest.java | 12 +-- .../druid/query/select/SelectQueryEngine.java | 7 +- .../select/SelectQueryQueryToolChest.java | 12 +-- .../druid/query/spec/SpecificSegmentSpec.java | 10 +- .../java/io/druid/segment/BitmapOffset.java | 3 +- .../druid/segment/DimensionHandlerUtils.java | 9 +- .../segment/DoubleDimensionMergerV9.java | 6 +- .../druid/segment/FloatDimensionMergerV9.java | 6 +- .../druid/segment/LongDimensionMergerV9.java | 3 +- .../segment/SegmentMissingException.java | 6 +- .../segment/SingleScanTimeDimSelector.java | 17 ++-- 
.../io/druid/segment/column/DoubleColumn.java | 5 +- .../column/IndexedFloatsGenericColumn.java | 5 +- .../column/IndexedLongsGenericColumn.java | 5 +- .../CompressedDoublesIndexedSupplier.java | 3 +- .../data/CompressedIntsIndexedSupplier.java | 3 +- .../segment/data/CompressionFactory.java | 5 +- .../EntireLayoutDoubleSupplierSerializer.java | 5 +- .../EntireLayoutIndexedDoubleSupplier.java | 3 +- .../FixedSizeCompressedObjectStrategy.java | 3 +- .../segment/data/IntersectingOffset.java | 19 ++-- .../io/druid/segment/data/UnioningOffset.java | 25 ++--- .../io/druid/segment/data/VSizeIndexed.java | 9 +- .../druid/segment/data/VSizeIndexedInts.java | 15 +-- .../filter/ColumnComparisonFilter.java | 6 +- .../io/druid/segment/filter/OrFilter.java | 3 +- .../IncrementalIndexStorageAdapter.java | 3 +- .../incremental/OffheapIncrementalIndex.java | 3 +- .../serde/ComplexColumnPartSupplier.java | 5 +- .../DictionaryEncodedColumnPartSerde.java | 10 +- .../serde/DoubleGenericColumnPartSerde.java | 5 +- .../serde/DoubleGenericColumnSupplier.java | 7 +- .../serde/FloatGenericColumnSupplier.java | 6 +- .../serde/LongGenericColumnSupplier.java | 5 +- .../io/druid/guice/GuiceInjectorsTest.java | 12 ++- .../io/druid/query/AsyncQueryRunnerTest.java | 51 ++++++---- .../IntervalChunkingQueryRunnerTest.java | 15 ++- .../io/druid/query/QueryRunnerTestHelper.java | 10 ++ .../io/druid/query/RetryQueryRunnerTest.java | 68 ++++++------- .../aggregation/AggregationTestHelper.java | 8 +- .../aggregation/FilteredAggregatorTest.java | 6 +- .../aggregation/HistogramAggregatorTest.java | 6 +- .../query/aggregation/HistogramTest.java | 12 ++- .../HyperUniquesAggregatorFactoryTest.java | 3 +- .../SubstringDimExtractionFnTest.java | 3 +- .../query/filter/BoundDimFilterTest.java | 14 ++- .../query/filter/DimFilterUtilsTest.java | 12 ++- .../filter/GetDimensionRangeSetTest.java | 21 ++-- .../query/filter/RegexDimFilterTest.java | 3 +- .../GroupByQueryRunnerFactoryTest.java | 3 +- 
.../query/groupby/having/HavingSpecTest.java | 30 ++++-- .../groupby/orderby/TopNSequenceTest.java | 6 +- .../query/lookup/LookupExtractionFnTest.java | 3 +- .../query/search/SearchBinaryFnTest.java | 9 +- .../query/select/SelectQueryRunnerTest.java | 6 +- .../TimeBoundaryQueryRunnerTest.java | 3 +- .../topn/AlphaNumericTopNMetricSpecTest.java | 3 +- .../topn/DimensionTopNMetricSpecTest.java | 12 ++- .../druid/query/topn/TopNQueryRunnerTest.java | 85 ++++++++-------- .../java/io/druid/segment/AppendTest.java | 39 ++++---- .../CompressedVSizeIndexedV3SupplierTest.java | 6 +- .../segment/NullDimensionSelectorTest.java | 15 ++- .../druid/segment/SchemalessTestFullTest.java | 85 ++++++++-------- .../segment/StringDimensionHandlerTest.java | 12 ++- .../java/io/druid/segment/TestHelper.java | 3 +- .../data/BenchmarkIndexibleWrites.java | 3 +- .../data/CompressedLongsSerdeTest.java | 3 +- ...ompressedVSizeIntsIndexedSupplierTest.java | 3 +- .../io/druid/segment/filter/InFilterTest.java | 3 +- .../io/druid/segment/filter/RowboatTest.java | 3 +- .../loading/SegmentizerFactoryTest.java | 3 +- .../AbstractCuratorServerInventoryView.java | 3 +- .../druid/client/CachingClusteredClient.java | 3 +- .../java/io/druid/client/DruidServer.java | 6 +- .../druid/client/HttpServerInventoryView.java | 15 ++- .../client/HttpServerInventoryViewConfig.java | 3 +- .../java/io/druid/client/cache/Cache.java | 6 +- .../io/druid/client/cache/CacheConfig.java | 6 +- ...mcachedCustomConnectionFactoryBuilder.java | 84 ++++++++++------ .../druid/client/selector/ServerSelector.java | 3 +- .../discovery/CuratorServiceUtils.java | 3 +- .../curator/discovery/DiscoveryModule.java | 32 +++--- .../discovery/ServerDiscoveryFactory.java | 3 +- .../inventory/CuratorInventoryManager.java | 6 +- .../main/java/io/druid/guice/CacheModule.java | 3 +- .../druid/initialization/Initialization.java | 3 +- .../druid/metadata/SQLMetadataConnector.java | 10 +- .../storage/derby/DerbyConnector.java | 13 ++- 
.../lookup/MapLookupExtractorFactory.java | 4 +- .../granularity/ArbitraryGranularitySpec.java | 3 +- .../granularity/UniformGranularitySpec.java | 3 +- .../loading/LocalDataSegmentKiller.java | 3 +- .../segment/loading/SegmentLoaderConfig.java | 3 +- .../realtime/FireDepartmentMetrics.java | 15 ++- .../appenderator/AppenderatorPlumber.java | 3 +- .../DefaultOfflineAppenderatorFactory.java | 3 +- .../ServiceAnnouncingChatHandlerProvider.java | 6 +- .../firehose/WikipediaIrcDecoder.java | 15 ++- .../main/java/io/druid/server/DruidNode.java | 3 +- .../java/io/druid/server/QueryResource.java | 12 ++- .../coordination/DruidServerMetadata.java | 3 +- .../SegmentChangeRequestHistory.java | 6 +- .../coordinator/BalancerStrategyFactory.java | 2 +- .../DiskNormalizedCostBalancerStrategy.java | 3 +- .../server/coordinator/DruidCoordinator.java | 5 +- .../server/coordinator/LoadQueuePeon.java | 6 +- .../coordinator/RandomBalancerStrategy.java | 22 ++--- .../server/http/HostAndPortWithScheme.java | 30 ++++-- .../druid/server/http/IntervalsResource.java | 4 +- .../http/LookupCoordinatorResource.java | 18 ++-- .../io/druid/server/http/RulesResource.java | 32 +++--- .../initialization/HttpEmitterModule.java | 6 +- .../server/initialization/ZkPathsConfig.java | 15 ++- .../jetty/ResponseHeaderFilterHolder.java | 4 +- .../jetty/ServletFilterHolder.java | 3 +- .../druid/server/log/FileRequestLogger.java | 7 +- .../cache/LookupCoordinatorManager.java | 33 ++++--- .../LookupExtractorFactoryMapContainer.java | 3 +- ...avaScriptTieredBrokerSelectorStrategy.java | 7 +- .../java/io/druid/server/security/Access.java | 9 +- .../io/druid/server/security/AuthConfig.java | 8 +- .../timeline/partition/LinearShardSpec.java | 9 +- .../client/CachingClusteredClientTest.java | 3 +- .../druid/client/cache/CacheConfigTest.java | 8 +- .../client/cache/CacheDistributionTest.java | 9 +- .../io/druid/curator/CuratorTestBase.java | 6 +- .../CuratorInventoryManagerTest.java | 6 +- 
.../guice/DruidProcessingModuleTest.java | 3 +- .../io/druid/guice/JsonConfigTesterBase.java | 3 +- .../metadata/SQLMetadataConnectorTest.java | 3 +- .../LookupIntrospectionResourceTest.java | 3 +- .../granularity/ArbitraryGranularityTest.java | 9 +- .../granularity/UniformGranularityTest.java | 9 +- .../appenderator/AppenderatorPlumberTest.java | 6 +- .../SegmentChangeRequestHistoryTest.java | 12 ++- .../coordinator/DruidCoordinatorTest.java | 3 +- .../rules/ForeverLoadRuleTest.java | 3 +- .../rules/IntervalLoadRuleTest.java | 3 +- .../coordinator/rules/PeriodLoadRuleTest.java | 3 +- .../http/CoordinatorDynamicConfigTest.java | 3 +- .../server/http/DatasourcesResourceTest.java | 3 +- .../server/http/IntervalsResourceTest.java | 3 +- .../cache/LookupCoordinatorManagerTest.java | 9 +- .../druid/cli/CliInternalHadoopIndexer.java | 3 +- .../main/java/io/druid/cli/ResetCluster.java | 6 +- .../java/io/druid/cli/ValidateSegments.java | 11 ++- .../DataSegmentPusherDefaultConverter.java | 6 +- .../convert/DatabasePropertiesConverter.java | 3 +- .../io/druid/cli/convert/ValueConverter.java | 58 ----------- .../sql/avatica/DruidAvaticaHandlerTest.java | 2 +- 328 files changed, 1934 insertions(+), 1489 deletions(-) delete mode 100644 services/src/main/java/io/druid/cli/convert/ValueConverter.java diff --git a/api/src/main/java/io/druid/data/input/MapBasedRow.java b/api/src/main/java/io/druid/data/input/MapBasedRow.java index 0d9a0258151..0d4cbc8be8e 100644 --- a/api/src/main/java/io/druid/data/input/MapBasedRow.java +++ b/api/src/main/java/io/druid/data/input/MapBasedRow.java @@ -21,7 +21,6 @@ package io.druid.data.input; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.base.Function; import com.google.common.collect.Lists; import io.druid.java.util.common.parsers.ParseException; import org.joda.time.DateTime; @@ -35,13 +34,11 @@ import java.util.regex.Pattern; */ public class MapBasedRow 
implements Row { - private static final Function TO_STRING_INCLUDING_NULL = String::valueOf; + private static final Pattern LONG_PAT = Pattern.compile("[-|+]?\\d+"); private final DateTime timestamp; private final Map event; - private static final Pattern LONG_PAT = Pattern.compile("[-|+]?\\d+"); - @JsonCreator public MapBasedRow( @JsonProperty("timestamp") DateTime timestamp, @@ -88,9 +85,7 @@ public class MapBasedRow implements Row return Collections.emptyList(); } else if (dimValue instanceof List) { // guava's toString function fails on null objects, so please do not use it - return Lists.transform( - (List) dimValue, - TO_STRING_INCLUDING_NULL); + return Lists.transform((List) dimValue, String::valueOf); } else { return Collections.singletonList(String.valueOf(dimValue)); } diff --git a/api/src/main/java/io/druid/data/input/impl/DimensionSchema.java b/api/src/main/java/io/druid/data/input/impl/DimensionSchema.java index 69816e3914e..fd9acb62be1 100644 --- a/api/src/main/java/io/druid/data/input/impl/DimensionSchema.java +++ b/api/src/main/java/io/druid/data/input/impl/DimensionSchema.java @@ -77,7 +77,10 @@ public abstract class DimensionSchema SORTED_SET, ARRAY { @Override - public boolean needSorting() { return false;} + public boolean needSorting() + { + return false; + } }; public boolean needSorting() diff --git a/api/src/main/java/io/druid/data/input/impl/PrefetchableTextFilesFirehoseFactory.java b/api/src/main/java/io/druid/data/input/impl/PrefetchableTextFilesFirehoseFactory.java index b5bc768870f..96e2715b3d1 100644 --- a/api/src/main/java/io/druid/data/input/impl/PrefetchableTextFilesFirehoseFactory.java +++ b/api/src/main/java/io/druid/data/input/impl/PrefetchableTextFilesFirehoseFactory.java @@ -222,12 +222,10 @@ public abstract class PrefetchableTextFilesFirehoseFactory { if ((fetchFuture == null || fetchFuture.isDone()) && remainingBytes <= prefetchTriggerBytes) { - fetchFuture = fetchExecutor.submit( - () -> { - fetch(); - return null; - } - ); 
+ fetchFuture = fetchExecutor.submit(() -> { + fetch(); + return null; + }); } } diff --git a/api/src/main/java/io/druid/data/input/impl/TimestampSpec.java b/api/src/main/java/io/druid/data/input/impl/TimestampSpec.java index 2144e674e6d..aad2ea16871 100644 --- a/api/src/main/java/io/druid/data/input/impl/TimestampSpec.java +++ b/api/src/main/java/io/druid/data/input/impl/TimestampSpec.java @@ -155,7 +155,8 @@ public class TimestampSpec //simple merge strategy on timestampSpec that checks if all are equal or else //returns null. this can be improved in future but is good enough for most use-cases. - public static TimestampSpec mergeTimestampSpec(List toMerge) { + public static TimestampSpec mergeTimestampSpec(List toMerge) + { if (toMerge == null || toMerge.size() == 0) { return null; } diff --git a/api/src/main/java/io/druid/guice/JsonConfigProvider.java b/api/src/main/java/io/druid/guice/JsonConfigProvider.java index f91e7ca0fc1..4e9b5f64f98 100644 --- a/api/src/main/java/io/druid/guice/JsonConfigProvider.java +++ b/api/src/main/java/io/druid/guice/JsonConfigProvider.java @@ -146,11 +146,9 @@ public class JsonConfigProvider implements Provider> if (bindKey.getAnnotationType() != null) { supplierKey = Key.get(supType, bindKey.getAnnotationType()); - } - else if (bindKey.getAnnotation() != null) { + } else if (bindKey.getAnnotation() != null) { supplierKey = Key.get(supType, bindKey.getAnnotation()); - } - else { + } else { supplierKey = Key.get(supType); } diff --git a/api/src/main/java/io/druid/guice/JsonConfigurator.java b/api/src/main/java/io/druid/guice/JsonConfigurator.java index ef903320744..2327be61fba 100644 --- a/api/src/main/java/io/druid/guice/JsonConfigurator.java +++ b/api/src/main/java/io/druid/guice/JsonConfigurator.java @@ -112,7 +112,7 @@ public class JsonConfigurator List messages = Lists.newArrayList(); for (ConstraintViolation violation : violations) { - String path = ""; + StringBuilder path = new StringBuilder(); try { Class beanClazz = 
violation.getRootBeanClass(); final Iterator iter = violation.getPropertyPath().iterator(); @@ -123,18 +123,17 @@ public class JsonConfigurator final Field theField = beanClazz.getDeclaredField(fieldName); if (theField.getAnnotation(JacksonInject.class) != null) { - path = StringUtils.format(" -- Injected field[%s] not bound!?", fieldName); + path = new StringBuilder(StringUtils.format(" -- Injected field[%s] not bound!?", fieldName)); break; } JsonProperty annotation = theField.getAnnotation(JsonProperty.class); final boolean noAnnotationValue = annotation == null || Strings.isNullOrEmpty(annotation.value()); final String pathPart = noAnnotationValue ? fieldName : annotation.value(); - if (path.isEmpty()) { - path += pathPart; - } - else { - path += "." + pathPart; + if (path.length() == 0) { + path.append(pathPart); + } else { + path.append(".").append(pathPart); } } } @@ -143,7 +142,7 @@ public class JsonConfigurator throw Throwables.propagate(e); } - messages.add(StringUtils.format("%s - %s", path, violation.getMessage())); + messages.add(StringUtils.format("%s - %s", path.toString(), violation.getMessage())); } throw new ProvisionException( diff --git a/api/src/main/java/io/druid/guice/LifecycleModule.java b/api/src/main/java/io/druid/guice/LifecycleModule.java index 36af8ede809..2d9968cdc3c 100644 --- a/api/src/main/java/io/druid/guice/LifecycleModule.java +++ b/api/src/main/java/io/druid/guice/LifecycleModule.java @@ -146,7 +146,8 @@ public class LifecycleModule implements Module final Key> keyHolderKey = Key.get(new TypeLiteral>(){}, Names.named("lifecycle")); final Set eagerClasses = injector.getInstance(keyHolderKey); - Lifecycle lifecycle = new Lifecycle(){ + Lifecycle lifecycle = new Lifecycle() + { @Override public void start() throws Exception { diff --git a/api/src/main/java/io/druid/guice/LifecycleScope.java b/api/src/main/java/io/druid/guice/LifecycleScope.java index 95269baa95b..42a34979a3d 100644 --- 
a/api/src/main/java/io/druid/guice/LifecycleScope.java +++ b/api/src/main/java/io/druid/guice/LifecycleScope.java @@ -71,8 +71,7 @@ public class LifecycleScope implements Scope synchronized (instances) { if (lifecycle == null) { instances.add(retVal); - } - else { + } else { try { lifecycle.addMaybeStartManagedInstance(retVal, stage); } diff --git a/api/src/main/java/io/druid/guice/PolyBind.java b/api/src/main/java/io/druid/guice/PolyBind.java index 76991f248d9..fddeb98c309 100644 --- a/api/src/main/java/io/druid/guice/PolyBind.java +++ b/api/src/main/java/io/druid/guice/PolyBind.java @@ -123,13 +123,11 @@ public class PolyBind return MapBinder.newMapBinder( binder, TypeLiteral.get(String.class), interfaceType, interfaceKey.getAnnotation() ); - } - else if (interfaceKey.getAnnotationType() != null) { + } else if (interfaceKey.getAnnotationType() != null) { return MapBinder.newMapBinder( binder, TypeLiteral.get(String.class), interfaceType, interfaceKey.getAnnotationType() ); - } - else { + } else { return MapBinder.newMapBinder(binder, TypeLiteral.get(String.class), interfaceType); } } @@ -177,11 +175,9 @@ public class PolyBind final Map> implsMap; if (key.getAnnotation() != null) { implsMap = (Map>) injector.getInstance(Key.get(mapType, key.getAnnotation())); - } - else if (key.getAnnotationType() != null) { + } else if (key.getAnnotationType() != null) { implsMap = (Map>) injector.getInstance(Key.get(mapType, key.getAnnotation())); - } - else { + } else { implsMap = (Map>) injector.getInstance(Key.get(mapType)); } diff --git a/api/src/main/java/io/druid/segment/loading/DataSegmentPusher.java b/api/src/main/java/io/druid/segment/loading/DataSegmentPusher.java index 946725a884f..3076b5e7c1a 100644 --- a/api/src/main/java/io/druid/segment/loading/DataSegmentPusher.java +++ b/api/src/main/java/io/druid/segment/loading/DataSegmentPusher.java @@ -40,10 +40,14 @@ public interface DataSegmentPusher DataSegment push(File file, DataSegment segment) throws IOException; 
//use map instead of LoadSpec class to avoid dependency pollution. Map makeLoadSpec(URI finalIndexZipFilePath); - default String getStorageDir(DataSegment dataSegment) { + + default String getStorageDir(DataSegment dataSegment) + { return getDefaultStorageDir(dataSegment); } - default String makeIndexPathName(DataSegment dataSegment, String indexName) { + + default String makeIndexPathName(DataSegment dataSegment, String indexName) + { return StringUtils.format("./%s/%s", getStorageDir(dataSegment), indexName); } @@ -60,7 +64,8 @@ public interface DataSegmentPusher // If above format is ever changed, make sure to change it appropriately in other places // e.g. HDFSDataSegmentKiller uses this information to clean the version, interval and dataSource directories // on segment deletion if segment being deleted was the only segment - static String getDefaultStorageDir(DataSegment segment) { + static String getDefaultStorageDir(DataSegment segment) + { return JOINER.join( segment.getDataSource(), StringUtils.format("%s_%s", segment.getInterval().getStart(), segment.getInterval().getEnd()), diff --git a/api/src/main/java/io/druid/segment/loading/LoadSpec.java b/api/src/main/java/io/druid/segment/loading/LoadSpec.java index 3adef9c4513..d1945e16085 100644 --- a/api/src/main/java/io/druid/segment/loading/LoadSpec.java +++ b/api/src/main/java/io/druid/segment/loading/LoadSpec.java @@ -37,12 +37,17 @@ public interface LoadSpec public LoadSpecResult loadSegment(File destDir) throws SegmentLoadingException; // Hold interesting data about the results of the segment load - public static class LoadSpecResult{ + public static class LoadSpecResult + { private final long size; - public LoadSpecResult(long size){ + + public LoadSpecResult(long size) + { this.size = size; } - public long getSize(){ + + public long getSize() + { return this.size; } } diff --git a/api/src/main/java/io/druid/timeline/DataSegmentUtils.java b/api/src/main/java/io/druid/timeline/DataSegmentUtils.java index 
242c432bc18..4d8690e9576 100644 --- a/api/src/main/java/io/druid/timeline/DataSegmentUtils.java +++ b/api/src/main/java/io/druid/timeline/DataSegmentUtils.java @@ -95,7 +95,8 @@ public class DataSegmentUtils version, trail ); - } catch (IllegalArgumentException e) { + } + catch (IllegalArgumentException e) { return null; } } diff --git a/api/src/main/java/io/druid/timeline/partition/NoneShardSpec.java b/api/src/main/java/io/druid/timeline/partition/NoneShardSpec.java index b8374a253a3..c116c60c8b8 100644 --- a/api/src/main/java/io/druid/timeline/partition/NoneShardSpec.java +++ b/api/src/main/java/io/druid/timeline/partition/NoneShardSpec.java @@ -35,12 +35,18 @@ public class NoneShardSpec implements ShardSpec private final static NoneShardSpec INSTANCE = new NoneShardSpec(); @JsonCreator - public static NoneShardSpec instance() { return INSTANCE; } + public static NoneShardSpec instance() + { + return INSTANCE; + } + /** + * @deprecated use {@link #instance()} instead + */ @Deprecated - // Use NoneShardSpec.instance() instead - public NoneShardSpec(){ - + public NoneShardSpec() + { + // empty } @Override diff --git a/api/src/main/java/io/druid/utils/Runnables.java b/api/src/main/java/io/druid/utils/Runnables.java index a70ff66de48..d6cec40db15 100644 --- a/api/src/main/java/io/druid/utils/Runnables.java +++ b/api/src/main/java/io/druid/utils/Runnables.java @@ -23,10 +23,8 @@ package io.druid.utils; */ public class Runnables { - public static Runnable getNoopRunnable(){ - return new Runnable(){ - @Override - public void run(){} - }; + public static Runnable getNoopRunnable() + { + return () -> {}; } } diff --git a/api/src/test/java/io/druid/data/input/impl/JSONParseSpecTest.java b/api/src/test/java/io/druid/data/input/impl/JSONParseSpecTest.java index 3407496cd9c..69834c9aa54 100644 --- a/api/src/test/java/io/druid/data/input/impl/JSONParseSpecTest.java +++ b/api/src/test/java/io/druid/data/input/impl/JSONParseSpecTest.java @@ -19,19 +19,18 @@ package 
io.druid.data.input.impl; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.collect.ImmutableList; import io.druid.TestObjectMapper; +import org.junit.Assert; +import org.junit.Test; import java.io.IOException; import java.util.Arrays; import java.util.HashMap; -import org.junit.Assert; -import org.junit.Test; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableList; - -public class JSONParseSpecTest { +public class JSONParseSpecTest +{ private final ObjectMapper jsonMapper = new TestObjectMapper(); @Test diff --git a/aws-common/src/main/java/io/druid/common/aws/AWSCredentialsConfig.java b/aws-common/src/main/java/io/druid/common/aws/AWSCredentialsConfig.java index 7c1d728c31e..fbf3bc72ac0 100644 --- a/aws-common/src/main/java/io/druid/common/aws/AWSCredentialsConfig.java +++ b/aws-common/src/main/java/io/druid/common/aws/AWSCredentialsConfig.java @@ -44,5 +44,8 @@ public class AWSCredentialsConfig return secretKey; } - public String getFileSessionCredentials() { return fileSessionCredentials; } + public String getFileSessionCredentials() + { + return fileSessionCredentials; + } } diff --git a/aws-common/src/main/java/io/druid/common/aws/AWSCredentialsUtils.java b/aws-common/src/main/java/io/druid/common/aws/AWSCredentialsUtils.java index 75437ac491d..c38254d6c78 100644 --- a/aws-common/src/main/java/io/druid/common/aws/AWSCredentialsUtils.java +++ b/aws-common/src/main/java/io/druid/common/aws/AWSCredentialsUtils.java @@ -27,7 +27,8 @@ import com.amazonaws.auth.profile.ProfileCredentialsProvider; public class AWSCredentialsUtils { - public static AWSCredentialsProviderChain defaultAWSCredentialsProviderChain(final AWSCredentialsConfig config) { + public static AWSCredentialsProviderChain defaultAWSCredentialsProviderChain(final AWSCredentialsConfig config) + { return new AWSCredentialsProviderChain( new ConfigDrivenAwsCredentialsConfigProvider(config), new 
LazyFileSessionCredentialsProvider(config), diff --git a/aws-common/src/main/java/io/druid/common/aws/ConfigDrivenAwsCredentialsConfigProvider.java b/aws-common/src/main/java/io/druid/common/aws/ConfigDrivenAwsCredentialsConfigProvider.java index 8ec52378078..33eca26a2c3 100644 --- a/aws-common/src/main/java/io/druid/common/aws/ConfigDrivenAwsCredentialsConfigProvider.java +++ b/aws-common/src/main/java/io/druid/common/aws/ConfigDrivenAwsCredentialsConfigProvider.java @@ -28,7 +28,8 @@ public class ConfigDrivenAwsCredentialsConfigProvider implements AWSCredentialsP { private AWSCredentialsConfig config; - public ConfigDrivenAwsCredentialsConfigProvider(AWSCredentialsConfig config) { + public ConfigDrivenAwsCredentialsConfigProvider(AWSCredentialsConfig config) + { this.config = config; } @@ -36,14 +37,17 @@ public class ConfigDrivenAwsCredentialsConfigProvider implements AWSCredentialsP public AWSCredentials getCredentials() { if (!Strings.isNullOrEmpty(config.getAccessKey()) && !Strings.isNullOrEmpty(config.getSecretKey())) { - return new AWSCredentials() { + return new AWSCredentials() + { @Override - public String getAWSAccessKeyId() { + public String getAWSAccessKeyId() + { return config.getAccessKey(); } @Override - public String getAWSSecretKey() { + public String getAWSSecretKey() + { return config.getSecretKey(); } }; diff --git a/aws-common/src/main/java/io/druid/common/aws/FileSessionCredentialsProvider.java b/aws-common/src/main/java/io/druid/common/aws/FileSessionCredentialsProvider.java index be3aeb7b5c1..4d6d5a328e7 100644 --- a/aws-common/src/main/java/io/druid/common/aws/FileSessionCredentialsProvider.java +++ b/aws-common/src/main/java/io/druid/common/aws/FileSessionCredentialsProvider.java @@ -32,48 +32,52 @@ import java.util.Properties; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; -public class FileSessionCredentialsProvider implements AWSCredentialsProvider { +public class 
FileSessionCredentialsProvider implements AWSCredentialsProvider +{ private final String sessionCredentials; private volatile String sessionToken; private volatile String accessKey; private volatile String secretKey; - private final ScheduledExecutorService scheduler = Execs.scheduledSingleThreaded("FileSessionCredentialsProviderRefresh-%d"); + private final ScheduledExecutorService scheduler = + Execs.scheduledSingleThreaded("FileSessionCredentialsProviderRefresh-%d"); - public FileSessionCredentialsProvider(String sessionCredentials) { + public FileSessionCredentialsProvider(String sessionCredentials) + { this.sessionCredentials = sessionCredentials; refresh(); - scheduler.scheduleAtFixedRate(new Runnable() { - @Override - public void run() { - refresh(); - } - }, 1, 1, TimeUnit.HOURS); // refresh every hour + scheduler.scheduleAtFixedRate(this::refresh, 1, 1, TimeUnit.HOURS); // refresh every hour } @Override - public AWSCredentials getCredentials() { - return new AWSSessionCredentials() { + public AWSCredentials getCredentials() + { + return new AWSSessionCredentials() + { @Override - public String getSessionToken() { + public String getSessionToken() + { return sessionToken; } @Override - public String getAWSAccessKeyId() { + public String getAWSAccessKeyId() + { return accessKey; } @Override - public String getAWSSecretKey() { + public String getAWSSecretKey() + { return secretKey; } }; } @Override - public void refresh() { + public void refresh() + { try { Properties props = new Properties(); InputStream is = new FileInputStream(new File(sessionCredentials)); @@ -83,7 +87,8 @@ public class FileSessionCredentialsProvider implements AWSCredentialsProvider { sessionToken = props.getProperty("sessionToken"); accessKey = props.getProperty("accessKey"); secretKey = props.getProperty("secretKey"); - } catch (IOException e) { + } + catch (IOException e) { throw new RuntimeException("cannot refresh AWS credentials", e); } } diff --git 
a/aws-common/src/main/java/io/druid/common/aws/LazyFileSessionCredentialsProvider.java b/aws-common/src/main/java/io/druid/common/aws/LazyFileSessionCredentialsProvider.java index ccab7ed3145..029ed348577 100644 --- a/aws-common/src/main/java/io/druid/common/aws/LazyFileSessionCredentialsProvider.java +++ b/aws-common/src/main/java/io/druid/common/aws/LazyFileSessionCredentialsProvider.java @@ -27,11 +27,13 @@ public class LazyFileSessionCredentialsProvider implements AWSCredentialsProvide private AWSCredentialsConfig config; private FileSessionCredentialsProvider provider; - public LazyFileSessionCredentialsProvider(AWSCredentialsConfig config) { + public LazyFileSessionCredentialsProvider(AWSCredentialsConfig config) + { this.config = config; } - private FileSessionCredentialsProvider getUnderlyingProvider() { + private FileSessionCredentialsProvider getUnderlyingProvider() + { if (provider == null) { synchronized (config) { if (provider == null) { @@ -49,7 +51,8 @@ public class LazyFileSessionCredentialsProvider implements AWSCredentialsProvide } @Override - public void refresh() { + public void refresh() + { getUnderlyingProvider().refresh(); } } diff --git a/benchmarks/src/main/java/io/druid/benchmark/BitmapIterationBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/BitmapIterationBenchmark.java index f68f025764b..2551df1071b 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/BitmapIterationBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/BitmapIterationBenchmark.java @@ -77,7 +77,8 @@ public class BitmapIterationBenchmark @Param({"1000000"}) public int size; - private BitmapFactory makeFactory() { + private BitmapFactory makeFactory() + { switch (bitmapAlgo) { case "bitset": return new BitSetBitmapFactory(); diff --git a/benchmarks/src/main/java/io/druid/benchmark/FlattenJSONBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/FlattenJSONBenchmark.java index eedd9a530ea..2ea82e17d1d 100644 --- 
a/benchmarks/src/main/java/io/druid/benchmark/FlattenJSONBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/FlattenJSONBenchmark.java @@ -111,7 +111,8 @@ public class FlattenJSONBenchmark return parsed; } - public static void main(String[] args) throws RunnerException { + public static void main(String[] args) throws RunnerException + { Options opt = new OptionsBuilder() .include(FlattenJSONBenchmark.class.getSimpleName()) .warmupIterations(1) diff --git a/benchmarks/src/main/java/io/druid/benchmark/datagen/BenchmarkColumnSchema.java b/benchmarks/src/main/java/io/druid/benchmark/datagen/BenchmarkColumnSchema.java index b477f84add5..a7490a90663 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/datagen/BenchmarkColumnSchema.java +++ b/benchmarks/src/main/java/io/druid/benchmark/datagen/BenchmarkColumnSchema.java @@ -240,7 +240,7 @@ public class BenchmarkColumnSchema schema.startInt = startInt; schema.endInt = endInt; return schema; - }; + } public static BenchmarkColumnSchema makeEnumeratedSequential( String name, @@ -261,7 +261,7 @@ public class BenchmarkColumnSchema ); schema.enumeratedValues = enumeratedValues; return schema; - }; + } public static BenchmarkColumnSchema makeDiscreteUniform( String name, @@ -284,7 +284,7 @@ public class BenchmarkColumnSchema schema.startInt = startInt; schema.endInt = endInt; return schema; - }; + } public static BenchmarkColumnSchema makeEnumeratedDiscreteUniform( String name, @@ -305,7 +305,7 @@ public class BenchmarkColumnSchema ); schema.enumeratedValues = enumeratedValues; return schema; - }; + } public static BenchmarkColumnSchema makeContinuousUniform( String name, @@ -328,8 +328,7 @@ public class BenchmarkColumnSchema schema.startDouble = startDouble; schema.endDouble = endDouble; return schema; - }; - + } public static BenchmarkColumnSchema makeNormal( String name, @@ -353,7 +352,7 @@ public class BenchmarkColumnSchema schema.mean = mean; schema.standardDeviation = standardDeviation; return schema; - 
}; + } public static BenchmarkColumnSchema makeZipf( String name, @@ -378,7 +377,7 @@ public class BenchmarkColumnSchema schema.endInt = endInt; schema.zipfExponent = zipfExponent; return schema; - }; + } public static BenchmarkColumnSchema makeEnumeratedZipf( String name, @@ -401,8 +400,7 @@ public class BenchmarkColumnSchema schema.enumeratedValues = enumeratedValues; schema.zipfExponent = zipfExponent; return schema; - }; - + } public static BenchmarkColumnSchema makeEnumerated( String name, @@ -425,5 +423,5 @@ public class BenchmarkColumnSchema schema.enumeratedValues = enumeratedValues; schema.enumeratedProbabilities = enumeratedProbabilities; return schema; - }; + } } diff --git a/benchmarks/src/main/java/io/druid/benchmark/query/QueryBenchmarkUtil.java b/benchmarks/src/main/java/io/druid/benchmark/query/QueryBenchmarkUtil.java index a06bb2ba975..3336d2946ff 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/query/QueryBenchmarkUtil.java +++ b/benchmarks/src/main/java/io/druid/benchmark/query/QueryBenchmarkUtil.java @@ -55,8 +55,8 @@ public class QueryBenchmarkUtil { return new IntervalChunkingQueryRunnerDecorator(null, null, null) { @Override - public QueryRunner decorate(final QueryRunner delegate, - QueryToolChest> toolChest) { + public QueryRunner decorate(final QueryRunner delegate, QueryToolChest> toolChest) + { return new QueryRunner() { @Override public Sequence run(QueryPlus queryPlus, Map responseContext) diff --git a/benchmarks/src/test/java/io/druid/benchmark/BenchmarkDataGeneratorTest.java b/benchmarks/src/test/java/io/druid/benchmark/BenchmarkDataGeneratorTest.java index 1305679cd78..dd6f271f066 100644 --- a/benchmarks/src/test/java/io/druid/benchmark/BenchmarkDataGeneratorTest.java +++ b/benchmarks/src/test/java/io/druid/benchmark/BenchmarkDataGeneratorTest.java @@ -37,7 +37,8 @@ import java.util.Map; public class BenchmarkDataGeneratorTest { @Test - public void testSequential() throws Exception { + public void testSequential() throws 
Exception + { List schemas = new ArrayList<>(); RowValueTracker tracker = new RowValueTracker(); @@ -86,7 +87,8 @@ public class BenchmarkDataGeneratorTest } @Test - public void testDiscreteUniform() throws Exception { + public void testDiscreteUniform() throws Exception + { List schemas = new ArrayList<>(); RowValueTracker tracker = new RowValueTracker(); @@ -150,7 +152,8 @@ public class BenchmarkDataGeneratorTest @Test - public void testRoundedNormal() throws Exception { + public void testRoundedNormal() throws Exception + { List schemas = new ArrayList<>(); RowValueTracker tracker = new RowValueTracker(); @@ -192,7 +195,8 @@ public class BenchmarkDataGeneratorTest } @Test - public void testZipf() throws Exception { + public void testZipf() throws Exception + { List schemas = new ArrayList<>(); RowValueTracker tracker = new RowValueTracker(); @@ -246,7 +250,8 @@ public class BenchmarkDataGeneratorTest } @Test - public void testEnumerated() throws Exception { + public void testEnumerated() throws Exception + { List schemas = new ArrayList<>(); RowValueTracker tracker = new RowValueTracker(); @@ -274,7 +279,8 @@ public class BenchmarkDataGeneratorTest } @Test - public void testNormal() throws Exception { + public void testNormal() throws Exception + { List schemas = new ArrayList<>(); RowValueTracker tracker = new RowValueTracker(); @@ -316,7 +322,8 @@ public class BenchmarkDataGeneratorTest } @Test - public void testRealUniform() throws Exception { + public void testRealUniform() throws Exception + { List schemas = new ArrayList<>(); RowValueTracker tracker = new RowValueTracker(); @@ -356,7 +363,8 @@ public class BenchmarkDataGeneratorTest } @Test - public void testIntervalBasedTimeGeneration() throws Exception { + public void testIntervalBasedTimeGeneration() throws Exception + { List schemas = new ArrayList<>(); schemas.add( @@ -388,11 +396,13 @@ public class BenchmarkDataGeneratorTest { private Map> dimensionMap; - public RowValueTracker() { + public 
RowValueTracker() + { dimensionMap = new HashMap<>(); } - public void addRow(InputRow row) { + public void addRow(InputRow row) + { for (String dim : row.getDimensions()) { if (dimensionMap.get(dim) == null) { dimensionMap.put(dim, new HashMap()); diff --git a/benchmarks/src/test/java/io/druid/benchmark/FlattenJSONBenchmarkUtilTest.java b/benchmarks/src/test/java/io/druid/benchmark/FlattenJSONBenchmarkUtilTest.java index 324122ab337..34ac73177f6 100644 --- a/benchmarks/src/test/java/io/druid/benchmark/FlattenJSONBenchmarkUtilTest.java +++ b/benchmarks/src/test/java/io/druid/benchmark/FlattenJSONBenchmarkUtilTest.java @@ -29,7 +29,8 @@ import java.util.Map; public class FlattenJSONBenchmarkUtilTest { @Test - public void testOne() throws Exception { + public void testOne() throws Exception + { FlattenJSONBenchmarkUtil eventGen = new FlattenJSONBenchmarkUtil(); String newEvent = eventGen.generateFlatEvent(); @@ -45,7 +46,8 @@ public class FlattenJSONBenchmarkUtilTest checkEvent2(event2); } - public void checkEvent1(Map event) { + public void checkEvent1(Map event) + { Assert.assertEquals("2015-09-12T12:10:53.155Z", event.get("ts").toString()); Assert.assertEquals("-1170723877", event.get("d1").toString()); Assert.assertEquals("238976084", event.get("d2").toString()); @@ -74,7 +76,8 @@ public class FlattenJSONBenchmarkUtilTest Assert.assertEquals("1414285347", event.get("ae1[2].e1.d2").toString()); } - public void checkEvent2(Map event2) { + public void checkEvent2(Map event2) + { Assert.assertEquals("728062074", event2.get("ae1[0].d1").toString()); Assert.assertEquals("1701675101", event2.get("ae1[1].d1").toString()); Assert.assertEquals("1887775139", event2.get("ae1[2].e1.d2").toString()); diff --git a/codestyle/checkstyle-suppressions.xml b/codestyle/checkstyle-suppressions.xml index 57f6482954b..eedddb162c5 100644 --- a/codestyle/checkstyle-suppressions.xml +++ b/codestyle/checkstyle-suppressions.xml @@ -37,6 +37,8 @@ + + diff --git a/codestyle/checkstyle.xml 
b/codestyle/checkstyle.xml index 860df49e13e..9d51b54a7b3 100644 --- a/codestyle/checkstyle.xml +++ b/codestyle/checkstyle.xml @@ -51,6 +51,23 @@ + + + + + + + + + + + + + + + + + diff --git a/common/src/main/java/io/druid/collections/DefaultBlockingPool.java b/common/src/main/java/io/druid/collections/DefaultBlockingPool.java index 6e30add171c..602d2e6a7dc 100644 --- a/common/src/main/java/io/druid/collections/DefaultBlockingPool.java +++ b/common/src/main/java/io/druid/collections/DefaultBlockingPool.java @@ -120,7 +120,8 @@ public class DefaultBlockingPool implements BlockingPool lock.lock(); try { return objects.isEmpty() ? null : objects.pop(); - } finally { + } + finally { lock.unlock(); } } @@ -138,7 +139,8 @@ public class DefaultBlockingPool implements BlockingPool nanos = notEnough.awaitNanos(nanos); } return objects.pop(); - } finally { + } + finally { lock.unlock(); } } @@ -152,7 +154,8 @@ public class DefaultBlockingPool implements BlockingPool notEnough.await(); } return objects.pop(); - } finally { + } + finally { lock.unlock(); } } @@ -211,7 +214,8 @@ public class DefaultBlockingPool implements BlockingPool } return list; } - } finally { + } + finally { lock.unlock(); } } @@ -233,7 +237,8 @@ public class DefaultBlockingPool implements BlockingPool list.add(objects.pop()); } return list; - } finally { + } + finally { lock.unlock(); } } @@ -251,7 +256,8 @@ public class DefaultBlockingPool implements BlockingPool list.add(objects.pop()); } return list; - } finally { + } + finally { lock.unlock(); } } @@ -272,7 +278,8 @@ public class DefaultBlockingPool implements BlockingPool } else { throw new ISE("Cannot exceed pre-configured maximum size"); } - } finally { + } + finally { lock.unlock(); } } @@ -290,7 +297,8 @@ public class DefaultBlockingPool implements BlockingPool } else { throw new ISE("Cannot exceed pre-configured maximum size"); } - } finally { + } + finally { lock.unlock(); } } diff --git 
a/common/src/main/java/io/druid/collections/OrderedMergeSequence.java b/common/src/main/java/io/druid/collections/OrderedMergeSequence.java index 525b94357e4..19d02be9164 100644 --- a/common/src/main/java/io/druid/collections/OrderedMergeSequence.java +++ b/common/src/main/java/io/druid/collections/OrderedMergeSequence.java @@ -119,8 +119,7 @@ public class OrderedMergeSequence implements Sequence throw Throwables.propagate(e); } return null; - } - else { + } else { yield(); } @@ -144,19 +143,16 @@ public class OrderedMergeSequence implements Sequence Yielder yielder; if (oldDudeAtCrosswalk.isDone()) { yielder = pQueue.remove(); - } - else if (pQueue.isEmpty()) { + } else if (pQueue.isEmpty()) { yielder = oldDudeAtCrosswalk.get(); oldDudeAtCrosswalk = oldDudeAtCrosswalk.next(null); - } - else { + } else { Yielder queueYielder = pQueue.peek(); Yielder iterYielder = oldDudeAtCrosswalk.get(); if (ordering.compare(queueYielder.get(), iterYielder.get()) <= 0) { yielder = pQueue.remove(); - } - else { + } else { yielder = oldDudeAtCrosswalk.get(); oldDudeAtCrosswalk = oldDudeAtCrosswalk.next(null); } @@ -171,8 +167,7 @@ public class OrderedMergeSequence implements Sequence catch (IOException e) { throw Throwables.propagate(e); } - } - else { + } else { pQueue.add(yielder); } } diff --git a/common/src/main/java/io/druid/collections/StupidPool.java b/common/src/main/java/io/druid/collections/StupidPool.java index a815325ca37..5c8d2bb01c5 100644 --- a/common/src/main/java/io/druid/collections/StupidPool.java +++ b/common/src/main/java/io/druid/collections/StupidPool.java @@ -118,7 +118,8 @@ public class StupidPool implements NonBlockingPool } @VisibleForTesting - long poolSize() { + long poolSize() + { return poolSize.get(); } diff --git a/common/src/main/java/io/druid/common/config/Log4jShutdown.java b/common/src/main/java/io/druid/common/config/Log4jShutdown.java index a7a2b550737..8be4ab4434b 100644 --- a/common/src/main/java/io/druid/common/config/Log4jShutdown.java +++ 
b/common/src/main/java/io/druid/common/config/Log4jShutdown.java @@ -150,7 +150,8 @@ public class Log4jShutdown implements ShutdownCallbackRegistry, LifeCycle @GuardedBy("this") private State current; - private SynchronizedStateHolder(State initial) { + private SynchronizedStateHolder(State initial) + { current = initial; } diff --git a/common/src/main/java/io/druid/common/guava/CombiningSequence.java b/common/src/main/java/io/druid/common/guava/CombiningSequence.java index 6cb465d8960..58cd46bc31a 100644 --- a/common/src/main/java/io/druid/common/guava/CombiningSequence.java +++ b/common/src/main/java/io/druid/common/guava/CombiningSequence.java @@ -107,8 +107,7 @@ public class CombiningSequence implements Sequence } else { finalYielder = Yielders.done(null, yielder); } - } - else { + } else { return Yielders.done(combiningAccumulator.getRetVal(), yielder); } } diff --git a/common/src/main/java/io/druid/common/utils/PropUtils.java b/common/src/main/java/io/druid/common/utils/PropUtils.java index 53cd4017361..1d73834ebd5 100644 --- a/common/src/main/java/io/druid/common/utils/PropUtils.java +++ b/common/src/main/java/io/druid/common/utils/PropUtils.java @@ -19,10 +19,10 @@ package io.druid.common.utils; -import java.util.Properties; - import io.druid.java.util.common.ISE; +import java.util.Properties; + /** */ public class PropUtils @@ -50,8 +50,7 @@ public class PropUtils if (retVal == null) { if (defaultValue == null) { throw new ISE("Property[%s] not specified.", property); - } - else { + } else { return defaultValue; } } diff --git a/common/src/main/java/io/druid/common/utils/SocketUtil.java b/common/src/main/java/io/druid/common/utils/SocketUtil.java index 4577d958053..85be94e86a5 100644 --- a/common/src/main/java/io/druid/common/utils/SocketUtil.java +++ b/common/src/main/java/io/druid/common/utils/SocketUtil.java @@ -19,12 +19,12 @@ package io.druid.common.utils; +import io.druid.java.util.common.ISE; + import java.io.IOException; import 
java.net.ServerSocket; import java.util.Random; -import io.druid.java.util.common.ISE; - /** */ public class SocketUtil @@ -38,7 +38,8 @@ public class SocketUtil return findOpenPortFrom(startPort); } - public static int findOpenPortFrom(int startPort) { + public static int findOpenPortFrom(int startPort) + { int currPort = startPort; while (currPort < 0xffff) { diff --git a/common/src/main/java/io/druid/guice/DruidSecondaryModule.java b/common/src/main/java/io/druid/guice/DruidSecondaryModule.java index f879ea48302..e56cc6c2723 100644 --- a/common/src/main/java/io/druid/guice/DruidSecondaryModule.java +++ b/common/src/main/java/io/druid/guice/DruidSecondaryModule.java @@ -85,7 +85,8 @@ public class DruidSecondaryModule implements Module return smileMapper; } - private void setupJackson(Injector injector, final ObjectMapper mapper) { + private void setupJackson(Injector injector, final ObjectMapper mapper) + { final GuiceAnnotationIntrospector guiceIntrospector = new GuiceAnnotationIntrospector(); mapper.setInjectableValues(new GuiceInjectableValues(injector)); diff --git a/common/src/main/java/io/druid/guice/GuiceInjectableValues.java b/common/src/main/java/io/druid/guice/GuiceInjectableValues.java index 11f18018423..60f1a8dcdf7 100644 --- a/common/src/main/java/io/druid/guice/GuiceInjectableValues.java +++ b/common/src/main/java/io/druid/guice/GuiceInjectableValues.java @@ -33,11 +33,17 @@ public class GuiceInjectableValues extends InjectableValues { private final Injector injector; - public GuiceInjectableValues(Injector injector) {this.injector = injector;} + public GuiceInjectableValues(Injector injector) + { + this.injector = injector; + } @Override public Object findInjectableValue( - Object valueId, DeserializationContext ctxt, BeanProperty forProperty, Object beanInstance + Object valueId, + DeserializationContext ctxt, + BeanProperty forProperty, + Object beanInstance ) { // From the docs: "Object that identifies value to inject; may be a simple name or 
more complex identifier object, diff --git a/common/src/main/java/io/druid/guice/JacksonConfigProvider.java b/common/src/main/java/io/druid/guice/JacksonConfigProvider.java index 24bc5deaaa8..13043638a26 100644 --- a/common/src/main/java/io/druid/guice/JacksonConfigProvider.java +++ b/common/src/main/java/io/druid/guice/JacksonConfigProvider.java @@ -85,8 +85,7 @@ public class JacksonConfigProvider implements Provider> { if (clazz == null) { return DSuppliers.of(configManager.watch(key, typeRef, defaultVal)); - } - else { + } else { return DSuppliers.of(configManager.watch(key, clazz, defaultVal)); } } diff --git a/common/src/main/java/io/druid/metadata/DefaultPasswordProvider.java b/common/src/main/java/io/druid/metadata/DefaultPasswordProvider.java index 8c15758b161..65a172cb721 100644 --- a/common/src/main/java/io/druid/metadata/DefaultPasswordProvider.java +++ b/common/src/main/java/io/druid/metadata/DefaultPasswordProvider.java @@ -46,7 +46,8 @@ public class DefaultPasswordProvider implements PasswordProvider } @Override - public String toString() { + public String toString() + { return this.getClass().getCanonicalName(); } diff --git a/common/src/test/java/io/druid/collections/ReferenceCountingResourceHolderTest.java b/common/src/test/java/io/druid/collections/ReferenceCountingResourceHolderTest.java index 087860e1724..bd13d7e4515 100644 --- a/common/src/test/java/io/druid/collections/ReferenceCountingResourceHolderTest.java +++ b/common/src/test/java/io/druid/collections/ReferenceCountingResourceHolderTest.java @@ -45,20 +45,16 @@ public class ReferenceCountingResourceHolderTest final ReferenceCountingResourceHolder resourceHolder = makeReleasingHandler(released); List threads = new ArrayList<>(); for (int i = 0; i < 100; i++) { - Thread thread = new Thread() { - @Override - public void run() - { - try (Releaser r = resourceHolder.increment()) { - try { - Thread.sleep(1); - } - catch (InterruptedException e) { - throw new RuntimeException(e); - } + Thread 
thread = new Thread(() -> { + try (Releaser r = resourceHolder.increment()) { + try { + Thread.sleep(1); + } + catch (InterruptedException e) { + throw new RuntimeException(e); } } - }; + }); thread.start(); threads.add(thread); } diff --git a/common/src/test/java/io/druid/common/utils/VMUtilsTest.java b/common/src/test/java/io/druid/common/utils/VMUtilsTest.java index ac4d659c47f..acbffb07de9 100644 --- a/common/src/test/java/io/druid/common/utils/VMUtilsTest.java +++ b/common/src/test/java/io/druid/common/utils/VMUtilsTest.java @@ -30,9 +30,11 @@ public class VMUtilsTest try { long maxMemory = VMUtils.getMaxDirectMemory(); Assert.assertTrue((maxMemory > 0)); - } catch (UnsupportedOperationException expected) { + } + catch (UnsupportedOperationException expected) { Assert.assertTrue(true); - } catch (RuntimeException expected) { + } + catch (RuntimeException expected) { Assert.assertTrue(true); } } diff --git a/common/src/test/java/io/druid/metadata/DefaultPasswordProviderTest.java b/common/src/test/java/io/druid/metadata/DefaultPasswordProviderTest.java index 4f4efa7c7c4..052a82833dd 100644 --- a/common/src/test/java/io/druid/metadata/DefaultPasswordProviderTest.java +++ b/common/src/test/java/io/druid/metadata/DefaultPasswordProviderTest.java @@ -29,26 +29,30 @@ public class DefaultPasswordProviderTest private static final ObjectMapper jsonMapper = new ObjectMapper(); @Test - public void testExplicitConstruction() { + public void testExplicitConstruction() + { DefaultPasswordProvider pp = new DefaultPasswordProvider(pwd); Assert.assertEquals(pwd, pp.getPassword()); } @Test - public void testFromStringConstruction() { + public void testFromStringConstruction() + { DefaultPasswordProvider pp = DefaultPasswordProvider.fromString(pwd); Assert.assertEquals(pwd, pp.getPassword()); } @Test - public void testDeserializationFromJsonString() throws Exception { + public void testDeserializationFromJsonString() throws Exception + { PasswordProvider pp = 
jsonMapper.readValue("\"" + pwd + "\"", PasswordProvider.class); Assert.assertEquals(pwd, pp.getPassword()); } @Test - public void testDeserializationFromJson() throws Exception { + public void testDeserializationFromJson() throws Exception + { PasswordProvider pp = jsonMapper.readValue( "{\"type\": \"default\", \"password\": \"" + pwd + "\"}", PasswordProvider.class); diff --git a/extendedset/src/main/java/io/druid/extendedset/intset/ConciseSet.java b/extendedset/src/main/java/io/druid/extendedset/intset/ConciseSet.java index 4f28892e725..4bf6740e234 100755 --- a/extendedset/src/main/java/io/druid/extendedset/intset/ConciseSet.java +++ b/extendedset/src/main/java/io/druid/extendedset/intset/ConciseSet.java @@ -716,16 +716,14 @@ public class ConciseSet extends AbstractIntSet implements Serializable int minCount = Math.min(thisItr.count, otherItr.count); res.appendFill(minCount, operator.combineLiterals(thisItr.word, otherItr.word)); //noinspection NonShortCircuitBooleanExpression - if (!thisItr.prepareNext(minCount) | !otherItr.prepareNext(minCount)) // NOT || - { + if (!thisItr.prepareNext(minCount) | /* NOT || */ !otherItr.prepareNext(minCount)) { break; } } else { res.appendLiteral(operator.combineLiterals(thisItr.toLiteral(), otherItr.word)); thisItr.word--; //noinspection NonShortCircuitBooleanExpression - if (!thisItr.prepareNext(1) | !otherItr.prepareNext()) // do NOT use "||" - { + if (!thisItr.prepareNext(1) | /* do NOT use "||" */ !otherItr.prepareNext()) { break; } } @@ -733,15 +731,13 @@ public class ConciseSet extends AbstractIntSet implements Serializable res.appendLiteral(operator.combineLiterals(thisItr.word, otherItr.toLiteral())); otherItr.word--; //noinspection NonShortCircuitBooleanExpression - if (!thisItr.prepareNext() | !otherItr.prepareNext(1)) // do NOT use "||" - { + if (!thisItr.prepareNext() | /* do NOT use "||" */ !otherItr.prepareNext(1)) { break; } } else { res.appendLiteral(operator.combineLiterals(thisItr.word, otherItr.word)); 
//noinspection NonShortCircuitBooleanExpression - if (!thisItr.prepareNext() | !otherItr.prepareNext()) // do NOT use "||" - { + if (!thisItr.prepareNext() | /* do NOT use "||" */ !otherItr.prepareNext()) { break; } } @@ -841,29 +837,25 @@ public class ConciseSet extends AbstractIntSet implements Serializable if ((ConciseSetUtils.SEQUENCE_BIT & thisItr.word & otherItr.word) != 0) { res += maxLiteralLengthMultiplication(minCount); } - if (!thisItr.prepareNext(minCount) | !otherItr.prepareNext(minCount)) // NOT || - { + if (!thisItr.prepareNext(minCount) | /* NOT || */ !otherItr.prepareNext(minCount)) { break; } } else { res += getLiteralBitCount(thisItr.toLiteral() & otherItr.word); thisItr.word--; - if (!thisItr.prepareNext(1) | !otherItr.prepareNext()) // do NOT use "||" - { + if (!thisItr.prepareNext(1) | /* do NOT use "||" */ !otherItr.prepareNext()) { break; } } } else if (!otherItr.isLiteral) { res += getLiteralBitCount(thisItr.word & otherItr.toLiteral()); otherItr.word--; - if (!thisItr.prepareNext() | !otherItr.prepareNext(1)) // do NOT use "||" - { + if (!thisItr.prepareNext() | /* do NOT use "||" */ !otherItr.prepareNext(1)) { break; } } else { res += getLiteralBitCount(thisItr.word & otherItr.word); - if (!thisItr.prepareNext() | !otherItr.prepareNext()) // do NOT use "||" - { + if (!thisItr.prepareNext() | /* do NOT use "||" */ !otherItr.prepareNext()) { break; } } @@ -937,10 +929,9 @@ public class ConciseSet extends AbstractIntSet implements Serializable } } else { setBitsInCurrentWord = sequenceLength - 1; - if (position < setBitsInCurrentWord) - // check whether the desired set bit is after the - // flipped bit (or after the first block) - { + if (position < setBitsInCurrentWord) { + // check whether the desired set bit is after the + // flipped bit (or after the first block) return firstSetBitInWord + position + (position < getFlippedBit(w) ? 
0 : 1); } } @@ -1132,13 +1123,11 @@ public class ConciseSet extends AbstractIntSet implements Serializable // complement each word for (int i = 0; i <= lastWordIndex; i++) { int w = words[i]; - if (isLiteral(w)) - // negate the bits and set the most significant bit to 1 - { + if (isLiteral(w)) { + // negate the bits and set the most significant bit to 1 words[i] = ConciseSetUtils.ALL_ZEROS_LITERAL | ~w; - } else - // switch the sequence type - { + } else { + // switch the sequence type words[i] ^= ConciseSetUtils.SEQUENCE_BIT; } } @@ -1724,8 +1713,7 @@ public class ConciseSet extends AbstractIntSet implements Serializable return true; } //noinspection NonShortCircuitBooleanExpression - if (!thisItr.prepareNext(minCount) | !otherItr.prepareNext(minCount)) // NOT || - { + if (!thisItr.prepareNext(minCount) | /* NOT || */ !otherItr.prepareNext(minCount)) { return false; } } else { @@ -1734,8 +1722,7 @@ public class ConciseSet extends AbstractIntSet implements Serializable } thisItr.word--; //noinspection NonShortCircuitBooleanExpression - if (!thisItr.prepareNext(1) | !otherItr.prepareNext()) // do NOT use "||" - { + if (!thisItr.prepareNext(1) | /* do NOT use "||" */ !otherItr.prepareNext()) { return false; } } @@ -1745,8 +1732,7 @@ public class ConciseSet extends AbstractIntSet implements Serializable } otherItr.word--; //noinspection NonShortCircuitBooleanExpression - if (!thisItr.prepareNext() | !otherItr.prepareNext(1)) // do NOT use "||" - { + if (!thisItr.prepareNext() | /* do NOT use "||" */ !otherItr.prepareNext(1)) { return false; } } else { @@ -1754,8 +1740,7 @@ public class ConciseSet extends AbstractIntSet implements Serializable return true; } //noinspection NonShortCircuitBooleanExpression - if (!thisItr.prepareNext() | !otherItr.prepareNext()) // do NOT use "||" - { + if (!thisItr.prepareNext() | /* do NOT use "||" */ !otherItr.prepareNext()) { return false; } } @@ -1817,8 +1802,7 @@ public class ConciseSet extends AbstractIntSet implements 
Serializable } } //noinspection NonShortCircuitBooleanExpression - if (!thisItr.prepareNext(minCount) | !otherItr.prepareNext(minCount)) // NOT || - { + if (!thisItr.prepareNext(minCount) | /* NOT || */ !otherItr.prepareNext(minCount)) { return false; } } else { @@ -1828,8 +1812,7 @@ public class ConciseSet extends AbstractIntSet implements Serializable } thisItr.word--; //noinspection NonShortCircuitBooleanExpression - if (!thisItr.prepareNext(1) | !otherItr.prepareNext()) // do NOT use "||" - { + if (!thisItr.prepareNext(1) | /* do NOT use "||" */ !otherItr.prepareNext()) { return false; } } @@ -1840,8 +1823,7 @@ public class ConciseSet extends AbstractIntSet implements Serializable } otherItr.word--; //noinspection NonShortCircuitBooleanExpression - if (!thisItr.prepareNext() | !otherItr.prepareNext(1)) // do NOT use "||" - { + if (!thisItr.prepareNext() | /* do NOT use "||" */ !otherItr.prepareNext(1)) { return false; } } else { @@ -1850,8 +1832,7 @@ public class ConciseSet extends AbstractIntSet implements Serializable return true; } //noinspection NonShortCircuitBooleanExpression - if (!thisItr.prepareNext() | !otherItr.prepareNext()) // do NOT use "||" - { + if (!thisItr.prepareNext() | /* do NOT use "||" */ !otherItr.prepareNext()) { return false; } } @@ -2063,9 +2044,8 @@ public class ConciseSet extends AbstractIntSet implements Serializable // start comparing from the end, that is at blocks with no // (un)set bits if (isZeroSequence(thisWord)) { - if (isOneSequence(otherWord)) - // zeros < ones - { + if (isOneSequence(otherWord)) { + // zeros < ones return -1; } // compare two sequences of zeros @@ -2074,9 +2054,8 @@ public class ConciseSet extends AbstractIntSet implements Serializable return res < 0 ? 
-1 : 1; } } else { - if (isZeroSequence(otherWord)) - // ones > zeros - { + if (isZeroSequence(otherWord)) { + // ones > zeros return 1; } // compare two sequences of ones @@ -2318,9 +2297,8 @@ public class ConciseSet extends AbstractIntSet implements Serializable */ private void writeObject(ObjectOutputStream s) throws IOException { - if (words != null && lastWordIndex < words.length - 1) - // compact before serializing - { + if (words != null && lastWordIndex < words.length - 1) { + // compact before serializing words = Arrays.copyOf(words, lastWordIndex + 1); } s.defaultWriteObject(); @@ -2562,7 +2540,7 @@ public class ConciseSet extends AbstractIntSet implements Serializable } return null; } - },; + }; /** * Performs the operation on the given literals diff --git a/extendedset/src/main/java/io/druid/extendedset/intset/ImmutableConciseSet.java b/extendedset/src/main/java/io/druid/extendedset/intset/ImmutableConciseSet.java index 6347c53f30d..412a03f8dcd 100755 --- a/extendedset/src/main/java/io/druid/extendedset/intset/ImmutableConciseSet.java +++ b/extendedset/src/main/java/io/druid/extendedset/intset/ImmutableConciseSet.java @@ -928,10 +928,9 @@ public class ImmutableConciseSet } } else { setBitsInCurrentWord = sequenceLength - 1; - if (position < setBitsInCurrentWord) - // check whether the desired set bit is after the - // flipped bit (or after the first block) - { + if (position < setBitsInCurrentWord) { + // check whether the desired set bit is after the + // flipped bit (or after the first block) return firstSetBitInWord + position + (position < ConciseSetUtils.getFlippedBit(w) ? 
0 : 1); } } diff --git a/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/AmbariMetricsEmitterModule.java b/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/AmbariMetricsEmitterModule.java index 56069fa650c..ad029a08787 100644 --- a/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/AmbariMetricsEmitterModule.java +++ b/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/AmbariMetricsEmitterModule.java @@ -54,7 +54,8 @@ public class AmbariMetricsEmitterModule implements DruidModule @Provides @ManageLifecycle @Named(EMITTER_TYPE) - public Emitter getEmitter(AmbariMetricsEmitterConfig emitterConfig, final Injector injector){ + public Emitter getEmitter(AmbariMetricsEmitterConfig emitterConfig, final Injector injector) + { List emitters = Lists.transform( emitterConfig.getAlertEmitters(), new Function() diff --git a/extensions-contrib/azure-extensions/src/main/java/io/druid/firehose/azure/AzureBlob.java b/extensions-contrib/azure-extensions/src/main/java/io/druid/firehose/azure/AzureBlob.java index 98cd1a8ec4b..6a4eca1afee 100644 --- a/extensions-contrib/azure-extensions/src/main/java/io/druid/firehose/azure/AzureBlob.java +++ b/extensions-contrib/azure-extensions/src/main/java/io/druid/firehose/azure/AzureBlob.java @@ -24,7 +24,8 @@ import com.fasterxml.jackson.annotation.JsonProperty; import javax.validation.constraints.NotNull; -public class AzureBlob { +public class AzureBlob +{ @JsonProperty @NotNull private String container = null; @@ -33,19 +34,23 @@ public class AzureBlob { @NotNull private String path = null; - public AzureBlob() { + public AzureBlob() + { } - public AzureBlob(String container, String path) { + public AzureBlob(String container, String path) + { this.container = container; this.path = path; } - public String getContainer() { + public String getContainer() + { return container; } - public String 
getPath() { + public String getPath() + { return path; } diff --git a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureAccountConfig.java b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureAccountConfig.java index 342f97c2af8..d052edc1d30 100644 --- a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureAccountConfig.java +++ b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureAccountConfig.java @@ -70,13 +70,28 @@ public class AzureAccountConfig this.container = container; } - public String getProtocol() { return protocol; } + public String getProtocol() + { + return protocol; + } - public int getMaxTries() { return maxTries; } + public int getMaxTries() + { + return maxTries; + } - public String getAccount() { return account; } + public String getAccount() + { + return account; + } - public String getKey() { return key;} + public String getKey() + { + return key; + } - public String getContainer() { return container; } + public String getContainer() + { + return container; + } } diff --git a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureStorage.java b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureStorage.java index e7d071ea00d..5136a1cdc49 100644 --- a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureStorage.java +++ b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureStorage.java @@ -93,12 +93,14 @@ public class AzureStorage } public CloudBlockBlob getBlob(final String containerName, final String blobPath) - throws URISyntaxException, StorageException { + throws URISyntaxException, StorageException + { return getCloudBlobContainer(containerName).getBlockBlobReference(blobPath); } public long getBlobLength(final String containerName, final String blobPath) - throws URISyntaxException, StorageException { + throws URISyntaxException, 
StorageException + { return getCloudBlobContainer(containerName).getBlockBlobReference(blobPath).getProperties().getLength(); } @@ -109,8 +111,8 @@ public class AzureStorage return container.getBlockBlobReference(blobPath).openInputStream(); } - public boolean getBlobExists(String container, String blobPath) - throws URISyntaxException, StorageException { + public boolean getBlobExists(String container, String blobPath) throws URISyntaxException, StorageException + { return getCloudBlobContainer(container).getBlockBlobReference(blobPath).exists(); } } diff --git a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureTaskLogs.java b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureTaskLogs.java index 790ec477710..6525bff7e88 100644 --- a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureTaskLogs.java +++ b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureTaskLogs.java @@ -35,7 +35,8 @@ import java.io.InputStream; import java.net.URISyntaxException; import java.util.concurrent.Callable; -public class AzureTaskLogs implements TaskLogs { +public class AzureTaskLogs implements TaskLogs +{ private static final Logger log = new Logger(AzureTaskLogs.class); @@ -43,34 +44,35 @@ public class AzureTaskLogs implements TaskLogs { private final AzureStorage azureStorage; @Inject - public AzureTaskLogs(AzureTaskLogsConfig config, AzureStorage azureStorage) { + public AzureTaskLogs(AzureTaskLogsConfig config, AzureStorage azureStorage) + { this.config = config; this.azureStorage = azureStorage; } @Override - public void pushTaskLog(final String taskid, final File logFile) throws IOException { + public void pushTaskLog(final String taskid, final File logFile) throws IOException + { final String taskKey = getTaskLogKey(taskid); log.info("Pushing task log %s to: %s", logFile, taskKey); try { AzureUtils.retryAzureOperation( - new Callable() { - @Override - public Void call() 
throws Exception { - azureStorage.uploadBlob(logFile, config.getContainer(), taskKey); - return null; - } + (Callable) () -> { + azureStorage.uploadBlob(logFile, config.getContainer(), taskKey); + return null; }, config.getMaxTries() ); - } catch (Exception e) { + } + catch (Exception e) { throw Throwables.propagate(e); } } @Override - public Optional streamTaskLog(final String taskid, final long offset) throws IOException { + public Optional streamTaskLog(final String taskid, final long offset) throws IOException + { final String container = config.getContainer(); final String taskKey = getTaskLogKey(taskid); @@ -80,9 +82,11 @@ public class AzureTaskLogs implements TaskLogs { } return Optional.of( - new ByteSource() { + new ByteSource() + { @Override - public InputStream openStream() throws IOException { + public InputStream openStream() throws IOException + { try { final long start; final long length = azureStorage.getBlobLength(container, taskKey); @@ -100,19 +104,22 @@ public class AzureTaskLogs implements TaskLogs { return stream; - } catch(Exception e) { + } + catch(Exception e) { throw new IOException(e); } } } ); - } catch (StorageException | URISyntaxException e) { + } + catch (StorageException | URISyntaxException e) { throw new IOE(e, "Failed to stream logs from: %s", taskKey); } } - private String getTaskLogKey(String taskid) { + private String getTaskLogKey(String taskid) + { return StringUtils.format("%s/%s/log", config.getPrefix(), taskid); } diff --git a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureTaskLogsConfig.java b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureTaskLogsConfig.java index bc79576fcdb..4812e0445a7 100644 --- a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureTaskLogsConfig.java +++ b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureTaskLogsConfig.java @@ -24,7 +24,8 @@ import 
com.fasterxml.jackson.annotation.JsonProperty; import javax.validation.constraints.Min; import javax.validation.constraints.NotNull; -public class AzureTaskLogsConfig { +public class AzureTaskLogsConfig +{ @JsonProperty @NotNull private String container = null; @@ -37,25 +38,29 @@ public class AzureTaskLogsConfig { @Min(1) private int maxTries = 3; - public AzureTaskLogsConfig() { - + public AzureTaskLogsConfig() + { } - public AzureTaskLogsConfig(String container, String prefix, int maxTries) { + public AzureTaskLogsConfig(String container, String prefix, int maxTries) + { this.container = container; this.prefix = prefix; this.maxTries = maxTries; } - public String getContainer() { + public String getContainer() + { return container; } - public String getPrefix() { + public String getPrefix() + { return prefix; } - public int getMaxTries() { + public int getMaxTries() + { return maxTries; } } diff --git a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureUtils.java b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureUtils.java index bf711eaf26b..0bbabd6594e 100644 --- a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureUtils.java +++ b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureUtils.java @@ -53,7 +53,8 @@ public class AzureUtils } }; - public static T retryAzureOperation(Callable f, int maxTries) throws Exception { + public static T retryAzureOperation(Callable f, int maxTries) throws Exception + { return RetryUtils.retry(f, AZURE_RETRY, maxTries); } } diff --git a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureTaskLogsTest.java b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureTaskLogsTest.java index 24c3c49e193..26ff677af17 100644 --- a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureTaskLogsTest.java +++ 
b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureTaskLogsTest.java @@ -38,7 +38,8 @@ import java.io.StringWriter; import static org.easymock.EasyMock.expect; import static org.easymock.EasyMock.expectLastCall; -public class AzureTaskLogsTest extends EasyMockSupport { +public class AzureTaskLogsTest extends EasyMockSupport +{ private static final String container = "test"; private static final String prefix = "test/log"; @@ -49,14 +50,16 @@ public class AzureTaskLogsTest extends EasyMockSupport { private AzureTaskLogs azureTaskLogs; @Before - public void before() { + public void before() + { azureStorage = createMock(AzureStorage.class); azureTaskLogs = new AzureTaskLogs(azureTaskLogsConfig, azureStorage); } @Test - public void testPushTaskLog() throws Exception { + public void testPushTaskLog() throws Exception + { final File tmpDir = Files.createTempDir(); try { @@ -70,13 +73,15 @@ public class AzureTaskLogsTest extends EasyMockSupport { azureTaskLogs.pushTaskLog(taskid, logFile); verifyAll(); - } finally { + } + finally { FileUtils.deleteDirectory(tmpDir); } } @Test - public void testStreamTaskLogWithoutOffset() throws Exception { + public void testStreamTaskLogWithoutOffset() throws Exception + { final String testLog = "hello this is a log"; final String blobPath = prefix + "/" + taskid + "/log"; @@ -98,7 +103,8 @@ public class AzureTaskLogsTest extends EasyMockSupport { } @Test - public void testStreamTaskLogWithPositiveOffset() throws Exception { + public void testStreamTaskLogWithPositiveOffset() throws Exception + { final String testLog = "hello this is a log"; final String blobPath = prefix + "/" + taskid + "/log"; @@ -120,7 +126,8 @@ public class AzureTaskLogsTest extends EasyMockSupport { } @Test - public void testStreamTaskLogWithNegative() throws Exception { + public void testStreamTaskLogWithNegative() throws Exception + { final String testLog = "hello this is a log"; final String blobPath = prefix + "/" + taskid + 
"/log"; diff --git a/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPuller.java b/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPuller.java index 0b48d4964ac..469cd9a0ee7 100644 --- a/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPuller.java +++ b/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPuller.java @@ -23,15 +23,14 @@ import com.google.common.base.Predicates; import com.google.inject.Inject; import com.netflix.astyanax.recipes.storage.ChunkedStorage; import com.netflix.astyanax.recipes.storage.ObjectMetadata; - import io.druid.java.util.common.CompressionUtils; +import io.druid.java.util.common.FileUtils; import io.druid.java.util.common.ISE; import io.druid.java.util.common.RetryUtils; import io.druid.java.util.common.logger.Logger; import io.druid.segment.loading.DataSegmentPuller; import io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; -import org.apache.commons.io.FileUtils; import java.io.File; import java.io.FileOutputStream; @@ -60,7 +59,9 @@ public class CassandraDataSegmentPuller extends CassandraStorage implements Data String key = (String) segment.getLoadSpec().get("key"); getSegmentFiles(key, outDir); } - public io.druid.java.util.common.FileUtils.FileCopyResult getSegmentFiles(final String key, final File outDir) throws SegmentLoadingException{ + public FileUtils.FileCopyResult getSegmentFiles(final String key, final File outDir) + throws SegmentLoadingException + { log.info("Pulling index from C* at path[%s] to outDir[%s]", key, outDir); if (!outDir.exists()) { outDir.mkdirs(); @@ -74,13 +75,13 @@ public class CassandraDataSegmentPuller extends CassandraStorage implements Data final File tmpFile = new File(outDir, "index.zip"); log.info("Pulling to temporary local cache [%s]", tmpFile.getAbsolutePath()); 
- final io.druid.java.util.common.FileUtils.FileCopyResult localResult; + final FileUtils.FileCopyResult localResult; try { localResult = RetryUtils.retry( - new Callable() + new Callable() { @Override - public io.druid.java.util.common.FileUtils.FileCopyResult call() throws Exception + public FileUtils.FileCopyResult call() throws Exception { try (OutputStream os = new FileOutputStream(tmpFile)) { final ObjectMetadata meta = ChunkedStorage @@ -89,17 +90,18 @@ public class CassandraDataSegmentPuller extends CassandraStorage implements Data .withConcurrencyLevel(CONCURRENCY) .call(); } - return new io.druid.java.util.common.FileUtils.FileCopyResult(tmpFile); + return new FileUtils.FileCopyResult(tmpFile); } }, Predicates.alwaysTrue(), 10 ); - }catch (Exception e){ + } + catch (Exception e) { throw new SegmentLoadingException(e, "Unable to copy key [%s] to file [%s]", key, tmpFile.getAbsolutePath()); } try{ - final io.druid.java.util.common.FileUtils.FileCopyResult result = CompressionUtils.unzip(tmpFile, outDir); + final FileUtils.FileCopyResult result = CompressionUtils.unzip(tmpFile, outDir); log.info( "Pull of file[%s] completed in %,d millis (%s bytes)", key, System.currentTimeMillis() - startTime, result.size() @@ -108,15 +110,16 @@ public class CassandraDataSegmentPuller extends CassandraStorage implements Data } catch (Exception e) { try { - FileUtils.deleteDirectory(outDir); + org.apache.commons.io.FileUtils.deleteDirectory(outDir); } catch (IOException e1) { log.error(e1, "Error clearing segment directory [%s]", outDir.getAbsolutePath()); e.addSuppressed(e1); } throw new SegmentLoadingException(e, e.getMessage()); - } finally { - if(!tmpFile.delete()){ + } + finally { + if (!tmpFile.delete()) { log.warn("Could not delete cache file at [%s]", tmpFile.getAbsolutePath()); } } diff --git a/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPusher.java 
b/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPusher.java index b26253e03ed..37cbea4623b 100644 --- a/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPusher.java +++ b/extensions-contrib/cassandra-storage/src/main/java/io/druid/storage/cassandra/CassandraDataSegmentPusher.java @@ -88,8 +88,7 @@ public class CassandraDataSegmentPusher extends CassandraStorage implements Data int version = SegmentUtils.getVersionFromDir(indexFilesDir); - try - { + try { long start = System.currentTimeMillis(); ChunkedStorage.newWriter(indexStorage, key, new FileInputStream(compressedIndexFile)) .withConcurrencyLevel(CONCURRENCY).call(); @@ -100,8 +99,8 @@ public class CassandraDataSegmentPusher extends CassandraStorage implements Data .putColumn("descriptor", json, null); mutation.execute(); log.info("Wrote index to C* in [%s] ms", System.currentTimeMillis() - start); - } catch (Exception e) - { + } + catch (Exception e) { throw new IOException(e); } diff --git a/extensions-contrib/google-extensions/src/main/java/io/druid/firehose/google/GoogleBlob.java b/extensions-contrib/google-extensions/src/main/java/io/druid/firehose/google/GoogleBlob.java index d0109973d87..053a4569aeb 100644 --- a/extensions-contrib/google-extensions/src/main/java/io/druid/firehose/google/GoogleBlob.java +++ b/extensions-contrib/google-extensions/src/main/java/io/druid/firehose/google/GoogleBlob.java @@ -22,23 +22,27 @@ package io.druid.firehose.google; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; -public class GoogleBlob { +public class GoogleBlob +{ private final String bucket; private final String path; @JsonCreator - public GoogleBlob(@JsonProperty("bucket") String bucket, @JsonProperty("path") String path) { + public GoogleBlob(@JsonProperty("bucket") String bucket, @JsonProperty("path") String path) + { this.bucket = bucket; this.path = 
path; } @JsonProperty - public String getBucket() { + public String getBucket() + { return bucket; } @JsonProperty - public String getPath() { + public String getPath() + { return path; } diff --git a/extensions-contrib/google-extensions/src/main/java/io/druid/firehose/google/StaticGoogleBlobStoreFirehoseFactory.java b/extensions-contrib/google-extensions/src/main/java/io/druid/firehose/google/StaticGoogleBlobStoreFirehoseFactory.java index 4f8afca76ca..632850f399b 100644 --- a/extensions-contrib/google-extensions/src/main/java/io/druid/firehose/google/StaticGoogleBlobStoreFirehoseFactory.java +++ b/extensions-contrib/google-extensions/src/main/java/io/druid/firehose/google/StaticGoogleBlobStoreFirehoseFactory.java @@ -54,7 +54,8 @@ public class StaticGoogleBlobStoreFirehoseFactory extends PrefetchableTextFilesF } @JsonProperty - public List getBlobs() { + public List getBlobs() + { return blobs; } diff --git a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentPusher.java b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentPusher.java index 33be247a22a..adb3874e5e2 100644 --- a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentPusher.java +++ b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentPusher.java @@ -166,7 +166,8 @@ public class GoogleDataSegmentPusher implements DataSegmentPusher return makeLoadSpec(config.getBucket(),finalIndexZipFilePath.getPath().substring(1)); } - private Map makeLoadSpec(String bucket, String path) { + private Map makeLoadSpec(String bucket, String path) + { return ImmutableMap.of( "type", GoogleStorageDruidModule.SCHEME, "bucket", bucket, diff --git a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleStorage.java b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleStorage.java index 2ebc377e18f..f7e2bbc6440 
100644 --- a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleStorage.java +++ b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleStorage.java @@ -58,7 +58,8 @@ public class GoogleStorage { try { return storage.objects().get(bucket, path).executeUsingHead().isSuccessStatusCode(); - } catch (Exception e) { + } + catch (Exception e) { return false; } } diff --git a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleTaskLogs.java b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleTaskLogs.java index b3273e78aa8..9ec0f20275f 100644 --- a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleTaskLogs.java +++ b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleTaskLogs.java @@ -32,20 +32,23 @@ import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; -public class GoogleTaskLogs implements TaskLogs { +public class GoogleTaskLogs implements TaskLogs +{ private static final Logger LOG = new Logger(GoogleTaskLogs.class); private final GoogleTaskLogsConfig config; private final GoogleStorage storage; @Inject - public GoogleTaskLogs(GoogleTaskLogsConfig config, GoogleStorage storage) { + public GoogleTaskLogs(GoogleTaskLogsConfig config, GoogleStorage storage) + { this.config = config; this.storage = storage; } @Override - public void pushTaskLog(final String taskid, final File logFile) throws IOException { + public void pushTaskLog(final String taskid, final File logFile) throws IOException + { final String taskKey = getTaskLogKey(taskid); LOG.info("Pushing task log %s to: %s", logFile, taskKey); @@ -58,7 +61,8 @@ public class GoogleTaskLogs implements TaskLogs { } @Override - public Optional streamTaskLog(final String taskid, final long offset) throws IOException { + public Optional streamTaskLog(final String taskid, final long offset) throws IOException + { 
final String taskKey = getTaskLogKey(taskid); try { @@ -69,9 +73,11 @@ public class GoogleTaskLogs implements TaskLogs { final long length = storage.size(config.getBucket(), taskKey); return Optional.of( - new ByteSource() { + new ByteSource() + { @Override - public InputStream openStream() throws IOException { + public InputStream openStream() throws IOException + { try { final long start; @@ -87,18 +93,21 @@ public class GoogleTaskLogs implements TaskLogs { stream.skip(start); return stream; - } catch(Exception e) { + } + catch(Exception e) { throw new IOException(e); } } } ); - } catch (IOException e) { + } + catch (IOException e) { throw new IOE(e, "Failed to stream logs from: %s", taskKey); } } - private String getTaskLogKey(String taskid) { + private String getTaskLogKey(String taskid) + { return config.getPrefix() + "/" + taskid.replaceAll(":", "_"); } diff --git a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleTaskLogsConfig.java b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleTaskLogsConfig.java index 516b6d65c72..8f9991184e8 100644 --- a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleTaskLogsConfig.java +++ b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleTaskLogsConfig.java @@ -23,7 +23,8 @@ import com.fasterxml.jackson.annotation.JsonProperty; import javax.validation.constraints.NotNull; -public class GoogleTaskLogsConfig { +public class GoogleTaskLogsConfig +{ @JsonProperty @NotNull private final String bucket; @@ -32,16 +33,19 @@ public class GoogleTaskLogsConfig { @NotNull private final String prefix; - public GoogleTaskLogsConfig(@JsonProperty("bucket") String bucket, @JsonProperty("prefix") String prefix) { + public GoogleTaskLogsConfig(@JsonProperty("bucket") String bucket, @JsonProperty("prefix") String prefix) + { this.bucket = bucket; this.prefix = prefix; } - public String getBucket() { + public String 
getBucket() + { return bucket; } - public String getPrefix() { + public String getPrefix() + { return prefix; } } diff --git a/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleTaskLogsTest.java b/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleTaskLogsTest.java index e677d5481ee..a3f0ec4ae68 100644 --- a/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleTaskLogsTest.java +++ b/extensions-contrib/google-extensions/src/test/java/io/druid/storage/google/GoogleTaskLogsTest.java @@ -41,7 +41,8 @@ import java.nio.charset.StandardCharsets; import static org.easymock.EasyMock.expect; import static org.easymock.EasyMock.expectLastCall; -public class GoogleTaskLogsTest extends EasyMockSupport { +public class GoogleTaskLogsTest extends EasyMockSupport +{ private static final String bucket = "test"; private static final String prefix = "test/log"; private static final String taskid = "taskid"; @@ -50,14 +51,16 @@ public class GoogleTaskLogsTest extends EasyMockSupport { private GoogleTaskLogs googleTaskLogs; @Before - public void before() { + public void before() + { storage = createMock(GoogleStorage.class); GoogleTaskLogsConfig config = new GoogleTaskLogsConfig(bucket, prefix); googleTaskLogs = new GoogleTaskLogs(config, storage); } @Test - public void testPushTaskLog() throws Exception { + public void testPushTaskLog() throws Exception + { final File tmpDir = Files.createTempDir(); try { @@ -74,13 +77,15 @@ public class GoogleTaskLogsTest extends EasyMockSupport { googleTaskLogs.pushTaskLog(taskid, logFile); verifyAll(); - } finally { + } + finally { FileUtils.deleteDirectory(tmpDir); } } @Test - public void testStreamTaskLogWithoutOffset() throws Exception { + public void testStreamTaskLogWithoutOffset() throws Exception + { final String testLog = "hello this is a log"; final String logPath = prefix + "/" + taskid; @@ -100,7 +105,8 @@ public class GoogleTaskLogsTest extends 
EasyMockSupport { } @Test - public void testStreamTaskLogWithPositiveOffset() throws Exception { + public void testStreamTaskLogWithPositiveOffset() throws Exception + { final String testLog = "hello this is a log"; final String logPath = prefix + "/" + taskid; @@ -120,7 +126,8 @@ public class GoogleTaskLogsTest extends EasyMockSupport { } @Test - public void testStreamTaskLogWithNegative() throws Exception { + public void testStreamTaskLogWithNegative() throws Exception + { final String testLog = "hello this is a log"; final String logPath = prefix + "/" + taskid; diff --git a/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/GraphiteEmitterModule.java b/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/GraphiteEmitterModule.java index bc37b7fcd0e..651ee186911 100644 --- a/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/GraphiteEmitterModule.java +++ b/extensions-contrib/graphite-emitter/src/main/java/io/druid/emitter/graphite/GraphiteEmitterModule.java @@ -55,7 +55,8 @@ public class GraphiteEmitterModule implements DruidModule @Provides @ManageLifecycle @Named(EMITTER_TYPE) - public Emitter getEmitter(GraphiteEmitterConfig graphiteEmitterConfig, ObjectMapper mapper, final Injector injector){ + public Emitter getEmitter(GraphiteEmitterConfig graphiteEmitterConfig, ObjectMapper mapper, final Injector injector) + { List emitters = Lists.transform( graphiteEmitterConfig.getAlertEmitters(), new Function() diff --git a/extensions-contrib/orc-extensions/src/main/java/io/druid/data/input/orc/OrcExtensionsModule.java b/extensions-contrib/orc-extensions/src/main/java/io/druid/data/input/orc/OrcExtensionsModule.java index 92d51a56ca7..aee9593e6f2 100644 --- a/extensions-contrib/orc-extensions/src/main/java/io/druid/data/input/orc/OrcExtensionsModule.java +++ b/extensions-contrib/orc-extensions/src/main/java/io/druid/data/input/orc/OrcExtensionsModule.java @@ -30,7 +30,8 @@ import java.util.List; 
public class OrcExtensionsModule implements DruidModule { @Override - public List getJacksonModules() { + public List getJacksonModules() + { return Collections.singletonList( new SimpleModule("OrcInputRowParserModule") .registerSubtypes( @@ -40,7 +41,7 @@ public class OrcExtensionsModule implements DruidModule } @Override - public void configure(Binder binder) { - + public void configure(Binder binder) + { } } diff --git a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/DruidOrcInputFormatTest.java b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/DruidOrcInputFormatTest.java index 74e1f30afd8..60cd33d56f9 100644 --- a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/DruidOrcInputFormatTest.java +++ b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/DruidOrcInputFormatTest.java @@ -141,8 +141,7 @@ public class DruidOrcInputFormatTest ListColumnVector listColumnVector = (ListColumnVector) batch.cols[2]; listColumnVector.childCount = col2.length; listColumnVector.lengths[0] = 3; - for (int idx = 0; idx < col2.length; idx++) - { + for (int idx = 0; idx < col2.length; idx++) { ((BytesColumnVector) listColumnVector.child).setRef( idx, StringUtils.toUtf8(col2[idx]), diff --git a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java index 114a8924682..ff301b55b88 100644 --- a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java +++ b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java @@ -302,8 +302,7 @@ public class OrcIndexGeneratorJobTest QueryableIndex index = HadoopDruidIndexerConfig.INDEX_IO.loadIndex(dir); QueryableIndexIndexableAdapter adapter = new QueryableIndexIndexableAdapter(index); - for(Rowboat row: adapter.getRows()) 
- { + for (Rowboat row: adapter.getRows()) { Object[] metrics = row.getMetrics(); rowCount++; diff --git a/extensions-contrib/scan-query/src/main/java/io/druid/query/scan/ScanQueryDruidModule.java b/extensions-contrib/scan-query/src/main/java/io/druid/query/scan/ScanQueryDruidModule.java index 4a1633aaa38..e8696d1a0a8 100644 --- a/extensions-contrib/scan-query/src/main/java/io/druid/query/scan/ScanQueryDruidModule.java +++ b/extensions-contrib/scan-query/src/main/java/io/druid/query/scan/ScanQueryDruidModule.java @@ -29,9 +29,11 @@ import io.druid.initialization.DruidModule; import java.util.Arrays; import java.util.List; -public class ScanQueryDruidModule implements DruidModule { +public class ScanQueryDruidModule implements DruidModule +{ @Override - public void configure(Binder binder) { + public void configure(Binder binder) + { DruidBinders.queryToolChestBinder(binder) .addBinding(ScanQuery.class) .to(ScanQueryQueryToolChest.class) @@ -44,7 +46,8 @@ public class ScanQueryDruidModule implements DruidModule { } @Override - public List getJacksonModules() { + public List getJacksonModules() + { return Arrays.asList( new SimpleModule("ScanQueryDruidModule") .registerSubtypes( diff --git a/extensions-contrib/scan-query/src/main/java/io/druid/query/scan/ScanQueryEngine.java b/extensions-contrib/scan-query/src/main/java/io/druid/query/scan/ScanQueryEngine.java index 53d668b3e11..bb23fdf5b72 100644 --- a/extensions-contrib/scan-query/src/main/java/io/druid/query/scan/ScanQueryEngine.java +++ b/extensions-contrib/scan-query/src/main/java/io/druid/query/scan/ScanQueryEngine.java @@ -87,8 +87,7 @@ public class ScanQueryEngine allColumns.addAll(query.getColumns()); allDims.retainAll(query.getColumns()); allMetrics.retainAll(query.getColumns()); - } - else { + } else { if (!allDims.contains(ScanResultValue.timestampKey)) { allColumns.add(ScanResultValue.timestampKey); } diff --git a/extensions-contrib/scan-query/src/test/java/io/druid/query/scan/ScanQueryRunnerTest.java 
b/extensions-contrib/scan-query/src/test/java/io/druid/query/scan/ScanQueryRunnerTest.java index f52b5051840..a8f80a18ee1 100644 --- a/extensions-contrib/scan-query/src/test/java/io/druid/query/scan/ScanQueryRunnerTest.java +++ b/extensions-contrib/scan-query/src/test/java/io/druid/query/scan/ScanQueryRunnerTest.java @@ -606,7 +606,8 @@ public class ScanQueryRunnerTest return results; } - private Iterable compactedListToRow(Iterable results) { + private Iterable compactedListToRow(Iterable results) + { return Iterables.transform(results, new Function() { @Override diff --git a/extensions-contrib/sqlserver-metadata-storage/src/main/java/io/druid/metadata/storage/sqlserver/SQLServerConnector.java b/extensions-contrib/sqlserver-metadata-storage/src/main/java/io/druid/metadata/storage/sqlserver/SQLServerConnector.java index 920620acaf6..2e1d6b36c59 100644 --- a/extensions-contrib/sqlserver-metadata-storage/src/main/java/io/druid/metadata/storage/sqlserver/SQLServerConnector.java +++ b/extensions-contrib/sqlserver-metadata-storage/src/main/java/io/druid/metadata/storage/sqlserver/SQLServerConnector.java @@ -194,7 +194,8 @@ public class SQLServerConnector extends SQLMetadataConnector } @Override - public String getQuoteString() { + public String getQuoteString() + { return QUOTE_STRING; } diff --git a/extensions-contrib/sqlserver-metadata-storage/src/test/java/io/druid/metadata/storage/sqlserver/CustomStatementRewriterTest.java b/extensions-contrib/sqlserver-metadata-storage/src/test/java/io/druid/metadata/storage/sqlserver/CustomStatementRewriterTest.java index 550fc2f2002..3ddc2bd9257 100644 --- a/extensions-contrib/sqlserver-metadata-storage/src/test/java/io/druid/metadata/storage/sqlserver/CustomStatementRewriterTest.java +++ b/extensions-contrib/sqlserver-metadata-storage/src/test/java/io/druid/metadata/storage/sqlserver/CustomStatementRewriterTest.java @@ -85,8 +85,9 @@ public class CustomStatementRewriterTest // - // 
https://en.wikipedia.org/wiki/List_of_Unicode_characters Assert.fail("Expected 'UnableToCreateStatementException'"); - } catch (UnableToCreateStatementException e) { - // + } + catch (UnableToCreateStatementException e) { + // expected } } diff --git a/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampAggregator.java b/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampAggregator.java index ffe66f78298..8ad1b5f46ce 100644 --- a/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampAggregator.java +++ b/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampAggregator.java @@ -59,7 +59,8 @@ public class TimestampAggregator implements Aggregator } @Override - public void aggregate() { + public void aggregate() + { Long value = TimestampAggregatorFactory.convertLong(timestampSpec, selector.get()); if (value != null) { diff --git a/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampMaxAggregatorFactory.java b/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampMaxAggregatorFactory.java index 3233e645bdf..8a11ba8b487 100644 --- a/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampMaxAggregatorFactory.java +++ b/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampMaxAggregatorFactory.java @@ -22,8 +22,7 @@ package io.druid.query.aggregation; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; - -import java.util.Comparator; +import com.google.common.collect.Ordering; public class TimestampMaxAggregatorFactory extends TimestampAggregatorFactory { @@ -34,12 +33,7 @@ public class TimestampMaxAggregatorFactory extends TimestampAggregatorFactory @JsonProperty("timeFormat") String timeFormat ) { - super(name, fieldName, timeFormat, new 
Comparator() { - @Override - public int compare(Long o1, Long o2) { - return Long.compare(o1, o2); - } - }, Long.MIN_VALUE); + super(name, fieldName, timeFormat, Ordering.natural(), Long.MIN_VALUE); Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name"); Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName"); } diff --git a/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampMinAggregatorFactory.java b/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampMinAggregatorFactory.java index 00c40fba514..e5d4227e075 100644 --- a/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampMinAggregatorFactory.java +++ b/extensions-contrib/time-min-max/src/main/java/io/druid/query/aggregation/TimestampMinAggregatorFactory.java @@ -22,8 +22,7 @@ package io.druid.query.aggregation; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; - -import java.util.Comparator; +import com.google.common.collect.Ordering; public class TimestampMinAggregatorFactory extends TimestampAggregatorFactory { @@ -34,12 +33,7 @@ public class TimestampMinAggregatorFactory extends TimestampAggregatorFactory @JsonProperty("timeFormat") String timeFormat ) { - super(name, fieldName, timeFormat, new Comparator() { - @Override - public int compare(Long o1, Long o2) { - return -(Long.compare(o1, o2)); - } - }, Long.MAX_VALUE); + super(name, fieldName, timeFormat, Ordering.natural().reverse(), Long.MAX_VALUE); Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name"); Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName"); } diff --git a/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/SchemaRepoBasedAvroBytesDecoder.java 
b/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/SchemaRepoBasedAvroBytesDecoder.java index f62bcb2df11..84a8ceb56a4 100644 --- a/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/SchemaRepoBasedAvroBytesDecoder.java +++ b/extensions-core/avro-extensions/src/main/java/io/druid/data/input/avro/SchemaRepoBasedAvroBytesDecoder.java @@ -80,12 +80,14 @@ public class SchemaRepoBasedAvroBytesDecoder implements AvroBytesDe DatumReader reader = new GenericDatumReader<>(schema); try (ByteBufferInputStream inputStream = new ByteBufferInputStream(Collections.singletonList(bytes))) { return reader.read(null, DecoderFactory.get().binaryDecoder(inputStream, null)); - } catch (EOFException eof) { + } + catch (EOFException eof) { // waiting for avro v1.9.0 (#AVRO-813) throw new ParseException( eof, "Avro's unnecessary EOFException, detail: [%s]", "https://issues.apache.org/jira/browse/AVRO-813" ); - } catch (IOException e) { + } + catch (IOException e) { throw new ParseException(e, "Fail to decode avro message!"); } } diff --git a/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroStreamInputRowParserTest.java b/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroStreamInputRowParserTest.java index 003c59261c5..607d6b71dc5 100644 --- a/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroStreamInputRowParserTest.java +++ b/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroStreamInputRowParserTest.java @@ -95,7 +95,10 @@ public class AvroStreamInputRowParserTest { @Nullable @Override - public Integer apply(@Nullable CharSequence input) { return Integer.parseInt(input.toString()); } + public Integer apply(@Nullable CharSequence input) + { + return Integer.parseInt(input.toString()); + } } ); public static final Map SOME_STRING_VALUE_MAP_VALUE = Maps.asMap( @@ -103,16 +106,14 @@ public class AvroStreamInputRowParserTest { @Nullable @Override - public CharSequence 
apply(@Nullable CharSequence input) { return input.toString(); } + public CharSequence apply(@Nullable CharSequence input) + { + return input.toString(); + } } ); public static final String SOME_UNION_VALUE = "string as union"; public static final ByteBuffer SOME_BYTES_VALUE = ByteBuffer.allocate(8); - private static final Function TO_STRING_INCLUDING_NULL = new Function() - { - @Override - public String apply(Object o) { return String.valueOf(o); } - }; private final ObjectMapper jsonMapper = new ObjectMapper(); @@ -195,11 +196,11 @@ public class AvroStreamInputRowParserTest assertEquals(Collections.singletonList(String.valueOf(SOME_OTHER_ID_VALUE)), inputRow.getDimension(SOME_OTHER_ID)); assertEquals(Collections.singletonList(String.valueOf(true)), inputRow.getDimension(IS_VALID)); assertEquals( - Lists.transform(SOME_INT_ARRAY_VALUE, TO_STRING_INCLUDING_NULL), + Lists.transform(SOME_INT_ARRAY_VALUE, String::valueOf), inputRow.getDimension("someIntArray") ); assertEquals( - Lists.transform(SOME_STRING_ARRAY_VALUE, TO_STRING_INCLUDING_NULL), + Lists.transform(SOME_STRING_ARRAY_VALUE, String::valueOf), inputRow.getDimension("someStringArray") ); // towards Map avro field as druid dimension, need to convert its toString() back to HashMap to check equality diff --git a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosUtil.java b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosUtil.java index 27393b3e3e3..d171821051a 100644 --- a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosUtil.java +++ b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosUtil.java @@ -113,7 +113,8 @@ public class DruidKerberosUtil } } - public static boolean needToSendCredentials(CookieStore cookieStore, URI uri){ + public static boolean needToSendCredentials(CookieStore cookieStore, URI uri) + { return getAuthCookie(cookieStore, uri) == null; } diff 
--git a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsKerberosConfig.java b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsKerberosConfig.java index 83f870e91fd..ad217bea07c 100644 --- a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsKerberosConfig.java +++ b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsKerberosConfig.java @@ -31,7 +31,8 @@ public class HdfsKerberosConfig private final String keytab; @JsonCreator - public HdfsKerberosConfig(@JsonProperty("principal") String principal,@JsonProperty("keytab") String keytab) { + public HdfsKerberosConfig(@JsonProperty("principal") String principal, @JsonProperty("keytab") String keytab) + { this.principal = principal; this.keytab = keytab; } diff --git a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java index 8a835caa5b0..752253570d7 100644 --- a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java +++ b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java @@ -245,8 +245,7 @@ public class HdfsDataSegmentFinderTest updatedSegment4_1 = dataSegment; } else if (dataSegment.getIdentifier().equals(SEGMENT_5.getIdentifier())) { updatedSegment5 = dataSegment; - } - else { + } else { Assert.fail("Unexpected segment"); } } diff --git a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogram.java b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogram.java index da4b0851375..a35d705c234 100644 --- a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogram.java +++ b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/ApproximateHistogram.java @@ -191,17 
+191,35 @@ public class ApproximateHistogram ); } - public long count() { return count; } + public long count() + { + return count; + } - public float min() { return min; } + public float min() + { + return min; + } - public float max() { return max; } + public float max() + { + return max; + } - public int binCount() { return binCount; } + public int binCount() + { + return binCount; + } - public int capacity() { return size; } + public int capacity() + { + return size; + } - public float[] positions() { return Arrays.copyOfRange(positions, 0, binCount); } + public float[] positions() + { + return Arrays.copyOfRange(positions, 0, binCount); + } public long[] bins() { @@ -239,9 +257,15 @@ public class ApproximateHistogram return exactCount; } - public float getMin() { return this.min;} + public float getMin() + { + return this.min; + } - public float getMax() { return this.max;} + public float getMax() + { + return this.max; + } private static long sumBins(long[] bins, int binCount) { @@ -1491,10 +1515,8 @@ public class ApproximateHistogram // add full bin count if left bin count is exact if (exact0) { return (s + m0); - } - - // otherwise add only the left half of the bin - else { + } else { + // otherwise add only the left half of the bin return (s + 0.5 * m0); } } diff --git a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTest.java b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTest.java index e4e2a0af525..31eff16cdba 100644 --- a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTest.java +++ b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTest.java @@ -578,7 +578,9 @@ public class ApproximateHistogramTest ); } - @Test public void testEmptyHistogram() { + @Test + public void testEmptyHistogram() + { ApproximateHistogram h = new ApproximateHistogram(50); 
Assert.assertArrayEquals( new float[]{Float.NaN, Float.NaN}, diff --git a/extensions-core/kafka-extraction-namespace/src/main/java/io/druid/query/lookup/KafkaLookupExtractorIntrospectionHandler.java b/extensions-core/kafka-extraction-namespace/src/main/java/io/druid/query/lookup/KafkaLookupExtractorIntrospectionHandler.java index fb2ee96d8f3..22854b4c154 100644 --- a/extensions-core/kafka-extraction-namespace/src/main/java/io/druid/query/lookup/KafkaLookupExtractorIntrospectionHandler.java +++ b/extensions-core/kafka-extraction-namespace/src/main/java/io/druid/query/lookup/KafkaLookupExtractorIntrospectionHandler.java @@ -28,7 +28,10 @@ public class KafkaLookupExtractorIntrospectionHandler implements LookupIntrospec { private KafkaLookupExtractorFactory kafkaLookupExtractorFactory; - public KafkaLookupExtractorIntrospectionHandler(KafkaLookupExtractorFactory kafkaLookupExtractorFactory) {this.kafkaLookupExtractorFactory = kafkaLookupExtractorFactory;} + public KafkaLookupExtractorIntrospectionHandler(KafkaLookupExtractorFactory kafkaLookupExtractorFactory) + { + this.kafkaLookupExtractorFactory = kafkaLookupExtractorFactory; + } @GET public Response getActive() diff --git a/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/KafkaLookupExtractorFactoryTest.java b/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/KafkaLookupExtractorFactoryTest.java index 13d7f191651..51beda96946 100644 --- a/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/KafkaLookupExtractorFactoryTest.java +++ b/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/KafkaLookupExtractorFactoryTest.java @@ -299,7 +299,8 @@ public class KafkaLookupExtractorFactoryTest EasyMock.expectLastCall().andAnswer(new IAnswer() { @Override - public Object answer() throws Throwable { + public Object answer() throws Throwable + { threadWasInterrupted.set(Thread.currentThread().isInterrupted()); return 
null; } diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java index a895de01e38..ac3b8fbafea 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java @@ -982,6 +982,8 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport } @Override - void checkConnection(String host, int port) throws IOException { } + void checkConnection(String host, int port) throws IOException + { + } } } diff --git a/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/cache/CacheScheduler.java b/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/cache/CacheScheduler.java index d135b0e4883..38fcce978aa 100644 --- a/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/cache/CacheScheduler.java +++ b/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/cache/CacheScheduler.java @@ -137,8 +137,8 @@ public final class CacheScheduler * that would be a leak preventing the Entry to be collected by GC, and therefore {@link #entryCleaner} to be run by * the JVM. Also, {@link #entryCleaner} must not reference the Entry through it's Runnable hunk. 
*/ - public class EntryImpl implements AutoCloseable { - + public class EntryImpl implements AutoCloseable + { private final T namespace; private final String asString; private final AtomicReference cacheStateHolder = new AtomicReference(NoCache.CACHE_NOT_INITIALIZED); diff --git a/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/cache/NamespaceExtractionCacheManager.java b/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/cache/NamespaceExtractionCacheManager.java index c7d3fd5c0ac..414939d7317 100644 --- a/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/cache/NamespaceExtractionCacheManager.java +++ b/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/cache/NamespaceExtractionCacheManager.java @@ -45,7 +45,8 @@ public abstract class NamespaceExtractionCacheManager private final ScheduledThreadPoolExecutor scheduledExecutorService; - public NamespaceExtractionCacheManager(final Lifecycle lifecycle, final ServiceEmitter serviceEmitter) { + public NamespaceExtractionCacheManager(final Lifecycle lifecycle, final ServiceEmitter serviceEmitter) + { this.scheduledExecutorService = new ScheduledThreadPoolExecutor( 1, new ThreadFactoryBuilder() diff --git a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LoadingLookup.java b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LoadingLookup.java index 97e0acddaf8..3534b322db3 100644 --- a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LoadingLookup.java +++ b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/LoadingLookup.java @@ -70,7 +70,7 @@ public class LoadingLookup extends LookupExtractor } final String presentVal; try { - presentVal = loadingCache.get(key, new applyCallable(key)); + presentVal = loadingCache.get(key, new ApplyCallable(key)); return 
Strings.emptyToNull(presentVal); } catch (ExecutionException e) { @@ -88,7 +88,7 @@ public class LoadingLookup extends LookupExtractor } final List retList; try { - retList = reverseLoadingCache.get(value, new unapplyCallable(value)); + retList = reverseLoadingCache.get(value, new UnapplyCallable(value)); return retList; } catch (ExecutionException e) { @@ -120,11 +120,14 @@ public class LoadingLookup extends LookupExtractor return LookupExtractionModule.getRandomCacheKey(); } - private class applyCallable implements Callable + private class ApplyCallable implements Callable { private final String key; - public applyCallable(String key) {this.key = key;} + public ApplyCallable(String key) + { + this.key = key; + } @Override public String call() throws Exception @@ -134,11 +137,14 @@ public class LoadingLookup extends LookupExtractor } } - private class unapplyCallable implements Callable> + private class UnapplyCallable implements Callable> { private final String value; - public unapplyCallable(String value) {this.value = value;} + public UnapplyCallable(String value) + { + this.value = value; + } @Override public List call() throws Exception diff --git a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/PollingLookup.java b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/PollingLookup.java index 4bd0ad953b2..c58e0330f3f 100644 --- a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/PollingLookup.java +++ b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/PollingLookup.java @@ -215,7 +215,10 @@ public class PollingLookup extends LookupExtractor private final PollingCache pollingCache; private final AtomicLong refCounts = new AtomicLong(0L); - CacheRefKeeper(PollingCache pollingCache) {this.pollingCache = pollingCache;} + CacheRefKeeper(PollingCache pollingCache) + { + this.pollingCache = pollingCache; + } PollingCache getAndIncrementRef() { diff --git 
a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupFactoryTest.java b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupFactoryTest.java index e59c3cabff8..f69844a9b78 100644 --- a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupFactoryTest.java +++ b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupFactoryTest.java @@ -63,7 +63,8 @@ public class PollingLookupFactoryTest } @Test - public void testGet(){ + public void testGet() + { Assert.assertEquals(pollingLookup, pollingLookupFactory.get()); } diff --git a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupSerDeserTest.java b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupSerDeserTest.java index df994502ee4..ce86d2abd0a 100644 --- a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupSerDeserTest.java +++ b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupSerDeserTest.java @@ -52,9 +52,14 @@ public class PollingLookupSerDeserTest }); } - public PollingLookupSerDeserTest(PollingCacheFactory cacheFactory) {this.cacheFactory = cacheFactory;} - private final PollingCacheFactory cacheFactory ; + private final PollingCacheFactory cacheFactory; private DataFetcher dataFetcher = new MockDataFetcher(); + + public PollingLookupSerDeserTest(PollingCacheFactory cacheFactory) + { + this.cacheFactory = cacheFactory; + } + @Test public void testSerDeser() throws IOException { diff --git a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupTest.java b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupTest.java index c7d6459d5eb..dee07f60332 100644 --- a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupTest.java 
+++ b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/PollingLookupTest.java @@ -69,7 +69,10 @@ public class PollingLookupTest @Override public Iterable fetchAll() { - if (callNumber == 0) {callNumber +=1; return firstLookupMap.entrySet();} + if (callNumber == 0) { + callNumber++; + return firstLookupMap.entrySet(); + } return secondLookupMap.entrySet(); } @@ -108,12 +111,14 @@ public class PollingLookupTest }); } - public PollingLookupTest(PollingCacheFactory pollingCacheFactory) {this.pollingCacheFactory = pollingCacheFactory;} - private final PollingCacheFactory pollingCacheFactory; private final DataFetcher dataFetcher = new MockDataFetcher(); private PollingLookup pollingLookup; + public PollingLookupTest(PollingCacheFactory pollingCacheFactory) + { + this.pollingCacheFactory = pollingCacheFactory; + } @Before public void setUp() throws InterruptedException diff --git a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/cache/loading/LoadingCacheTest.java b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/cache/loading/LoadingCacheTest.java index c9c1f8f7bbb..b15de57b7ed 100644 --- a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/cache/loading/LoadingCacheTest.java +++ b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/cache/loading/LoadingCacheTest.java @@ -59,7 +59,10 @@ public class LoadingCacheTest private final LoadingCache loadingCache; - public LoadingCacheTest(LoadingCache loadingCache) {this.loadingCache = loadingCache;} + public LoadingCacheTest(LoadingCache loadingCache) + { + this.loadingCache = loadingCache; + } @Before public void setUp() throws InterruptedException diff --git a/extensions-core/mysql-metadata-storage/src/main/java/io/druid/metadata/storage/mysql/MySQLConnector.java b/extensions-core/mysql-metadata-storage/src/main/java/io/druid/metadata/storage/mysql/MySQLConnector.java index e8b4f4443d5..3079ad427ea 
100644 --- a/extensions-core/mysql-metadata-storage/src/main/java/io/druid/metadata/storage/mysql/MySQLConnector.java +++ b/extensions-core/mysql-metadata-storage/src/main/java/io/druid/metadata/storage/mysql/MySQLConnector.java @@ -79,7 +79,8 @@ public class MySQLConnector extends SQLMetadataConnector } @Override - public String getQuoteString() { + public String getQuoteString() + { return QUOTE_STRING; } @@ -153,5 +154,8 @@ public class MySQLConnector extends SQLMetadataConnector } @Override - public DBI getDBI() { return dbi; } + public DBI getDBI() + { + return dbi; + } } diff --git a/extensions-core/postgresql-metadata-storage/src/main/java/io/druid/metadata/storage/postgresql/PostgreSQLConnector.java b/extensions-core/postgresql-metadata-storage/src/main/java/io/druid/metadata/storage/postgresql/PostgreSQLConnector.java index 4c72130d26e..78189ce4291 100644 --- a/extensions-core/postgresql-metadata-storage/src/main/java/io/druid/metadata/storage/postgresql/PostgreSQLConnector.java +++ b/extensions-core/postgresql-metadata-storage/src/main/java/io/druid/metadata/storage/postgresql/PostgreSQLConnector.java @@ -65,7 +65,8 @@ public class PostgreSQLConnector extends SQLMetadataConnector } @Override - protected String getPayloadType() { + protected String getPayloadType() + { return PAYLOAD_TYPE; } @@ -76,7 +77,8 @@ public class PostgreSQLConnector extends SQLMetadataConnector } @Override - public String getQuoteString() { + public String getQuoteString() + { return QUOTE_STRING; } @@ -161,7 +163,10 @@ public class PostgreSQLConnector extends SQLMetadataConnector } @Override - public DBI getDBI() { return dbi; } + public DBI getDBI() + { + return dbi; + } @Override protected boolean connectorIsTransientException(Throwable e) diff --git a/extensions-core/protobuf-extensions/src/test/java/io/druid/data/input/protobuf/ProtoTestEventWrapper.java b/extensions-core/protobuf-extensions/src/test/java/io/druid/data/input/protobuf/ProtoTestEventWrapper.java index 
bb16245c5b1..8a63067a76d 100644 --- a/extensions-core/protobuf-extensions/src/test/java/io/druid/data/input/protobuf/ProtoTestEventWrapper.java +++ b/extensions-core/protobuf-extensions/src/test/java/io/druid/data/input/protobuf/ProtoTestEventWrapper.java @@ -819,7 +819,10 @@ public final class ProtoTestEventWrapper .parseWithIOException(PARSER, input, extensionRegistry); } - public Builder newBuilderForType() { return newBuilder(); } + public Builder newBuilderForType() + { + return newBuilder(); + } public static Builder newBuilder() { @@ -1848,7 +1851,10 @@ public final class ProtoTestEventWrapper .parseWithIOException(PARSER, input, extensionRegistry); } - public Builder newBuilderForType() { return newBuilder(); } + public Builder newBuilderForType() + { + return newBuilder(); + } public static Builder newBuilder() { diff --git a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/AWSSessionCredentialsAdapter.java b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/AWSSessionCredentialsAdapter.java index abaa8f03076..7a64a81e7c9 100644 --- a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/AWSSessionCredentialsAdapter.java +++ b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/AWSSessionCredentialsAdapter.java @@ -22,12 +22,14 @@ package io.druid.storage.s3; import com.amazonaws.auth.AWSCredentialsProvider; import org.jets3t.service.security.AWSSessionCredentials; -public class AWSSessionCredentialsAdapter extends AWSSessionCredentials { +public class AWSSessionCredentialsAdapter extends AWSSessionCredentials +{ private final AWSCredentialsProvider provider; - public AWSSessionCredentialsAdapter(AWSCredentialsProvider provider) { + public AWSSessionCredentialsAdapter(AWSCredentialsProvider provider) + { super(null, null, null); - if(provider.getCredentials() instanceof com.amazonaws.auth.AWSSessionCredentials) { + if (provider.getCredentials() instanceof com.amazonaws.auth.AWSSessionCredentials) { 
this.provider = provider; } else { throw new IllegalArgumentException("provider does not contain session credentials"); @@ -35,27 +37,32 @@ public class AWSSessionCredentialsAdapter extends AWSSessionCredentials { } @Override - protected String getTypeName() { + protected String getTypeName() + { return "AWSSessionCredentialsAdapter"; } @Override - public String getVersionPrefix() { + public String getVersionPrefix() + { return "AWSSessionCredentialsAdapter, version: "; } @Override - public String getAccessKey() { + public String getAccessKey() + { return provider.getCredentials().getAWSAccessKeyId(); } @Override - public String getSecretKey() { + public String getSecretKey() + { return provider.getCredentials().getAWSSecretKey(); } @Override - public String getSessionToken() { + public String getSessionToken() + { com.amazonaws.auth.AWSSessionCredentials sessionCredentials = (com.amazonaws.auth.AWSSessionCredentials) provider.getCredentials(); return sessionCredentials.getSessionToken(); diff --git a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentFinder.java b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentFinder.java index fb5a23cbeae..6eb7f79fe7c 100644 --- a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentFinder.java +++ b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentFinder.java @@ -113,11 +113,14 @@ public class S3DataSegmentFinder implements DataSegmentFinder } } } - } catch (ServiceException e) { + } + catch (ServiceException e) { throw new SegmentLoadingException(e, "Problem interacting with S3"); - } catch (IOException e) { + } + catch (IOException e) { throw new SegmentLoadingException(e, "IO exception"); - } catch (Exception e) { + } + catch (Exception e) { Throwables.propagateIfInstanceOf(e, SegmentLoadingException.class); Throwables.propagate(e); } diff --git 
a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentFinderTest.java b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentFinderTest.java index f4043b98c56..64f4aed7427 100644 --- a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentFinderTest.java +++ b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentFinderTest.java @@ -296,7 +296,8 @@ public class S3DataSegmentFinderTest } @Test - public void testFindSegmentsUpdateLoadSpec() throws Exception { + public void testFindSegmentsUpdateLoadSpec() throws Exception + { config.setBucket("amazing"); final DataSegment segmentMissingLoadSpec = DataSegment.builder(SEGMENT_1) .loadSpec(ImmutableMap.of()) diff --git a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java index 4e74a433043..90bcac57b66 100644 --- a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java +++ b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentMoverTest.java @@ -158,7 +158,8 @@ public class S3DataSegmentMoverTest ), ImmutableMap.of("bucket", "DOES NOT EXIST", "baseKey", "baseKey2")); } - private static class MockStorageService extends RestS3Service { + private static class MockStorageService extends RestS3Service + { Map> storage = Maps.newHashMap(); boolean moved = false; @@ -167,7 +168,8 @@ public class S3DataSegmentMoverTest super(null); } - public boolean didMove() { + public boolean didMove() + { return moved; } @@ -187,7 +189,8 @@ public class S3DataSegmentMoverTest object.setStorageClass(S3Object.STORAGE_CLASS_STANDARD); return new S3Object[]{object}; } - } catch (ServiceException e) { + } + catch (ServiceException e) { // return empty list } return new S3Object[]{}; diff --git 
a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPusherTest.java b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPusherTest.java index dee6457a5a2..a8bcbde4e8c 100644 --- a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPusherTest.java +++ b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPusherTest.java @@ -44,14 +44,17 @@ import java.io.File; */ public class S3DataSegmentPusherTest { - private static class ValueContainer { + private static class ValueContainer + { private T value; - public T getValue() { + public T getValue() + { return value; } - public void setValue(T value) { + public void setValue(T value) + { this.value = value; } } @@ -68,9 +71,11 @@ public class S3DataSegmentPusherTest ValueContainer capturedS3SegmentJson = new ValueContainer<>(); EasyMock.expect(s3Client.putObject(EasyMock.anyString(), EasyMock.capture(capturedS3Object))) .andAnswer( - new IAnswer() { + new IAnswer() + { @Override - public S3Object answer() throws Throwable { + public S3Object answer() throws Throwable + { capturedS3SegmentJson.setValue( IOUtils.toString(capturedS3Object.getValue().getDataInputStream(), "utf-8") ); diff --git a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/TestFileSessionCredentialsProvider.java b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/TestFileSessionCredentialsProvider.java index ddd91cadd31..49685336b5b 100644 --- a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/TestFileSessionCredentialsProvider.java +++ b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/TestFileSessionCredentialsProvider.java @@ -33,12 +33,14 @@ import java.nio.charset.StandardCharsets; import static org.junit.Assert.assertEquals; -public class TestFileSessionCredentialsProvider { +public class TestFileSessionCredentialsProvider +{ @Rule public TemporaryFolder folder = new TemporaryFolder(); 
@Test - public void test() throws IOException { + public void test() throws IOException + { File file = folder.newFile(); try (BufferedWriter out = Files.newWriter(file, StandardCharsets.UTF_8)) { out.write("sessionToken=sessionTokenSample\nsecretKey=secretKeySample\naccessKey=accessKeySample\n"); diff --git a/extensions-core/stats/src/main/java/io/druid/query/aggregation/variance/VarianceAggregatorCollector.java b/extensions-core/stats/src/main/java/io/druid/query/aggregation/variance/VarianceAggregatorCollector.java index 4ab6bc25a97..3c388ad8c3a 100644 --- a/extensions-core/stats/src/main/java/io/druid/query/aggregation/variance/VarianceAggregatorCollector.java +++ b/extensions-core/stats/src/main/java/io/druid/query/aggregation/variance/VarianceAggregatorCollector.java @@ -48,7 +48,8 @@ import java.util.Comparator; */ public class VarianceAggregatorCollector { - public static boolean isVariancePop(String estimator) { + public static boolean isVariancePop(String estimator) + { return estimator != null && estimator.equalsIgnoreCase("population"); } diff --git a/hll/src/main/java/io/druid/hll/HyperLogLogCollector.java b/hll/src/main/java/io/druid/hll/HyperLogLogCollector.java index e09f35d602e..3b2a283c005 100644 --- a/hll/src/main/java/io/druid/hll/HyperLogLogCollector.java +++ b/hll/src/main/java/io/druid/hll/HyperLogLogCollector.java @@ -118,7 +118,8 @@ public abstract class HyperLogLogCollector implements Comparable getJobProperties() { return jobProperties; } + public Map getJobProperties() + { + return jobProperties; + } } } diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputFormatTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputFormatTest.java index 59843aaa9bd..c36f1343d4a 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputFormatTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceInputFormatTest.java @@ -174,7 +174,8 @@ public class 
DatasourceInputFormatTest return new TextInputFormat() { @Override - protected boolean isSplitable(FileSystem fs, Path file) { + protected boolean isSplitable(FileSystem fs, Path file) + { return false; } diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/path/HadoopGlobPathSplitterTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/path/HadoopGlobPathSplitterTest.java index 38057266de4..32b23140836 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/path/HadoopGlobPathSplitterTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/path/HadoopGlobPathSplitterTest.java @@ -31,7 +31,8 @@ import java.util.List; public class HadoopGlobPathSplitterTest { @Test - public void testGlobSplitting() throws Exception { + public void testGlobSplitting() throws Exception + { String path = "/a/b/c"; List expected = ImmutableList.of( "/a/b/c" @@ -126,7 +127,8 @@ public class HadoopGlobPathSplitterTest Assert.assertEquals(expected, splitGlob(path)); } - private static List splitGlob(String path) { + private static List splitGlob(String path) + { return Lists.newArrayList(HadoopGlobPathSplitter.splitGlob(path)); } } diff --git a/indexing-service/src/main/java/io/druid/indexing/common/TaskLocation.java b/indexing-service/src/main/java/io/druid/indexing/common/TaskLocation.java index 8fd5b4df6fa..6d4d816bc8c 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/TaskLocation.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/TaskLocation.java @@ -67,7 +67,8 @@ public class TaskLocation } @JsonProperty - public int getTlsPort() { + public int getTlsPort() + { return tlsPort; } diff --git a/indexing-service/src/main/java/io/druid/indexing/common/TaskToolbox.java b/indexing-service/src/main/java/io/druid/indexing/common/TaskToolbox.java index 285f89c7203..2d0ce1ac9b6 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/TaskToolbox.java +++ 
b/indexing-service/src/main/java/io/druid/indexing/common/TaskToolbox.java @@ -252,7 +252,8 @@ public class TaskToolbox return cacheConfig; } - public IndexMergerV9 getIndexMergerV9() { + public IndexMergerV9 getIndexMergerV9() + { return indexMergerV9; } diff --git a/indexing-service/src/main/java/io/druid/indexing/common/actions/RemoteTaskActionClient.java b/indexing-service/src/main/java/io/druid/indexing/common/actions/RemoteTaskActionClient.java index ccc96d83b5d..a908f19fd4e 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/actions/RemoteTaskActionClient.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/actions/RemoteTaskActionClient.java @@ -148,7 +148,8 @@ public class RemoteTaskActionClient implements TaskActionClient } } - private long jitter(long input){ + private long jitter(long input) + { final double jitter = random.nextGaussian() * input / 4.0; long retval = input + (long)jitter; return retval < 0 ? 0 : retval; diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/MergeTaskBase.java b/indexing-service/src/main/java/io/druid/indexing/common/task/MergeTaskBase.java index 6d9292db624..f6d9b3e860c 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/MergeTaskBase.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/MergeTaskBase.java @@ -105,7 +105,8 @@ public abstract class MergeTaskBase extends AbstractFixedIntervalTask this.segments = segments; } - protected void verifyInputSegments(List segments) { + protected void verifyInputSegments(List segments) + { // Verify segments are all unsharded Preconditions.checkArgument( Iterables.size( diff --git a/indexing-service/src/main/java/io/druid/indexing/common/tasklogs/TaskRunnerTaskLogStreamer.java b/indexing-service/src/main/java/io/druid/indexing/common/tasklogs/TaskRunnerTaskLogStreamer.java index 24563f3da8d..f2905f3e336 100644 --- 
a/indexing-service/src/main/java/io/druid/indexing/common/tasklogs/TaskRunnerTaskLogStreamer.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/tasklogs/TaskRunnerTaskLogStreamer.java @@ -35,9 +35,8 @@ public class TaskRunnerTaskLogStreamer implements TaskLogStreamer private final TaskMaster taskMaster; @Inject - public TaskRunnerTaskLogStreamer( - final TaskMaster taskMaster - ) { + public TaskRunnerTaskLogStreamer(final TaskMaster taskMaster) + { this.taskMaster = taskMaster; } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/HeapMemoryTaskStorage.java b/indexing-service/src/main/java/io/druid/indexing/overlord/HeapMemoryTaskStorage.java index 52237f654a7..6181a722e1f 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/HeapMemoryTaskStorage.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/HeapMemoryTaskStorage.java @@ -85,7 +85,8 @@ public class HeapMemoryTaskStorage implements TaskStorage log.info("Inserting task %s with status: %s", task.getId(), status); tasks.put(task.getId(), new TaskStuff(task, status, new DateTime())); - } finally { + } + finally { giant.unlock(); } } @@ -102,7 +103,8 @@ public class HeapMemoryTaskStorage implements TaskStorage } else { return Optional.absent(); } - } finally { + } + finally { giant.unlock(); } } @@ -120,7 +122,8 @@ public class HeapMemoryTaskStorage implements TaskStorage Preconditions.checkState(tasks.get(taskid).getStatus().isRunnable(), "Task status must be runnable: %s", taskid); log.info("Updating task %s to status: %s", taskid, status); tasks.put(taskid, tasks.get(taskid).withStatus(status)); - } finally { + } + finally { giant.unlock(); } } @@ -137,7 +140,8 @@ public class HeapMemoryTaskStorage implements TaskStorage } else { return Optional.absent(); } - } finally { + } + finally { giant.unlock(); } } @@ -155,7 +159,8 @@ public class HeapMemoryTaskStorage implements TaskStorage } } return listBuilder.build(); - } finally { + } + finally { 
giant.unlock(); } } @@ -176,13 +181,14 @@ public class HeapMemoryTaskStorage implements TaskStorage return a.getCreatedDate().compareTo(b.getCreatedDate()); } }.reverse(); - for(final TaskStuff taskStuff : createdDateDesc.sortedCopy(tasks.values())) { + for (final TaskStuff taskStuff : createdDateDesc.sortedCopy(tasks.values())) { if(taskStuff.getStatus().isComplete() && taskStuff.getCreatedDate().getMillis() > recent) { returns.add(taskStuff.getStatus()); } } return returns; - } finally { + } + finally { giant.unlock(); } } @@ -195,7 +201,8 @@ public class HeapMemoryTaskStorage implements TaskStorage try { Preconditions.checkNotNull(taskLock, "taskLock"); taskLocks.put(taskid, taskLock); - } finally { + } + finally { giant.unlock(); } } @@ -208,7 +215,8 @@ public class HeapMemoryTaskStorage implements TaskStorage try { Preconditions.checkNotNull(taskLock, "taskLock"); taskLocks.remove(taskid, taskLock); - } finally { + } + finally { giant.unlock(); } } @@ -220,7 +228,8 @@ public class HeapMemoryTaskStorage implements TaskStorage try { return ImmutableList.copyOf(taskLocks.get(taskid)); - } finally { + } + finally { giant.unlock(); } } @@ -232,7 +241,8 @@ public class HeapMemoryTaskStorage implements TaskStorage try { taskActions.put(task.getId(), taskAction); - } finally { + } + finally { giant.unlock(); } } @@ -244,7 +254,8 @@ public class HeapMemoryTaskStorage implements TaskStorage try { return ImmutableList.copyOf(taskActions.get(taskid)); - } finally { + } + finally { giant.unlock(); } } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/ImmutableWorkerInfo.java b/indexing-service/src/main/java/io/druid/indexing/overlord/ImmutableWorkerInfo.java index 67ac2967149..ca9c72080ec 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/ImmutableWorkerInfo.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/ImmutableWorkerInfo.java @@ -74,7 +74,8 @@ public class ImmutableWorkerInfo return availabilityGroups; } - 
public int getAvailableCapacity() { + public int getAvailableCapacity() + { return getWorker().getCapacity() - getCurrCapacityUsed(); } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java b/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java index 4cc533bebfa..64bdf7a5601 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java @@ -1097,9 +1097,10 @@ public class RemoteTaskRunner implements WorkerTaskRunner, TaskLogStreamer new Runnable() { @Override - public void run() { + public void run() + { long currentTimeStamp = System.currentTimeMillis(); - for(ZkWorker zkWorker : blackListedWorkers){ + for (ZkWorker zkWorker : blackListedWorkers) { cleanBlackListedNode(zkWorker, currentTimeStamp); } } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/TaskLockbox.java b/indexing-service/src/main/java/io/druid/indexing/overlord/TaskLockbox.java index eae79c8add1..304d3d4105d 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/TaskLockbox.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/TaskLockbox.java @@ -183,7 +183,8 @@ public class TaskLockbox activeTasks.size(), storedLocks.size() - taskLockCount ); - } finally { + } + finally { giant.unlock(); } } @@ -283,7 +284,8 @@ public class TaskLockbox try { taskStorage.addLock(task.getId(), posseToUse.getTaskLock()); return Optional.of(posseToUse.getTaskLock()); - } catch(Exception e) { + } + catch (Exception e) { log.makeAlert("Failed to persist lock in storage") .addData("task", task.getId()) .addData("dataSource", posseToUse.getTaskLock().getDataSource()) @@ -402,7 +404,8 @@ public class TaskLockbox } } ); - } finally { + } + finally { giant.unlock(); } } @@ -449,7 +452,8 @@ public class TaskLockbox // Remove lock from storage. If it cannot be removed, just ignore the failure. 
try { taskStorage.removeLock(task.getId(), taskLock); - } catch(Exception e) { + } + catch (Exception e) { log.makeAlert(e, "Failed to clean up lock from storage") .addData("task", task.getId()) .addData("dataSource", taskLock.getDataSource()) @@ -460,13 +464,14 @@ public class TaskLockbox } } - if(!removed) { + if (!removed) { log.makeAlert("Lock release without acquire") .addData("task", task.getId()) .addData("interval", interval) .emit(); } - } finally { + } + finally { giant.unlock(); } } @@ -598,7 +603,8 @@ public class TaskLockbox try { log.info("Adding task[%s] to activeTasks", task.getId()); activeTasks.add(task.getId()); - } finally { + } + finally { giant.unlock(); } } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/TaskQueue.java b/indexing-service/src/main/java/io/druid/indexing/overlord/TaskQueue.java index 1fabf2bf719..0de0b3cba1a 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/TaskQueue.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/TaskQueue.java @@ -329,13 +329,15 @@ public class TaskQueue } // Should always be called after taking giantLock - private void addTaskInternal(final Task task){ + private void addTaskInternal(final Task task) + { tasks.add(task); taskLockbox.add(task); } // Should always be called after taking giantLock - private void removeTaskInternal(final Task task){ + private void removeTaskInternal(final Task task) + { taskLockbox.remove(task); tasks.remove(task); } @@ -563,7 +565,8 @@ public class TaskQueue } } - private static Map toTaskIDMap(List taskList){ + private static Map toTaskIDMap(List taskList) + { Map rv = Maps.newHashMap(); for(Task task : taskList){ rv.put(task.getId(), task); diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/ZkWorker.java b/indexing-service/src/main/java/io/druid/indexing/overlord/ZkWorker.java index 366825878b0..5ef9ea0dacb 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/ZkWorker.java 
+++ b/indexing-service/src/main/java/io/druid/indexing/overlord/ZkWorker.java @@ -170,15 +170,18 @@ public class ZkWorker implements Closeable statusCache.close(); } - public int getCountinouslyFailedTasksCount() { + public int getCountinouslyFailedTasksCount() + { return countinouslyFailedTasksCount.get(); } - public void resetCountinouslyFailedTasksCount() { + public void resetCountinouslyFailedTasksCount() + { this.countinouslyFailedTasksCount.set(0); } - public void incrementCountinouslyFailedTasksCount() { + public void incrementCountinouslyFailedTasksCount() + { this.countinouslyFailedTasksCount.incrementAndGet(); } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/config/RemoteTaskRunnerConfig.java b/indexing-service/src/main/java/io/druid/indexing/overlord/config/RemoteTaskRunnerConfig.java index 10d6760f976..535d6f8f29a 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/config/RemoteTaskRunnerConfig.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/config/RemoteTaskRunnerConfig.java @@ -72,7 +72,8 @@ public class RemoteTaskRunnerConfig extends WorkerTaskRunnerConfig return taskAssignmentTimeout; } - public Period getTaskCleanupTimeout(){ + public Period getTaskCleanupTimeout() + { return taskCleanupTimeout; } @@ -92,35 +93,43 @@ public class RemoteTaskRunnerConfig extends WorkerTaskRunnerConfig return pendingTasksRunnerNumThreads; } - public int getMaxRetriesBeforeBlacklist() { + public int getMaxRetriesBeforeBlacklist() + { return maxRetriesBeforeBlacklist; } - public void setMaxRetriesBeforeBlacklist(int maxRetriesBeforeBlacklist) { + public void setMaxRetriesBeforeBlacklist(int maxRetriesBeforeBlacklist) + { this.maxRetriesBeforeBlacklist = maxRetriesBeforeBlacklist; } - public Period getWorkerBlackListBackoffTime() { + public Period getWorkerBlackListBackoffTime() + { return workerBlackListBackoffTime; } - public void setWorkerBlackListBackoffTime(Period taskBlackListBackoffTime) { + public 
void setWorkerBlackListBackoffTime(Period taskBlackListBackoffTime) + { this.workerBlackListBackoffTime = taskBlackListBackoffTime; } - public Period getWorkerBlackListCleanupPeriod() { + public Period getWorkerBlackListCleanupPeriod() + { return workerBlackListCleanupPeriod; } - public void setWorkerBlackListCleanupPeriod(Period workerBlackListCleanupPeriod) { + public void setWorkerBlackListCleanupPeriod(Period workerBlackListCleanupPeriod) + { this.workerBlackListCleanupPeriod = workerBlackListCleanupPeriod; } - public double getMaxPercentageBlacklistWorkers() { + public double getMaxPercentageBlacklistWorkers() + { return maxPercentageBlacklistWorkers; } - public void setMaxPercentageBlacklistWorkers(int maxPercentageBlacklistWorkers) { + public void setMaxPercentageBlacklistWorkers(int maxPercentageBlacklistWorkers) + { this.maxPercentageBlacklistWorkers = maxPercentageBlacklistWorkers; } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/helpers/TaskLogAutoCleanerConfig.java b/indexing-service/src/main/java/io/druid/indexing/overlord/helpers/TaskLogAutoCleanerConfig.java index e400e94c4f4..856e076cf8d 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/helpers/TaskLogAutoCleanerConfig.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/helpers/TaskLogAutoCleanerConfig.java @@ -47,7 +47,8 @@ public class TaskLogAutoCleanerConfig @JsonProperty("initialDelay") Long initialDelay, @JsonProperty("delay") Long delay, @JsonProperty("durationToRetain") Long durationToRetain - ){ + ) + { if (enabled) { Preconditions.checkNotNull(durationToRetain, "durationToRetain must be provided."); } diff --git a/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycleConfig.java b/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycleConfig.java index fbc9aa8403d..e25fe5739f9 100644 --- 
a/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycleConfig.java +++ b/indexing-service/src/main/java/io/druid/indexing/worker/executor/ExecutorLifecycleConfig.java @@ -81,8 +81,7 @@ public class ExecutorLifecycleConfig { if ("stdin".equals(parentStreamName)) { return System.in; - } - else { + } else { throw new ISE("Unknown stream name[%s]", parentStreamName); } } diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLockboxTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLockboxTest.java index e10a6250b13..f58b6af5d31 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLockboxTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLockboxTest.java @@ -220,7 +220,8 @@ public class TaskLockboxTest Assert.assertEquals(beforeLocksInStorage, afterLocksInStorage); } - public static class SomeTask extends NoopTask { + public static class SomeTask extends NoopTask + { public SomeTask( @JsonProperty("id") String id, @@ -241,7 +242,9 @@ public class TaskLockboxTest } @Override - public String getGroupId() { return "someGroupId";} - + public String getGroupId() + { + return "someGroupId"; + } } } diff --git a/integration-tests/src/main/java/org/testng/TestNG.java b/integration-tests/src/main/java/org/testng/TestNG.java index 9535883b772..f375cea6298 100644 --- a/integration-tests/src/main/java/org/testng/TestNG.java +++ b/integration-tests/src/main/java/org/testng/TestNG.java @@ -1148,25 +1148,21 @@ public class TestNG m_start = System.currentTimeMillis(); - // - // Slave mode - // if (m_slavefileName != null) { + // + // Slave mode + // SuiteSlave slave = new SuiteSlave(m_slavefileName, this); slave.waitForSuites(); - } - - // - // Regular mode - // - else if (m_masterfileName == null) { + } else if (m_masterfileName == null) { + // + // Regular mode + // suiteRunners = runSuitesLocally(); - } - - // - // Master mode - // - else { + } else { + // + // 
Master mode + // SuiteDispatcher dispatcher = new SuiteDispatcher(m_masterfileName); suiteRunners = dispatcher.dispatch( getConfiguration(), diff --git a/integration-tests/src/test/java/io/druid/tests/indexer/AbstractIndexerTest.java b/integration-tests/src/test/java/io/druid/tests/indexer/AbstractIndexerTest.java index 1ff4ab3d831..c74c46e733d 100644 --- a/integration-tests/src/test/java/io/druid/tests/indexer/AbstractIndexerTest.java +++ b/integration-tests/src/test/java/io/druid/tests/indexer/AbstractIndexerTest.java @@ -88,7 +88,8 @@ public abstract class AbstractIndexerTest waitForAllTasksToComplete(); } - protected void waitForAllTasksToComplete(){ + protected void waitForAllTasksToComplete() + { RetryUtil.retryUntilTrue( new Callable() { diff --git a/java-util/src/main/java/io/druid/java/util/common/collect/Utils.java b/java-util/src/main/java/io/druid/java/util/common/collect/Utils.java index c1ebe384de4..aae7b881362 100644 --- a/java-util/src/main/java/io/druid/java/util/common/collect/Utils.java +++ b/java-util/src/main/java/io/druid/java/util/common/collect/Utils.java @@ -29,7 +29,8 @@ import java.util.Map; public class Utils { - public static Map zipMap(K[] keys, V[] values) { + public static Map zipMap(K[] keys, V[] values) + { Preconditions.checkArgument(values.length == keys.length, "number of values[%s] different than number of keys[%s]", values.length, keys.length); @@ -45,8 +46,7 @@ public class Utils Map retVal = new LinkedHashMap<>(); - for(int i = 0; i < values.length; ++i) - { + for(int i = 0; i < values.length; ++i) { retVal.put(keys[i], values[i]); } @@ -55,7 +55,8 @@ public class Utils /** Create a Map from iterables of keys and values. Will throw an exception if there are more keys than values, * or more values than keys. 
*/ - public static Map zipMap(Iterable keys, Iterable values) { + public static Map zipMap(Iterable keys, Iterable values) + { Map retVal = new LinkedHashMap<>(); Iterator keysIter = keys.iterator(); @@ -90,11 +91,9 @@ public class Utils while (keysIter.hasNext()) { final K key = keysIter.next(); - if(valsIter.hasNext()) - { + if(valsIter.hasNext()) { retVal.put(key, valsIter.next()); - } - else { + } else { break; } diff --git a/java-util/src/main/java/io/druid/java/util/common/granularity/Granularity.java b/java-util/src/main/java/io/druid/java/util/common/granularity/Granularity.java index 5060eb4861c..1ec439dff89 100644 --- a/java-util/src/main/java/io/druid/java/util/common/granularity/Granularity.java +++ b/java-util/src/main/java/io/druid/java/util/common/granularity/Granularity.java @@ -121,7 +121,8 @@ public abstract class Granularity implements Cacheable public abstract DateTime toDate(String filePath, Formatter formatter); - public DateTime bucketEnd(DateTime time) { + public DateTime bucketEnd(DateTime time) + { return increment(bucketStart(time)); } diff --git a/java-util/src/main/java/io/druid/java/util/common/granularity/GranularityType.java b/java-util/src/main/java/io/druid/java/util/common/granularity/GranularityType.java index 89dfba515f5..a74efb7ec3d 100644 --- a/java-util/src/main/java/io/druid/java/util/common/granularity/GranularityType.java +++ b/java-util/src/main/java/io/druid/java/util/common/granularity/GranularityType.java @@ -203,8 +203,7 @@ public enum GranularityType case 1: if (vals[index] == 3) { return GranularityType.QUARTER; - } - else if (vals[index] == 1) { + } else if (vals[index] == 1) { return GranularityType.MONTH; } break; @@ -215,25 +214,20 @@ public enum GranularityType case 4: if (vals[index] == 6) { return GranularityType.SIX_HOUR; - } - else if (vals[index] == 1) { + } else if (vals[index] == 1) { return GranularityType.HOUR; } break; case 5: if (vals[index] == 30) { return GranularityType.THIRTY_MINUTE; - } - else 
if (vals[index] == 15) { + } else if (vals[index] == 15) { return GranularityType.FIFTEEN_MINUTE; - } - else if (vals[index] == 10) { + } else if (vals[index] == 10) { return GranularityType.TEN_MINUTE; - } - else if (vals[index] == 5) { + } else if (vals[index] == 5) { return GranularityType.FIVE_MINUTE; - } - else if (vals[index] == 1) { + } else if (vals[index] == 1) { return GranularityType.MINUTE; } break; diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/FilteringAccumulator.java b/java-util/src/main/java/io/druid/java/util/common/guava/FilteringAccumulator.java index 52c19433c89..7881ca654a8 100644 --- a/java-util/src/main/java/io/druid/java/util/common/guava/FilteringAccumulator.java +++ b/java-util/src/main/java/io/druid/java/util/common/guava/FilteringAccumulator.java @@ -28,10 +28,8 @@ public class FilteringAccumulator implements Accumulator private final Predicate pred; private final Accumulator accumulator; - public FilteringAccumulator( - Predicate pred, - Accumulator accumulator - ) { + public FilteringAccumulator(Predicate pred, Accumulator accumulator) + { this.pred = pred; this.accumulator = accumulator; } diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/FilteringYieldingAccumulator.java b/java-util/src/main/java/io/druid/java/util/common/guava/FilteringYieldingAccumulator.java index 997ae377371..d9b87598da3 100644 --- a/java-util/src/main/java/io/druid/java/util/common/guava/FilteringYieldingAccumulator.java +++ b/java-util/src/main/java/io/druid/java/util/common/guava/FilteringYieldingAccumulator.java @@ -29,10 +29,8 @@ public class FilteringYieldingAccumulator extends YieldingAccumulato private volatile boolean didSomething = false; - public FilteringYieldingAccumulator( - Predicate pred, - YieldingAccumulator accumulator - ) { + public FilteringYieldingAccumulator(Predicate pred, YieldingAccumulator accumulator) + { this.pred = pred; this.accumulator = accumulator; } diff --git 
a/java-util/src/main/java/io/druid/java/util/common/guava/MappingAccumulator.java b/java-util/src/main/java/io/druid/java/util/common/guava/MappingAccumulator.java index 74a5ff48325..27b0f7a4fdc 100644 --- a/java-util/src/main/java/io/druid/java/util/common/guava/MappingAccumulator.java +++ b/java-util/src/main/java/io/druid/java/util/common/guava/MappingAccumulator.java @@ -28,10 +28,8 @@ public class MappingAccumulator implements Accumula private final Function fn; private final Accumulator accumulator; - public MappingAccumulator( - Function fn, - Accumulator accumulator - ) { + public MappingAccumulator(Function fn, Accumulator accumulator) + { this.fn = fn; this.accumulator = accumulator; } diff --git a/java-util/src/main/java/io/druid/java/util/common/guava/MappingYieldingAccumulator.java b/java-util/src/main/java/io/druid/java/util/common/guava/MappingYieldingAccumulator.java index d069f001bae..eab774b00f0 100644 --- a/java-util/src/main/java/io/druid/java/util/common/guava/MappingYieldingAccumulator.java +++ b/java-util/src/main/java/io/druid/java/util/common/guava/MappingYieldingAccumulator.java @@ -31,7 +31,8 @@ public class MappingYieldingAccumulator extends Yie public MappingYieldingAccumulator( Function fn, YieldingAccumulator baseAccumulator - ) { + ) + { this.fn = fn; this.baseAccumulator = baseAccumulator; } diff --git a/java-util/src/main/java/io/druid/java/util/common/io/Closer.java b/java-util/src/main/java/io/druid/java/util/common/io/Closer.java index d5ff4780cf3..3212ff45410 100644 --- a/java-util/src/main/java/io/druid/java/util/common/io/Closer.java +++ b/java-util/src/main/java/io/druid/java/util/common/io/Closer.java @@ -89,12 +89,14 @@ import java.util.Deque; * @since Guava 14.0 */ // Coffee's for {@link Closer closers} only. -public final class Closer implements Closeable { +public final class Closer implements Closeable +{ /** * Creates a new {@link Closer}. 
*/ - public static Closer create() { + public static Closer create() + { return new Closer(); } @@ -113,7 +115,8 @@ public final class Closer implements Closeable { * @return the given {@code closeable} */ // close. this word no longer has any meaning to me. - public C register(@Nullable C closeable) { + public C register(@Nullable C closeable) + { if (closeable != null) { stack.addFirst(closeable); } @@ -134,7 +137,8 @@ public final class Closer implements Closeable { * @return this method does not return; it always throws * @throws IOException when the given throwable is an IOException */ - public RuntimeException rethrow(Throwable e) throws IOException { + public RuntimeException rethrow(Throwable e) throws IOException + { Preconditions.checkNotNull(e); thrown = e; Throwables.propagateIfPossible(e, IOException.class); @@ -155,8 +159,8 @@ public final class Closer implements Closeable { * @throws IOException when the given throwable is an IOException * @throws X when the given throwable is of the declared type X */ - public RuntimeException rethrow(Throwable e, Class declaredType) - throws IOException, X { + public RuntimeException rethrow(Throwable e, Class declaredType) throws IOException, X + { Preconditions.checkNotNull(e); thrown = e; Throwables.propagateIfPossible(e, IOException.class); @@ -180,7 +184,10 @@ public final class Closer implements Closeable { * @throws X2 when the given throwable is of the declared type X2 */ public RuntimeException rethrow( - Throwable e, Class declaredType1, Class declaredType2) throws IOException, X1, X2 { + Throwable e, Class declaredType1, + Class declaredType2 + ) throws IOException, X1, X2 + { Preconditions.checkNotNull(e); thrown = e; Throwables.propagateIfPossible(e, IOException.class); @@ -196,7 +203,8 @@ public final class Closer implements Closeable { * additional exceptions that are thrown after that will be suppressed. 
*/ @Override - public void close() throws IOException { + public void close() throws IOException + { Throwable throwable = thrown; // close closeables in LIFO order @@ -204,7 +212,8 @@ public final class Closer implements Closeable { Closeable closeable = stack.removeFirst(); try { closeable.close(); - } catch (Throwable e) { + } + catch (Throwable e) { if (throwable == null) { throwable = e; } else { @@ -219,8 +228,9 @@ public final class Closer implements Closeable { } } - private void suppress(Throwable thrown, Throwable suppressed) { - if(thrown != suppressed) { + private void suppress(Throwable thrown, Throwable suppressed) + { + if (thrown != suppressed) { thrown.addSuppressed(suppressed); } } diff --git a/java-util/src/main/java/io/druid/java/util/common/io/smoosh/SmooshedFileMapper.java b/java-util/src/main/java/io/druid/java/util/common/io/smoosh/SmooshedFileMapper.java index 1aee6aa1742..bb95dce7dc9 100644 --- a/java-util/src/main/java/io/druid/java/util/common/io/smoosh/SmooshedFileMapper.java +++ b/java-util/src/main/java/io/druid/java/util/common/io/smoosh/SmooshedFileMapper.java @@ -145,7 +145,8 @@ public class SmooshedFileMapper implements Closeable for (MappedByteBuffer mappedByteBuffer : buffersList) { try { ByteBufferUtils.unmap(mappedByteBuffer); - } catch (Throwable t) { + } + catch (Throwable t) { if (thrown == null) { thrown = t; } else { diff --git a/java-util/src/test/java/io/druid/java/util/common/GranularityTest.java b/java-util/src/test/java/io/druid/java/util/common/GranularityTest.java index 8ac7ea4046c..7df96471115 100644 --- a/java-util/src/test/java/io/druid/java/util/common/GranularityTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/GranularityTest.java @@ -34,7 +34,8 @@ import org.junit.Test; import java.util.Iterator; import java.util.NoSuchElementException; -public class GranularityTest { +public class GranularityTest +{ final Granularity SECOND = Granularities.SECOND; final Granularity MINUTE = 
Granularities.MINUTE; @@ -47,7 +48,8 @@ public class GranularityTest { final Granularity YEAR = Granularities.YEAR; @Test - public void testHiveFormat() { + public void testHiveFormat() + { PathDate[] secondChecks = { new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0), null, "dt=2011-03-15-20-50-43/Test0"), new PathDate(new DateTime(2011, 3, 15, 20, 50, 43, 0), null, "/dt=2011-03-15-20-50-43/Test0"), @@ -123,7 +125,8 @@ public class GranularityTest { } @Test - public void testFifteenMinuteToDate() { + public void testFifteenMinuteToDate() + { PathDate[] minuteChecks = { new PathDate(new DateTime(2011, 3, 15, 20, 45, 0, 0), null, "y=2011/m=03/d=15/H=20/M=50/S=43/Test0"), diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/WithEffectSequenceTest.java b/java-util/src/test/java/io/druid/java/util/common/guava/WithEffectSequenceTest.java index 3f91b6bc748..582716e2150 100644 --- a/java-util/src/test/java/io/druid/java/util/common/guava/WithEffectSequenceTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/guava/WithEffectSequenceTest.java @@ -78,7 +78,8 @@ public class WithEffectSequenceTest } @Test - public void testEffectExecutedIfWrappedSequenceThrowsExceptionFromClose() { + public void testEffectExecutedIfWrappedSequenceThrowsExceptionFromClose() + { Sequence baseSeq = Sequences.simple(Arrays.asList(1, 2, 3)); Sequence throwingSeq = Sequences.withBaggage(baseSeq, new Closeable() { @@ -101,7 +102,8 @@ public class WithEffectSequenceTest try { Sequences.toList(seqWithEffect, new ArrayList()); Assert.fail("expected RuntimeException"); - } catch (RuntimeException e) { + } + catch (RuntimeException e) { // expected Assert.assertTrue(effectExecuted.get()); } diff --git a/java-util/src/test/java/io/druid/java/util/common/parsers/TimestampParserTest.java b/java-util/src/test/java/io/druid/java/util/common/parsers/TimestampParserTest.java index 379900b1901..04fb245c3e1 100644 --- 
a/java-util/src/test/java/io/druid/java/util/common/parsers/TimestampParserTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/parsers/TimestampParserTest.java @@ -28,13 +28,15 @@ public class TimestampParserTest { @Test - public void testStripQuotes() throws Exception { + public void testStripQuotes() throws Exception + { Assert.assertEquals("hello world", ParserUtils.stripQuotes("\"hello world\"")); Assert.assertEquals("hello world", ParserUtils.stripQuotes(" \" hello world \" ")); } @Test - public void testAuto() throws Exception { + public void testAuto() throws Exception + { final Function parser = TimestampParser.createObjectTimestampParser("auto"); Assert.assertEquals(new DateTime("2009-02-13T23:31:30Z"), parser.apply("1234567890000")); Assert.assertEquals(new DateTime("2009-02-13T23:31:30Z"), parser.apply("2009-02-13T23:31:30Z")); @@ -42,14 +44,16 @@ public class TimestampParserTest } @Test - public void testRuby() throws Exception { + public void testRuby() throws Exception + { final Function parser = TimestampParser.createObjectTimestampParser("ruby"); Assert.assertEquals(new DateTime("2013-01-16T15:41:47+01:00"), parser.apply("1358347307.435447")); Assert.assertEquals(new DateTime("2013-01-16T15:41:47+01:00"), parser.apply(1358347307.435447D)); } @Test - public void testNano() throws Exception { + public void testNano() throws Exception + { String timeNsStr = "1427504794977098494"; DateTime expectedDt = new DateTime("2015-3-28T01:06:34.977Z"); final Function parser = TimestampParser.createObjectTimestampParser("nano"); diff --git a/pom.xml b/pom.xml index 52b59b26d57..fde1ef36370 100644 --- a/pom.xml +++ b/pom.xml @@ -877,7 +877,7 @@ com.puppycrawl.tools checkstyle - 6.19 + 8.0 diff --git a/processing/src/main/java/io/druid/jackson/DruidDefaultSerializersModule.java b/processing/src/main/java/io/druid/jackson/DruidDefaultSerializersModule.java index 898b0119d11..632777f8df6 100644 --- 
a/processing/src/main/java/io/druid/jackson/DruidDefaultSerializersModule.java +++ b/processing/src/main/java/io/druid/jackson/DruidDefaultSerializersModule.java @@ -59,7 +59,8 @@ public class DruidDefaultSerializersModule extends SimpleModule String tzId = jp.getText(); try { return DateTimeZone.forID(tzId); - } catch(IllegalArgumentException e) { + } + catch (IllegalArgumentException e) { // also support Java timezone strings return DateTimeZone.forTimeZone(TimeZone.getTimeZone(tzId)); } @@ -128,7 +129,8 @@ public class DruidDefaultSerializersModule extends SimpleModule yielder = yielder.next(null); } jgen.writeEndArray(); - } finally { + } + finally { yielder.close(); } } diff --git a/processing/src/main/java/io/druid/jackson/JodaStuff.java b/processing/src/main/java/io/druid/jackson/JodaStuff.java index b4c9e4f762b..380e9ef1a82 100644 --- a/processing/src/main/java/io/druid/jackson/JodaStuff.java +++ b/processing/src/main/java/io/druid/jackson/JodaStuff.java @@ -87,7 +87,8 @@ class JodaStuff private static class DateTimeDeserializer extends StdDeserializer { - public DateTimeDeserializer() { + public DateTimeDeserializer() + { super(DateTime.class); } diff --git a/processing/src/main/java/io/druid/jackson/SegmentizerModule.java b/processing/src/main/java/io/druid/jackson/SegmentizerModule.java index e417034ac5f..1056d0884cc 100644 --- a/processing/src/main/java/io/druid/jackson/SegmentizerModule.java +++ b/processing/src/main/java/io/druid/jackson/SegmentizerModule.java @@ -25,7 +25,8 @@ import io.druid.segment.loading.MMappedQueryableSegmentizerFactory; public class SegmentizerModule extends SimpleModule { - public SegmentizerModule() { + public SegmentizerModule() + { super("SegmentizerModule"); registerSubtypes(new NamedType(MMappedQueryableSegmentizerFactory.class, "mMapSegmentFactory")); } diff --git a/processing/src/main/java/io/druid/query/AsyncQueryRunner.java b/processing/src/main/java/io/druid/query/AsyncQueryRunner.java index 182d18b1f0d..3d38964184e 
100644 --- a/processing/src/main/java/io/druid/query/AsyncQueryRunner.java +++ b/processing/src/main/java/io/druid/query/AsyncQueryRunner.java @@ -40,7 +40,8 @@ public class AsyncQueryRunner implements QueryRunner private final ListeningExecutorService executor; private final QueryWatcher queryWatcher; - public AsyncQueryRunner(QueryRunner baseRunner, ExecutorService executor, QueryWatcher queryWatcher) { + public AsyncQueryRunner(QueryRunner baseRunner, ExecutorService executor, QueryWatcher queryWatcher) + { this.baseRunner = baseRunner; this.executor = MoreExecutors.listeningDecorator(executor); this.queryWatcher = queryWatcher; @@ -52,7 +53,8 @@ public class AsyncQueryRunner implements QueryRunner final Query query = queryPlus.getQuery(); final int priority = QueryContexts.getPriority(query); final QueryPlus threadSafeQueryPlus = queryPlus.withoutThreadUnsafeState(); - final ListenableFuture> future = executor.submit(new AbstractPrioritizedCallable>(priority) + final ListenableFuture> future = executor.submit( + new AbstractPrioritizedCallable>(priority) { @Override public Sequence call() throws Exception @@ -61,7 +63,8 @@ public class AsyncQueryRunner implements QueryRunner //run() method and resulting sequence accumulate/yield is fast. 
return baseRunner.run(threadSafeQueryPlus, responseContext); } - }); + } + ); queryWatcher.registerQuery(query, future); return new LazySequence<>(new Supplier>() @@ -75,7 +78,8 @@ public class AsyncQueryRunner implements QueryRunner } else { return future.get(); } - } catch (ExecutionException | InterruptedException | TimeoutException ex) { + } + catch (ExecutionException | InterruptedException | TimeoutException ex) { throw Throwables.propagate(ex); } } diff --git a/processing/src/main/java/io/druid/query/CPUTimeMetricQueryRunner.java b/processing/src/main/java/io/druid/query/CPUTimeMetricQueryRunner.java index 5f90615fba4..bce67514d97 100644 --- a/processing/src/main/java/io/druid/query/CPUTimeMetricQueryRunner.java +++ b/processing/src/main/java/io/druid/query/CPUTimeMetricQueryRunner.java @@ -72,7 +72,8 @@ public class CPUTimeMetricQueryRunner implements QueryRunner final long start = VMUtils.getCurrentThreadCpuTime(); try { return sequenceProcessing.get(); - } finally { + } + finally { cpuTimeAccumulator.addAndGet(VMUtils.getCurrentThreadCpuTime() - start); } } diff --git a/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java b/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java index 1312fb1b024..957a91ac6e2 100644 --- a/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java +++ b/processing/src/main/java/io/druid/query/ChainedExecutionQueryRunner.java @@ -133,9 +133,11 @@ public class ChainedExecutionQueryRunner implements QueryRunner } return retVal; - } catch (QueryInterruptedException e) { + } + catch (QueryInterruptedException e) { throw Throwables.propagate(e); - } catch (Exception e) { + } + catch (Exception e) { log.error(e, "Exception with one of the sequences!"); throw Throwables.propagate(e); } @@ -157,17 +159,21 @@ public class ChainedExecutionQueryRunner implements QueryRunner futures.get(QueryContexts.getTimeout(query), TimeUnit.MILLISECONDS) : futures.get() ).iterator(); - } catch 
(InterruptedException e) { + } + catch (InterruptedException e) { log.warn(e, "Query interrupted, cancelling pending results, query id [%s]", query.getId()); futures.cancel(true); throw new QueryInterruptedException(e); - } catch (CancellationException e) { + } + catch (CancellationException e) { throw new QueryInterruptedException(e); - } catch (TimeoutException e) { + } + catch (TimeoutException e) { log.info("Query timeout, cancelling pending results for query id [%s]", query.getId()); futures.cancel(true); throw new QueryInterruptedException(e); - } catch (ExecutionException e) { + } + catch (ExecutionException e) { throw Throwables.propagate(e.getCause()); } } diff --git a/processing/src/main/java/io/druid/query/ConcatQueryRunner.java b/processing/src/main/java/io/druid/query/ConcatQueryRunner.java index 2237ad8c5b1..cbc5aa6dead 100644 --- a/processing/src/main/java/io/druid/query/ConcatQueryRunner.java +++ b/processing/src/main/java/io/druid/query/ConcatQueryRunner.java @@ -31,9 +31,8 @@ public class ConcatQueryRunner implements QueryRunner { private final Sequence> queryRunners; - public ConcatQueryRunner( - Sequence> queryRunners - ) { + public ConcatQueryRunner(Sequence> queryRunners) + { this.queryRunners = queryRunners; } diff --git a/processing/src/main/java/io/druid/query/DruidProcessingConfig.java b/processing/src/main/java/io/druid/query/DruidProcessingConfig.java index f6c7c9fdd9e..cb826d9d145 100644 --- a/processing/src/main/java/io/druid/query/DruidProcessingConfig.java +++ b/processing/src/main/java/io/druid/query/DruidProcessingConfig.java @@ -82,7 +82,8 @@ public abstract class DruidProcessingConfig extends ExecutorServiceConfig implem } @Config(value = "${base_path}.tmpDir") - public String getTmpDir() { + public String getTmpDir() + { return System.getProperty("java.io.tmpdir"); } } diff --git a/processing/src/main/java/io/druid/query/FluentQueryRunnerBuilder.java b/processing/src/main/java/io/druid/query/FluentQueryRunnerBuilder.java index 
a52da78c9ef..5d3fefaad80 100644 --- a/processing/src/main/java/io/druid/query/FluentQueryRunnerBuilder.java +++ b/processing/src/main/java/io/druid/query/FluentQueryRunnerBuilder.java @@ -29,7 +29,8 @@ public class FluentQueryRunnerBuilder { final QueryToolChest> toolChest; - public FluentQueryRunner create(QueryRunner baseRunner) { + public FluentQueryRunner create(QueryRunner baseRunner) + { return new FluentQueryRunner(baseRunner); } @@ -55,7 +56,8 @@ public class FluentQueryRunnerBuilder return baseRunner.run(queryPlus, responseContext); } - public FluentQueryRunner from(QueryRunner runner) { + public FluentQueryRunner from(QueryRunner runner) + { return new FluentQueryRunner(runner); } diff --git a/processing/src/main/java/io/druid/query/QueryDataSource.java b/processing/src/main/java/io/druid/query/QueryDataSource.java index 6e96eaa6505..1cb66a415a0 100644 --- a/processing/src/main/java/io/druid/query/QueryDataSource.java +++ b/processing/src/main/java/io/druid/query/QueryDataSource.java @@ -50,7 +50,10 @@ public class QueryDataSource implements DataSource } @Override - public String toString() { return query.toString(); } + public String toString() + { + return query.toString(); + } @Override public boolean equals(Object o) diff --git a/processing/src/main/java/io/druid/query/QueryRunnerHelper.java b/processing/src/main/java/io/druid/query/QueryRunnerHelper.java index 62b7947c58e..2638aa5a0c3 100644 --- a/processing/src/main/java/io/druid/query/QueryRunnerHelper.java +++ b/processing/src/main/java/io/druid/query/QueryRunnerHelper.java @@ -72,7 +72,8 @@ public class QueryRunnerHelper ); } - public static QueryRunner makeClosingQueryRunner(final QueryRunner runner, final Closeable closeable){ + public static QueryRunner makeClosingQueryRunner(final QueryRunner runner, final Closeable closeable) + { return new QueryRunner() { @Override diff --git a/processing/src/main/java/io/druid/query/ResultGranularTimestampComparator.java 
b/processing/src/main/java/io/druid/query/ResultGranularTimestampComparator.java index c42e9ad6a44..98953a5687c 100644 --- a/processing/src/main/java/io/druid/query/ResultGranularTimestampComparator.java +++ b/processing/src/main/java/io/druid/query/ResultGranularTimestampComparator.java @@ -45,7 +45,8 @@ public class ResultGranularTimestampComparator implements Comparator Ordering> create(Granularity granularity, boolean descending) { + public static Ordering> create(Granularity granularity, boolean descending) + { Comparator> comparator = new ResultGranularTimestampComparator<>(granularity); return descending ? Ordering.from(comparator).reverse() : Ordering.from(comparator); } diff --git a/processing/src/main/java/io/druid/query/RetryQueryRunner.java b/processing/src/main/java/io/druid/query/RetryQueryRunner.java index 57a1bafdada..425edad69ef 100644 --- a/processing/src/main/java/io/druid/query/RetryQueryRunner.java +++ b/processing/src/main/java/io/druid/query/RetryQueryRunner.java @@ -101,8 +101,7 @@ public class RetryQueryRunner implements QueryRunner Sequences.simple(listOfSequences)).toYielder( initValue, accumulator ); - } - else { + } else { return Iterables.getOnlyElement(listOfSequences).toYielder(initValue, accumulator); } } diff --git a/processing/src/main/java/io/druid/query/TableDataSource.java b/processing/src/main/java/io/druid/query/TableDataSource.java index c558abe4c5a..8f6f29ffdb8 100644 --- a/processing/src/main/java/io/druid/query/TableDataSource.java +++ b/processing/src/main/java/io/druid/query/TableDataSource.java @@ -38,7 +38,8 @@ public class TableDataSource implements DataSource } @JsonProperty - public String getName(){ + public String getName() + { return name; } @@ -49,7 +50,10 @@ public class TableDataSource implements DataSource } @Override - public String toString() { return name; } + public String toString() + { + return name; + } @Override public boolean equals(Object o) diff --git 
a/processing/src/main/java/io/druid/query/TimewarpOperator.java b/processing/src/main/java/io/druid/query/TimewarpOperator.java index df566b52502..5f45a76192b 100644 --- a/processing/src/main/java/io/druid/query/TimewarpOperator.java +++ b/processing/src/main/java/io/druid/query/TimewarpOperator.java @@ -102,10 +102,12 @@ public class TimewarpOperator implements PostProcessingOperator if (value instanceof TimeBoundaryResultValue) { TimeBoundaryResultValue boundary = (TimeBoundaryResultValue) value; - DateTime minTime = null; + DateTime minTime; try { minTime = boundary.getMinTime(); - } catch (IllegalArgumentException e) { + } + catch (IllegalArgumentException e) { + minTime = null; } final DateTime maxTime = boundary.getMaxTime(); diff --git a/processing/src/main/java/io/druid/query/aggregation/Histogram.java b/processing/src/main/java/io/druid/query/aggregation/Histogram.java index 3beda4ea7ab..c2b74bc6bdf 100644 --- a/processing/src/main/java/io/druid/query/aggregation/Histogram.java +++ b/processing/src/main/java/io/druid/query/aggregation/Histogram.java @@ -36,7 +36,8 @@ public class Histogram public float min; public float max; - public Histogram(float[] breaks) { + public Histogram(float[] breaks) + { Preconditions.checkArgument(breaks != null, "Histogram breaks must not be null"); this.breaks = breaks; @@ -46,7 +47,8 @@ public class Histogram this.max = Float.NEGATIVE_INFINITY; } - public Histogram(float[] breaks, long[] bins, float min, float max) { + public Histogram(float[] breaks, long[] bins, float min, float max) + { this.breaks = breaks; this.bins = bins; this.min = min; @@ -56,7 +58,8 @@ public class Histogram } } - public void offer(float d) { + public void offer(float d) + { if(d > max) { max = d; } @@ -70,13 +73,14 @@ public class Histogram count++; } - public Histogram fold(Histogram h) { + public Histogram fold(Histogram h) + { Preconditions.checkArgument(Arrays.equals(breaks, h.breaks), "Cannot fold histograms with different breaks"); - 
if(h.min < min) { + if (h.min < min) { min = h.min; } - if(h.max > max) { + if (h.max > max) { max = h.max; } @@ -129,15 +133,17 @@ public class Histogram } @JsonValue - public byte[] toBytes() { - ByteBuffer buf = ByteBuffer.allocate(Ints.BYTES + Floats.BYTES * breaks.length + - Longs.BYTES * bins.length + Floats.BYTES * 2); + public byte[] toBytes() + { + ByteBuffer buf = ByteBuffer.allocate( + Ints.BYTES + Floats.BYTES * breaks.length + Longs.BYTES * bins.length + Floats.BYTES * 2 + ); buf.putInt(breaks.length); - for(float b : breaks) { + for (float b : breaks) { buf.putFloat(b); } - for(long c : bins ) { + for (long c : bins) { buf.putLong(c); } buf.putFloat(min); @@ -153,20 +159,23 @@ public class Histogram * * @return a visual representation of this histogram */ - public HistogramVisual asVisual() { + public HistogramVisual asVisual() + { float[] visualCounts = new float[bins.length - 2]; - for(int i = 0; i < visualCounts.length; ++i) { + for (int i = 0; i < visualCounts.length; ++i) { visualCounts[i] = (float) bins[i + 1]; } return new HistogramVisual(breaks, visualCounts, new float[]{min, max}); } - public static Histogram fromBytes(byte[] bytes) { + public static Histogram fromBytes(byte[] bytes) + { ByteBuffer buf = ByteBuffer.wrap(bytes); return fromBytes(buf); } - public static Histogram fromBytes(ByteBuffer buf) { + public static Histogram fromBytes(ByteBuffer buf) + { int n = buf.getInt(); float[] breaks = new float[n]; long[] bins = new long[n + 1]; @@ -174,7 +183,7 @@ public class Histogram for (int i = 0; i < breaks.length; ++i) { breaks[i] = buf.getFloat(); } - for (int i = 0; i < bins.length ; ++i) { + for (int i = 0; i < bins.length; ++i) { bins[i] = buf.getLong(); } diff --git a/processing/src/main/java/io/druid/query/aggregation/JavaScriptBufferAggregator.java b/processing/src/main/java/io/druid/query/aggregation/JavaScriptBufferAggregator.java index 2729877237b..046d5362cc8 100644 --- 
a/processing/src/main/java/io/druid/query/aggregation/JavaScriptBufferAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/JavaScriptBufferAggregator.java @@ -61,7 +61,7 @@ public class JavaScriptBufferAggregator implements BufferAggregator @Override public float getFloat(ByteBuffer buf, int position) { - return (float)buf.getDouble(position); + return (float) buf.getDouble(position); } @@ -78,7 +78,8 @@ public class JavaScriptBufferAggregator implements BufferAggregator } @Override - public void close() { + public void close() + { script.close(); } diff --git a/processing/src/main/java/io/druid/query/aggregation/post/ArithmeticPostAggregator.java b/processing/src/main/java/io/druid/query/aggregation/post/ArithmeticPostAggregator.java index a05f19d4226..6f6b7f003aa 100644 --- a/processing/src/main/java/io/druid/query/aggregation/post/ArithmeticPostAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/post/ArithmeticPostAggregator.java @@ -195,46 +195,41 @@ public class ArithmeticPostAggregator implements PostAggregator private static enum Ops { - PLUS("+") - { - @Override - public double compute(double lhs, double rhs) - { - return lhs + rhs; - } - }, - MINUS("-") - { - @Override - public double compute(double lhs, double rhs) - { - return lhs - rhs; - } - }, - MULT("*") - { - @Override - public double compute(double lhs, double rhs) - { - return lhs * rhs; - } - }, - DIV("/") - { - @Override - public double compute(double lhs, double rhs) - { - return (rhs == 0.0) ? 
0 : (lhs / rhs); - } - }, - QUOTIENT("quotient") - { - @Override - public double compute(double lhs, double rhs) - { - return lhs / rhs; - } - }; + PLUS("+") { + @Override + public double compute(double lhs, double rhs) + { + return lhs + rhs; + } + }, + MINUS("-") { + @Override + public double compute(double lhs, double rhs) + { + return lhs - rhs; + } + }, + MULT("*") { + @Override + public double compute(double lhs, double rhs) + { + return lhs * rhs; + } + }, + DIV("/") { + @Override + public double compute(double lhs, double rhs) + { + return (rhs == 0.0) ? 0 : (lhs / rhs); + } + }, + QUOTIENT("quotient") { + @Override + public double compute(double lhs, double rhs) + { + return lhs / rhs; + } + }; private static final Map lookupMap = Maps.newHashMap(); @@ -269,25 +264,22 @@ public class ArithmeticPostAggregator implements PostAggregator } } - public static enum Ordering implements Comparator { + public static enum Ordering implements Comparator + { // ensures the following order: numeric > NaN > Infinite // The name may be referenced via Ordering.valueOf(ordering) in the constructor. 
numericFirst { @Override - public int compare(Double lhs, Double rhs) { - if(isFinite(lhs) && !isFinite(rhs)) { + public int compare(Double lhs, Double rhs) + { + if (Double.isFinite(lhs) && !Double.isFinite(rhs)) { return 1; } - if(!isFinite(lhs) && isFinite(rhs)) { + if (!Double.isFinite(lhs) && Double.isFinite(rhs)) { return -1; } return Double.compare(lhs, rhs); } - - // Double.isFinite only exist in JDK8 - private boolean isFinite(double value) { - return !Double.isInfinite(value) && !Double.isNaN(value); - } } } diff --git a/processing/src/main/java/io/druid/query/aggregation/post/ExpressionPostAggregator.java b/processing/src/main/java/io/druid/query/aggregation/post/ExpressionPostAggregator.java index 3e37c5d38b4..8ee4c396e0a 100644 --- a/processing/src/main/java/io/druid/query/aggregation/post/ExpressionPostAggregator.java +++ b/processing/src/main/java/io/druid/query/aggregation/post/ExpressionPostAggregator.java @@ -53,7 +53,8 @@ public class ExpressionPostAggregator implements PostAggregator } else { return o1.compareTo(o2); } - }); + } + ); private final String name; private final String expression; diff --git a/processing/src/main/java/io/druid/query/extraction/TimeDimExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/TimeDimExtractionFn.java index 6f8c9b5d1dc..91a9c16f97b 100644 --- a/processing/src/main/java/io/druid/query/extraction/TimeDimExtractionFn.java +++ b/processing/src/main/java/io/druid/query/extraction/TimeDimExtractionFn.java @@ -48,23 +48,17 @@ public class TimeDimExtractionFn extends DimExtractionFn Preconditions.checkNotNull(resultFormat, "resultFormat must not be null"); this.timeFormat = timeFormat; - this.timeFormatter = new ThreadLocal() { - @Override - public SimpleDateFormat initialValue() { - SimpleDateFormat formatter = new SimpleDateFormat(TimeDimExtractionFn.this.timeFormat); - formatter.setLenient(true); - return formatter; - } - }; + this.timeFormatter = ThreadLocal.withInitial(() -> { + 
SimpleDateFormat formatter = new SimpleDateFormat(TimeDimExtractionFn.this.timeFormat); + formatter.setLenient(true); + return formatter; + }); this.resultFormat = resultFormat; - this.resultFormatter = new ThreadLocal() { - @Override - public SimpleDateFormat initialValue() { - SimpleDateFormat formatter = new SimpleDateFormat(TimeDimExtractionFn.this.resultFormat); - return formatter; - } - }; + this.resultFormatter = ThreadLocal.withInitial(() -> { + SimpleDateFormat formatter = new SimpleDateFormat(TimeDimExtractionFn.this.resultFormat); + return formatter; + }); } @Override diff --git a/processing/src/main/java/io/druid/query/filter/BoundDimFilter.java b/processing/src/main/java/io/druid/query/filter/BoundDimFilter.java index b74b4e622e5..e4759449866 100644 --- a/processing/src/main/java/io/druid/query/filter/BoundDimFilter.java +++ b/processing/src/main/java/io/druid/query/filter/BoundDimFilter.java @@ -543,8 +543,7 @@ public class BoundDimFilter implements DimFilter if (matchesNothing) { predicate = DruidFloatPredicate.ALWAYS_FALSE; } else { - predicate = input -> - { + predicate = input -> { final DruidDoublePredicate druidDoublePredicate = makeDoublePredicateFromBounds( hasLowerFloatBound, hasUpperFloatBound, diff --git a/processing/src/main/java/io/druid/query/filter/InDimFilter.java b/processing/src/main/java/io/druid/query/filter/InDimFilter.java index 1eeebb1b7e3..d12d0a55274 100644 --- a/processing/src/main/java/io/druid/query/filter/InDimFilter.java +++ b/processing/src/main/java/io/druid/query/filter/InDimFilter.java @@ -152,7 +152,8 @@ public class InDimFilter implements DimFilter return inFilter; } - private InDimFilter optimizeLookup() { + private InDimFilter optimizeLookup() + { if (extractionFn instanceof LookupExtractionFn && ((LookupExtractionFn) extractionFn).isOptimize()) { LookupExtractionFn exFn = (LookupExtractionFn) extractionFn; diff --git a/processing/src/main/java/io/druid/query/filter/NotDimFilter.java 
b/processing/src/main/java/io/druid/query/filter/NotDimFilter.java index 978a685d677..fef83c660f8 100644 --- a/processing/src/main/java/io/druid/query/filter/NotDimFilter.java +++ b/processing/src/main/java/io/druid/query/filter/NotDimFilter.java @@ -36,10 +36,7 @@ import java.util.List; public class NotDimFilter implements DimFilter { private static final Function NEGATE = - new Function() { - @Override - public DimFilter apply(DimFilter filter) { return Druids.newNotDimFilterBuilder().field(filter).build(); } - }; + filter -> Druids.newNotDimFilterBuilder().field(filter).build(); final private DimFilter field; diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/ByteBufferIntList.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/ByteBufferIntList.java index 8e597c24dfa..25ca039860e 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/ByteBufferIntList.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/ByteBufferIntList.java @@ -63,7 +63,8 @@ public class ByteBufferIntList buffer.putInt(index * Ints.BYTES, val); } - public int get(int index) { + public int get(int index) + { return buffer.getInt(index * Ints.BYTES); } diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/ByteBufferMinMaxOffsetHeap.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/ByteBufferMinMaxOffsetHeap.java index ea203a66051..852177966ea 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/ByteBufferMinMaxOffsetHeap.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/ByteBufferMinMaxOffsetHeap.java @@ -84,7 +84,8 @@ public class ByteBufferMinMaxOffsetHeap } } - public int removeMin() { + public int removeMin() + { if (heapSize < 1) { throw new ISE("Empty heap"); } @@ -113,7 +114,8 @@ public class ByteBufferMinMaxOffsetHeap return minOffset; } - public int removeMax() { + public int removeMax() + { int maxOffset; if (heapSize < 
1) { throw new ISE("Empty heap"); @@ -156,7 +158,8 @@ public class ByteBufferMinMaxOffsetHeap return maxOffset; } - public int removeAt(int deletedIndex) { + public int removeAt(int deletedIndex) + { if (heapSize < 1) { throw new ISE("Empty heap"); } @@ -185,15 +188,18 @@ public class ByteBufferMinMaxOffsetHeap return deletedOffset; } - public void setAt(int index, int newVal) { + public void setAt(int index, int newVal) + { buf.putInt(index * Ints.BYTES, newVal); } - public int getAt(int index) { + public int getAt(int index) + { return buf.getInt(index * Ints.BYTES); } - public int indexOf(int offset) { + public int indexOf(int offset) + { for (int i = 0; i < heapSize; i++) { int curOffset = buf.getInt(i * Ints.BYTES); if (curOffset == offset) { @@ -203,14 +209,16 @@ public class ByteBufferMinMaxOffsetHeap return -1; } - public void removeOffset(int offset) { + public void removeOffset(int offset) + { int index = indexOf(offset); if (index > -1) { removeAt(index); } } - public int getHeapSize() { + public int getHeapSize() + { return heapSize; } @@ -336,7 +344,8 @@ public class ByteBufferMinMaxOffsetHeap } } - private boolean isEvenLevel(int index) { + private boolean isEvenLevel(int index) + { int oneBased = index + 1; return (oneBased & EVEN_POWERS_OF_TWO) > (oneBased & ODD_POWERS_OF_TWO); } @@ -346,7 +355,8 @@ public class ByteBufferMinMaxOffsetHeap * {@code index + len}, or {@code -1} if {@code index} is greater than * {@code size}. */ - private int findMin(Comparator comparator, int index, int len) { + private int findMin(Comparator comparator, int index, int len) + { if (index >= heapSize) { return -1; } @@ -363,14 +373,16 @@ public class ByteBufferMinMaxOffsetHeap /** * Returns the minimum child or {@code -1} if no child exists. 
*/ - private int findMinChild(Comparator comparator, int index) { + private int findMinChild(Comparator comparator, int index) + { return findMin(comparator, getLeftChildIndex(index), 2); } /** * Returns the minimum grand child or -1 if no grand child exists. */ - private int findMinGrandChild(Comparator comparator, int index) { + private int findMinGrandChild(Comparator comparator, int index) + { int leftChildIndex = getLeftChildIndex(index); if (leftChildIndex < 0) { return -1; @@ -378,22 +390,26 @@ public class ByteBufferMinMaxOffsetHeap return findMin(comparator, getLeftChildIndex(leftChildIndex), 4); } - private int getLeftChildIndex(int i) { + private int getLeftChildIndex(int i) + { return i * 2 + 1; } - private int getRightChildIndex(int i) { + private int getRightChildIndex(int i) + { return i * 2 + 2; } - private int getParentIndex(int i) { + private int getParentIndex(int i) + { if (i == 0) { return -1; } return (i - 1) / 2; } - private int getGrandparentIndex(int i) { + private int getGrandparentIndex(int i) + { if (i < 3) { return -1; } @@ -403,7 +419,8 @@ public class ByteBufferMinMaxOffsetHeap /** * Returns the index of the max element. */ - private int findMaxElementIndex() { + private int findMaxElementIndex() + { switch (heapSize) { case 1: return 0; // The lone element in the queue is the maximum. 
@@ -419,7 +436,8 @@ public class ByteBufferMinMaxOffsetHeap } @VisibleForTesting - boolean isIntact() { + boolean isIntact() + { for (int i = 0; i < heapSize; i++) { if (!verifyIndex(i)) { return false; diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/Groupers.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/Groupers.java index 2324f0f3779..eb47abc70cd 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/Groupers.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/Groupers.java @@ -42,7 +42,8 @@ public class Groupers * MurmurHash3 was written by Austin Appleby, and is placed in the public domain. The author * hereby disclaims copyright to this source code. */ - static int smear(int hashCode) { + static int smear(int hashCode) + { return C2 * Integer.rotateLeft(hashCode * C1, 15); } diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/LimitedBufferGrouper.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/LimitedBufferGrouper.java index dd6442835ba..a5513c8503c 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/LimitedBufferGrouper.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/LimitedBufferGrouper.java @@ -236,7 +236,8 @@ public class LimitedBufferGrouper extends AbstractBufferGrouper, Sequence> build( - List dimensions, List aggs, List postAggs + List dimensions, + List aggs, + List postAggs ) { return Functions.identity(); @@ -75,7 +77,8 @@ public final class NoopLimitSpec implements LimitSpec } @Override - public int hashCode() { + public int hashCode() + { return 0; } diff --git a/processing/src/main/java/io/druid/query/groupby/orderby/OrderByColumnSpec.java b/processing/src/main/java/io/druid/query/groupby/orderby/OrderByColumnSpec.java index 1e09b2e78f5..942580efdbb 100644 --- a/processing/src/main/java/io/druid/query/groupby/orderby/OrderByColumnSpec.java +++ 
b/processing/src/main/java/io/druid/query/groupby/orderby/OrderByColumnSpec.java @@ -163,7 +163,8 @@ public class OrderByColumnSpec return null; } - public static int getDimIndexForOrderBy(OrderByColumnSpec orderSpec, List dimensions) { + public static int getDimIndexForOrderBy(OrderByColumnSpec orderSpec, List dimensions) + { int i = 0; for (DimensionSpec dimSpec : dimensions) { if (orderSpec.getDimension().equals((dimSpec.getOutputName()))) { @@ -174,7 +175,8 @@ public class OrderByColumnSpec return -1; } - public static int getAggIndexForOrderBy(OrderByColumnSpec orderSpec, List aggregatorFactories) { + public static int getAggIndexForOrderBy(OrderByColumnSpec orderSpec, List aggregatorFactories) + { int i = 0; for (AggregatorFactory agg : aggregatorFactories) { if (orderSpec.getDimension().equals((agg.getName()))) { @@ -185,7 +187,8 @@ public class OrderByColumnSpec return -1; } - public static int getPostAggIndexForOrderBy(OrderByColumnSpec orderSpec, List postAggs) { + public static int getPostAggIndexForOrderBy(OrderByColumnSpec orderSpec, List postAggs) + { int i = 0; for (PostAggregator postAgg : postAggs) { if (orderSpec.getDimension().equals((postAgg.getName()))) { @@ -196,10 +199,7 @@ public class OrderByColumnSpec return -1; } - public OrderByColumnSpec( - String dimension, - Direction direction - ) + public OrderByColumnSpec(String dimension, Direction direction) { this(dimension, direction, null); } diff --git a/processing/src/main/java/io/druid/query/lookup/LookupExtractionFn.java b/processing/src/main/java/io/druid/query/lookup/LookupExtractionFn.java index e2a5d992fe6..b0c65a7e098 100644 --- a/processing/src/main/java/io/druid/query/lookup/LookupExtractionFn.java +++ b/processing/src/main/java/io/druid/query/lookup/LookupExtractionFn.java @@ -81,11 +81,17 @@ public class LookupExtractionFn extends FunctionalExtraction @Override @JsonProperty - public boolean isRetainMissingValue() {return super.isRetainMissingValue();} + public boolean 
isRetainMissingValue() + { + return super.isRetainMissingValue(); + } @Override @JsonProperty - public String getReplaceMissingValueWith() {return super.getReplaceMissingValueWith();} + public String getReplaceMissingValueWith() + { + return super.getReplaceMissingValueWith(); + } @Override @JsonProperty diff --git a/processing/src/main/java/io/druid/query/lookup/LookupExtractorFactoryContainer.java b/processing/src/main/java/io/druid/query/lookup/LookupExtractorFactoryContainer.java index 76719d582b5..469fb01ea04 100644 --- a/processing/src/main/java/io/druid/query/lookup/LookupExtractorFactoryContainer.java +++ b/processing/src/main/java/io/druid/query/lookup/LookupExtractorFactoryContainer.java @@ -55,7 +55,8 @@ public class LookupExtractorFactoryContainer return lookupExtractorFactory; } - public boolean replaces(LookupExtractorFactoryContainer other) { + public boolean replaces(LookupExtractorFactoryContainer other) + { if (version == null && other.getVersion() == null) { return this.lookupExtractorFactory.replaces(other.getLookupExtractorFactory()); } diff --git a/processing/src/main/java/io/druid/query/lookup/LookupReferencesManager.java b/processing/src/main/java/io/druid/query/lookup/LookupReferencesManager.java index 88e3578f682..5592f7bf935 100644 --- a/processing/src/main/java/io/druid/query/lookup/LookupReferencesManager.java +++ b/processing/src/main/java/io/druid/query/lookup/LookupReferencesManager.java @@ -172,7 +172,8 @@ public class LookupReferencesManager for (Notice notice : swappedState.noticesBeingHandled) { try { notice.handle(lookupMap); - } catch (Exception ex) { + } + catch (Exception ex) { LOG.error(ex, "Exception occured while handling lookup notice [%s].", notice); LOG.makeAlert("Exception occured while handling lookup notice, with message [%s].", ex.getMessage()).emit(); } diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryConfig.java 
b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryConfig.java index fd4add3da80..a8d71d3a319 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryConfig.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentMetadataQueryConfig.java @@ -62,7 +62,10 @@ public class SegmentMetadataQueryConfig this.defaultHistory = ISO_FORMATTER.parsePeriod(period); } - public EnumSet getDefaultAnalysisTypes() { return defaultAnalysisTypes; } + public EnumSet getDefaultAnalysisTypes() + { + return defaultAnalysisTypes; + } public void setDefaultAnalysisTypes(EnumSet defaultAnalysisTypes) { diff --git a/processing/src/main/java/io/druid/query/ordering/StringComparators.java b/processing/src/main/java/io/druid/query/ordering/StringComparators.java index 2a66b3c0559..441521bf3c8 100644 --- a/processing/src/main/java/io/druid/query/ordering/StringComparators.java +++ b/processing/src/main/java/io/druid/query/ordering/StringComparators.java @@ -103,43 +103,34 @@ public class StringComparators @Override public int compare(String str1, String str2) { - int[] pos = - { 0, 0 }; + int[] pos = { 0, 0 }; - if (str1 == null) - { + if (str1 == null) { if (str2 == null) { return 0; } return -1; - } else if (str2 == null) - { + } else if (str2 == null) { return 1; - } else if (str1.length() == 0) - { + } else if (str1.length() == 0) { return str2.length() == 0 ? 0 : -1; - } else if (str2.length() == 0) - { + } else if (str2.length() == 0) { return 1; } - while (pos[0] < str1.length() && pos[1] < str2.length()) - { + while (pos[0] < str1.length() && pos[1] < str2.length()) { int ch1 = str1.codePointAt(pos[0]); int ch2 = str2.codePointAt(pos[1]); int result = 0; - if (isDigit(ch1)) - { + if (isDigit(ch1)) { result = isDigit(ch2) ? compareNumbers(str1, str2, pos) : -1; - } else - { + } else { result = isDigit(ch2) ? 
1 : compareNonNumeric(str1, str2, pos); } - if (result != 0) - { + if (result != 0) { return result; } } @@ -154,13 +145,11 @@ public class StringComparators int ch0 = -1, ch1 = -1; // Skip leading zeroes, but keep a count of them. - while (pos[0] < str0.length() && isZero(ch0 = str0.codePointAt(pos[0]))) - { + while (pos[0] < str0.length() && isZero(ch0 = str0.codePointAt(pos[0]))) { zeroes0++; pos[0] += Character.charCount(ch0); } - while (pos[1] < str1.length() && isZero(ch1 = str1.codePointAt(pos[1]))) - { + while (pos[1] < str1.length() && isZero(ch1 = str1.codePointAt(pos[1]))) { zeroes1++; pos[1] += Character.charCount(ch1); } @@ -169,52 +158,39 @@ public class StringComparators // other, it's a larger number. In case they turn out to have // equal lengths, we compare digits at each position; the first // unequal pair determines which is the bigger number. - while (true) - { + while (true) { boolean noMoreDigits0 = (ch0 < 0) || !isDigit(ch0); boolean noMoreDigits1 = (ch1 < 0) || !isDigit(ch1); - if (noMoreDigits0 && noMoreDigits1) - { + if (noMoreDigits0 && noMoreDigits1) { return delta != 0 ? 
delta : zeroes0 - zeroes1; - } else if (noMoreDigits0) - { + } else if (noMoreDigits0) { return -1; - } else if (noMoreDigits1) - { + } else if (noMoreDigits1) { return 1; - } else if (delta == 0 && ch0 != ch1) - { + } else if (delta == 0 && ch0 != ch1) { delta = valueOf(ch0) - valueOf(ch1); } - if (pos[0] < str0.length()) - { + if (pos[0] < str0.length()) { ch0 = str0.codePointAt(pos[0]); - if (isDigit(ch0)) - { + if (isDigit(ch0)) { pos[0] += Character.charCount(ch0); - } else - { + } else { ch0 = -1; } - } else - { + } else { ch0 = -1; } - if (pos[1] < str1.length()) - { + if (pos[1] < str1.length()) { ch1 = str1.codePointAt(pos[1]); - if (isDigit(ch1)) - { + if (isDigit(ch1)) { pos[1] += Character.charCount(ch1); - } else - { + } else { ch1 = -1; } - } else - { + } else { ch1 = -1; } } @@ -236,24 +212,19 @@ public class StringComparators private int valueOf(int digit) { - if (digit <= '9') - { + if (digit <= '9') { return digit - '0'; } - if (digit <= '\u0669') - { + if (digit <= '\u0669') { return digit - '\u0660'; } - if (digit <= '\u06F9') - { + if (digit <= '\u06F9') { return digit - '\u06F0'; } - if (digit <= '\u096F') - { + if (digit <= '\u096F') { return digit - '\u0966'; } - if (digit <= '\uFF19') - { + if (digit <= '\uFF19') { return digit - '\uFF10'; } @@ -266,16 +237,14 @@ public class StringComparators int start0 = pos[0]; int ch0 = str0.codePointAt(pos[0]); pos[0] += Character.charCount(ch0); - while (pos[0] < str0.length() && !isDigit(ch0 = str0.codePointAt(pos[0]))) - { + while (pos[0] < str0.length() && !isDigit(ch0 = str0.codePointAt(pos[0]))) { pos[0] += Character.charCount(ch0); } int start1 = pos[1]; int ch1 = str1.codePointAt(pos[1]); pos[1] += Character.charCount(ch1); - while (pos[1] < str1.length() && !isDigit(ch1 = str1.codePointAt(pos[1]))) - { + while (pos[1] < str1.length() && !isDigit(ch1 = str1.codePointAt(pos[1]))) { pos[1] += Character.charCount(ch1); } @@ -358,7 +327,8 @@ public class StringComparators } } - private static 
BigDecimal convertStringToBigDecimal(String input) { + private static BigDecimal convertStringToBigDecimal(String input) + { if (input == null) { return null; } @@ -367,7 +337,8 @@ public class StringComparators BigDecimal bd = null; try { bd = new BigDecimal(input); - } catch (NumberFormatException ex) { + } + catch (NumberFormatException ex) { } return bd; } diff --git a/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java index 592418e0a8c..7bf01293202 100644 --- a/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/search/SearchQueryQueryToolChest.java @@ -155,17 +155,7 @@ public class SearchQueryQueryToolChest extends QueryToolChest dimensionSpecs = query.getDimensions() != null ? query.getDimensions() : Collections.emptyList(); private final List dimOutputNames = dimensionSpecs.size() > 0 ? - Lists.transform( - dimensionSpecs, - new Function() { - @Override - public String apply(DimensionSpec input) { - return input.getOutputName(); - } - } - ) - : - Collections.emptyList(); + Lists.transform(dimensionSpecs, DimensionSpec::getOutputName) : Collections.emptyList(); @Override public boolean isCacheable(SearchQuery query, boolean willMergeRunners) diff --git a/processing/src/main/java/io/druid/query/select/SelectQueryEngine.java b/processing/src/main/java/io/druid/query/select/SelectQueryEngine.java index 706278dbb36..b651b4362d0 100644 --- a/processing/src/main/java/io/druid/query/select/SelectQueryEngine.java +++ b/processing/src/main/java/io/druid/query/select/SelectQueryEngine.java @@ -162,10 +162,13 @@ public class SelectQueryEngine } } } - public static class DoubleSelectColumnSelectorStrategy implements SelectColumnSelectorStrategy { + public static class DoubleSelectColumnSelectorStrategy implements SelectColumnSelectorStrategy + { @Override public void addRowValuesToSelectResult( 
- String outputName, DoubleColumnSelector dimSelector, Map resultMap + String outputName, + DoubleColumnSelector dimSelector, + Map resultMap ) { if (dimSelector == null) { diff --git a/processing/src/main/java/io/druid/query/select/SelectQueryQueryToolChest.java b/processing/src/main/java/io/druid/query/select/SelectQueryQueryToolChest.java index 7dabd959ec1..da15120e8df 100644 --- a/processing/src/main/java/io/druid/query/select/SelectQueryQueryToolChest.java +++ b/processing/src/main/java/io/druid/query/select/SelectQueryQueryToolChest.java @@ -167,17 +167,7 @@ public class SelectQueryQueryToolChest extends QueryToolChest dimensionSpecs = query.getDimensions() != null ? query.getDimensions() : Collections.emptyList(); private final List dimOutputNames = dimensionSpecs.size() > 0 ? - Lists.transform( - dimensionSpecs, - new Function() { - @Override - public String apply(DimensionSpec input) { - return input.getOutputName(); - } - } - ) - : - Collections.emptyList(); + Lists.transform(dimensionSpecs, DimensionSpec::getOutputName) : Collections.emptyList(); @Override public boolean isCacheable(SelectQuery query, boolean willMergeRunners) diff --git a/processing/src/main/java/io/druid/query/spec/SpecificSegmentSpec.java b/processing/src/main/java/io/druid/query/spec/SpecificSegmentSpec.java index cc2d66139c9..bbf7f5b54a0 100644 --- a/processing/src/main/java/io/druid/query/spec/SpecificSegmentSpec.java +++ b/processing/src/main/java/io/druid/query/spec/SpecificSegmentSpec.java @@ -34,9 +34,8 @@ public class SpecificSegmentSpec implements QuerySegmentSpec { private final SegmentDescriptor descriptor; - public SpecificSegmentSpec( - SegmentDescriptor descriptor - ) { + public SpecificSegmentSpec(SegmentDescriptor descriptor) + { this.descriptor = descriptor; } @@ -52,7 +51,10 @@ public class SpecificSegmentSpec implements QuerySegmentSpec return walker.getQueryRunnerForSegments(query, Collections.singletonList(descriptor)); } - public SegmentDescriptor getDescriptor() 
{ return descriptor; } + public SegmentDescriptor getDescriptor() + { + return descriptor; + } @Override public boolean equals(Object o) diff --git a/processing/src/main/java/io/druid/segment/BitmapOffset.java b/processing/src/main/java/io/druid/segment/BitmapOffset.java index 87f815d179d..821f4227f02 100644 --- a/processing/src/main/java/io/druid/segment/BitmapOffset.java +++ b/processing/src/main/java/io/druid/segment/BitmapOffset.java @@ -49,8 +49,7 @@ public class BitmapOffset extends Offset private static final String DEFAULT_FULLNESS_FACTORIZATION_STOPS = "0.01,0.1,0.3,0.5,0.7,0.9,0.99"; private static final double[] BITMAP_FULLNESS_FACTORIZATION_STOPS; private static final String[] FACTORIZED_FULLNESS; - static - { + static { String stopString = System.getProperty("bitmapFullnessFactorizationStops", DEFAULT_FULLNESS_FACTORIZATION_STOPS); String[] stopsArray = stopString.split(","); if (stopsArray.length == 0) { diff --git a/processing/src/main/java/io/druid/segment/DimensionHandlerUtils.java b/processing/src/main/java/io/druid/segment/DimensionHandlerUtils.java index 8a96b3fb564..6567c16b022 100644 --- a/processing/src/main/java/io/druid/segment/DimensionHandlerUtils.java +++ b/processing/src/main/java/io/druid/segment/DimensionHandlerUtils.java @@ -324,15 +324,18 @@ public final class DimensionHandlerUtils } } - public static Double nullToZero(@Nullable Double number) { + public static Double nullToZero(@Nullable Double number) + { return number == null ? ZERO_DOUBLE : number; } - public static Long nullToZero(@Nullable Long number) { + public static Long nullToZero(@Nullable Long number) + { return number == null ? ZERO_LONG : number; } - public static Float nullToZero(@Nullable Float number) { + public static Float nullToZero(@Nullable Float number) + { return number == null ? 
ZERO_FLOAT : number; } } diff --git a/processing/src/main/java/io/druid/segment/DoubleDimensionMergerV9.java b/processing/src/main/java/io/druid/segment/DoubleDimensionMergerV9.java index 9a9e1a6475d..87520d41ac8 100644 --- a/processing/src/main/java/io/druid/segment/DoubleDimensionMergerV9.java +++ b/processing/src/main/java/io/druid/segment/DoubleDimensionMergerV9.java @@ -19,7 +19,6 @@ package io.druid.segment; -import com.google.common.base.Throwables; import io.druid.java.util.common.io.Closer; import io.druid.segment.column.ColumnCapabilities; import io.druid.segment.column.ColumnDescriptor; @@ -61,8 +60,9 @@ public class DoubleDimensionMergerV9 implements DimensionMergerV9 try { setupEncodedValueWriter(); - } catch (IOException ioe) { - Throwables.propagate(ioe); + } + catch (IOException ioe) { + throw new RuntimeException(ioe); } } diff --git a/processing/src/main/java/io/druid/segment/FloatDimensionMergerV9.java b/processing/src/main/java/io/druid/segment/FloatDimensionMergerV9.java index ea8c987c9be..d65da367dc2 100644 --- a/processing/src/main/java/io/druid/segment/FloatDimensionMergerV9.java +++ b/processing/src/main/java/io/druid/segment/FloatDimensionMergerV9.java @@ -19,7 +19,6 @@ package io.druid.segment; -import com.google.common.base.Throwables; import io.druid.java.util.common.io.Closer; import io.druid.segment.column.ColumnCapabilities; import io.druid.segment.column.ColumnDescriptor; @@ -62,8 +61,9 @@ public class FloatDimensionMergerV9 implements DimensionMergerV9 try { setupEncodedValueWriter(); - } catch (IOException ioe) { - Throwables.propagate(ioe); + } + catch (IOException ioe) { + throw new RuntimeException(ioe); } } diff --git a/processing/src/main/java/io/druid/segment/LongDimensionMergerV9.java b/processing/src/main/java/io/druid/segment/LongDimensionMergerV9.java index 89c8f6e8181..c1afc2f6746 100644 --- a/processing/src/main/java/io/druid/segment/LongDimensionMergerV9.java +++ 
b/processing/src/main/java/io/druid/segment/LongDimensionMergerV9.java @@ -62,7 +62,8 @@ public class LongDimensionMergerV9 implements DimensionMergerV9 try { setupEncodedValueWriter(); - } catch (IOException ioe) { + } + catch (IOException ioe) { Throwables.propagate(ioe); } } diff --git a/processing/src/main/java/io/druid/segment/SegmentMissingException.java b/processing/src/main/java/io/druid/segment/SegmentMissingException.java index fb6be3a1ce9..5a9bda2ae42 100644 --- a/processing/src/main/java/io/druid/segment/SegmentMissingException.java +++ b/processing/src/main/java/io/druid/segment/SegmentMissingException.java @@ -24,11 +24,13 @@ import io.druid.java.util.common.StringUtils; public class SegmentMissingException extends ISE { - public SegmentMissingException(String formatText, Object... arguments) { + public SegmentMissingException(String formatText, Object... arguments) + { super(StringUtils.nonStrictFormat(formatText, arguments)); } - public SegmentMissingException(Throwable cause, String formatText, Object... arguments){ + public SegmentMissingException(Throwable cause, String formatText, Object... arguments) + { super(cause, formatText, arguments); } } diff --git a/processing/src/main/java/io/druid/segment/SingleScanTimeDimSelector.java b/processing/src/main/java/io/druid/segment/SingleScanTimeDimSelector.java index f7e41d57296..0415cd0bda1 100644 --- a/processing/src/main/java/io/druid/segment/SingleScanTimeDimSelector.java +++ b/processing/src/main/java/io/druid/segment/SingleScanTimeDimSelector.java @@ -118,15 +118,14 @@ public class SingleScanTimeDimSelector implements SingleValueDimensionSelector currentValue = extractionFn.apply(timestamp); ++index; timeValues.add(currentValue); - } - // if this is a new timestamp, apply and cache extraction function result - // since timestamps are assumed grouped and scanned once, we only need to - // check if the current timestamp is different than the current timestamp. 
- // - // If this new timestamp is mapped to the same value by the extraction function, - // we can also avoid creating a dimension value and corresponding index - // and use the current one - else if (timestamp != currentTimestamp) { + // if this is a new timestamp, apply and cache extraction function result + // since timestamps are assumed grouped and scanned once, we only need to + // check if the current timestamp is different than the current timestamp. + // + // If this new timestamp is mapped to the same value by the extraction function, + // we can also avoid creating a dimension value and corresponding index + // and use the current one + } else if (timestamp != currentTimestamp) { if (descending ? timestamp > currentTimestamp : timestamp < currentTimestamp) { // re-using this selector for multiple scans would cause the same rows to return different IDs // we might want to re-visit if we ever need to do multiple scans with this dimension selector diff --git a/processing/src/main/java/io/druid/segment/column/DoubleColumn.java b/processing/src/main/java/io/druid/segment/column/DoubleColumn.java index e0303ea72e9..3d53cded994 100644 --- a/processing/src/main/java/io/druid/segment/column/DoubleColumn.java +++ b/processing/src/main/java/io/druid/segment/column/DoubleColumn.java @@ -31,7 +31,10 @@ public class DoubleColumn extends AbstractColumn private final CompressedDoublesIndexedSupplier column; - public DoubleColumn(CompressedDoublesIndexedSupplier column) {this.column = column;} + public DoubleColumn(CompressedDoublesIndexedSupplier column) + { + this.column = column; + } @Override public int getLength() diff --git a/processing/src/main/java/io/druid/segment/column/IndexedFloatsGenericColumn.java b/processing/src/main/java/io/druid/segment/column/IndexedFloatsGenericColumn.java index 699543656af..af02877a364 100644 --- a/processing/src/main/java/io/druid/segment/column/IndexedFloatsGenericColumn.java +++ 
b/processing/src/main/java/io/druid/segment/column/IndexedFloatsGenericColumn.java @@ -30,9 +30,8 @@ public class IndexedFloatsGenericColumn implements GenericColumn { private final IndexedFloats column; - public IndexedFloatsGenericColumn( - final IndexedFloats column - ) { + public IndexedFloatsGenericColumn(final IndexedFloats column) + { this.column = column; } diff --git a/processing/src/main/java/io/druid/segment/column/IndexedLongsGenericColumn.java b/processing/src/main/java/io/druid/segment/column/IndexedLongsGenericColumn.java index e2d74468a2e..ecfd3330066 100644 --- a/processing/src/main/java/io/druid/segment/column/IndexedLongsGenericColumn.java +++ b/processing/src/main/java/io/druid/segment/column/IndexedLongsGenericColumn.java @@ -30,9 +30,8 @@ public class IndexedLongsGenericColumn implements GenericColumn { private final IndexedLongs column; - public IndexedLongsGenericColumn( - final IndexedLongs column - ) { + public IndexedLongsGenericColumn(final IndexedLongs column) + { this.column = column; } diff --git a/processing/src/main/java/io/druid/segment/data/CompressedDoublesIndexedSupplier.java b/processing/src/main/java/io/druid/segment/data/CompressedDoublesIndexedSupplier.java index a48e7ea7fb4..26bbb316e53 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressedDoublesIndexedSupplier.java +++ b/processing/src/main/java/io/druid/segment/data/CompressedDoublesIndexedSupplier.java @@ -46,7 +46,8 @@ public class CompressedDoublesIndexedSupplier implements Supplier supplier, CompressedObjectStrategy.CompressionStrategy compression - ) { + ) + { this.totalSize = totalSize; this.sizePer = sizePer; this.buffer = buffer; diff --git a/processing/src/main/java/io/druid/segment/data/CompressedIntsIndexedSupplier.java b/processing/src/main/java/io/druid/segment/data/CompressedIntsIndexedSupplier.java index 4426a15ac91..0a4842ffbda 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressedIntsIndexedSupplier.java +++ 
b/processing/src/main/java/io/druid/segment/data/CompressedIntsIndexedSupplier.java @@ -76,7 +76,8 @@ public class CompressedIntsIndexedSupplier implements WritableSupplier extend this.sizePer = sizePer; } - public int getSize() { + public int getSize() + { return sizePer; } diff --git a/processing/src/main/java/io/druid/segment/data/IntersectingOffset.java b/processing/src/main/java/io/druid/segment/data/IntersectingOffset.java index 94e4cdc3792..e38b197fb8d 100644 --- a/processing/src/main/java/io/druid/segment/data/IntersectingOffset.java +++ b/processing/src/main/java/io/druid/segment/data/IntersectingOffset.java @@ -23,14 +23,12 @@ import io.druid.query.monomorphicprocessing.RuntimeShapeInspector; /** */ -public class IntersectingOffset extends Offset { +public class IntersectingOffset extends Offset +{ private final Offset lhs; private final Offset rhs; - public IntersectingOffset( - Offset lhs, - Offset rhs - ) + public IntersectingOffset(Offset lhs, Offset rhs) { this.lhs = lhs; this.rhs = rhs; @@ -39,12 +37,14 @@ public class IntersectingOffset extends Offset { } @Override - public int getOffset() { + public int getOffset() + { return lhs.getOffset(); } @Override - public void increment() { + public void increment() + { lhs.increment(); rhs.increment(); @@ -53,7 +53,7 @@ public class IntersectingOffset extends Offset { private void findIntersection() { - if (! 
(lhs.withinBounds() && rhs.withinBounds())) { + if (!(lhs.withinBounds() && rhs.withinBounds())) { return; } @@ -82,7 +82,8 @@ public class IntersectingOffset extends Offset { } @Override - public boolean withinBounds() { + public boolean withinBounds() + { return lhs.withinBounds() && rhs.withinBounds(); } diff --git a/processing/src/main/java/io/druid/segment/data/UnioningOffset.java b/processing/src/main/java/io/druid/segment/data/UnioningOffset.java index 225a18961f1..00b97719204 100644 --- a/processing/src/main/java/io/druid/segment/data/UnioningOffset.java +++ b/processing/src/main/java/io/druid/segment/data/UnioningOffset.java @@ -30,10 +30,7 @@ public class UnioningOffset extends Offset private int nextOffsetIndex; - public UnioningOffset( - Offset lhs, - Offset rhs - ) + public UnioningOffset(Offset lhs, Offset rhs) { if (lhs.withinBounds()) { offsets[0] = lhs; @@ -42,8 +39,7 @@ public class UnioningOffset extends Offset if (rhs.withinBounds()) { if (offsets[0] == null) { offsets[0] = rhs; - } - else { + } else { offsets[1] = rhs; } } @@ -68,7 +64,8 @@ public class UnioningOffset extends Offset this.nextOffsetIndex = nextOffsetIndex; } - private void figureOutNextValue() { + private void figureOutNextValue() + { if (offsets[0] != null) { if (offsets[1] != null) { int lhs = offsetVals[0]; @@ -79,28 +76,26 @@ public class UnioningOffset extends Offset } else if (lhs == rhs) { nextOffsetIndex = 0; rollIndexForward(1); - } - else { + } else { nextOffsetIndex = 1; } - } - else { + } else { nextOffsetIndex = 0; } } } - private void rollIndexForward(int i) { + private void rollIndexForward(int i) + { offsets[i].increment(); - if (! 
offsets[i].withinBounds()) { + if (!offsets[i].withinBounds()) { offsets[i] = null; if (i == 0) { offsets[0] = offsets[1]; offsetVals[0] = offsetVals[1]; } - } - else { + } else { offsetVals[i] = offsets[i].getOffset(); } } diff --git a/processing/src/main/java/io/druid/segment/data/VSizeIndexed.java b/processing/src/main/java/io/druid/segment/data/VSizeIndexed.java index f2fb63bd69e..16731a93bff 100644 --- a/processing/src/main/java/io/druid/segment/data/VSizeIndexed.java +++ b/processing/src/main/java/io/druid/segment/data/VSizeIndexed.java @@ -197,14 +197,17 @@ public class VSizeIndexed implements IndexedMultivalue inspector.visit("theBuffer", theBuffer); } - public WritableSupplier> asWritableSupplier() { + public WritableSupplier> asWritableSupplier() + { return new VSizeIndexedSupplier(this); } - public static class VSizeIndexedSupplier implements WritableSupplier> { + public static class VSizeIndexedSupplier implements WritableSupplier> + { final VSizeIndexed delegate; - public VSizeIndexedSupplier(VSizeIndexed delegate) { + public VSizeIndexedSupplier(VSizeIndexed delegate) + { this.delegate = delegate; } diff --git a/processing/src/main/java/io/druid/segment/data/VSizeIndexedInts.java b/processing/src/main/java/io/druid/segment/data/VSizeIndexedInts.java index a35c58813f5..cd3411f92ca 100644 --- a/processing/src/main/java/io/druid/segment/data/VSizeIndexedInts.java +++ b/processing/src/main/java/io/druid/segment/data/VSizeIndexedInts.java @@ -102,11 +102,9 @@ public class VSizeIndexedInts implements IndexedInts, Comparable asWritableSupplier() { + public WritableSupplier asWritableSupplier() + { return new VSizeIndexedIntsSupplier(this); } - public static class VSizeIndexedIntsSupplier implements WritableSupplier { + public static class VSizeIndexedIntsSupplier implements WritableSupplier + { final VSizeIndexedInts delegate; - public VSizeIndexedIntsSupplier(VSizeIndexedInts delegate) { + public VSizeIndexedIntsSupplier(VSizeIndexedInts delegate) + { 
this.delegate = delegate; } diff --git a/processing/src/main/java/io/druid/segment/filter/ColumnComparisonFilter.java b/processing/src/main/java/io/druid/segment/filter/ColumnComparisonFilter.java index 83d5890416e..1d5c476ce39 100644 --- a/processing/src/main/java/io/druid/segment/filter/ColumnComparisonFilter.java +++ b/processing/src/main/java/io/druid/segment/filter/ColumnComparisonFilter.java @@ -75,7 +75,8 @@ public class ColumnComparisonFilter implements Filter return makeValueMatcher(valueGetters); } - public static ValueMatcher makeValueMatcher(final ValueGetter[] valueGetters) { + public static ValueMatcher makeValueMatcher(final ValueGetter[] valueGetters) + { if (valueGetters.length == 0) { return BooleanValueMatcher.of(true); } @@ -110,7 +111,8 @@ public class ColumnComparisonFilter implements Filter // overlap returns true when: a and b have one or more elements in common, // a and b are both null, or a and b are both empty. - public static boolean overlap(String[] a, String[] b) { + public static boolean overlap(String[] a, String[] b) + { if (a == null || b == null) { // They only have overlap if both are null. 
return a == null && b == null; diff --git a/processing/src/main/java/io/druid/segment/filter/OrFilter.java b/processing/src/main/java/io/druid/segment/filter/OrFilter.java index d100f16bf7f..a078680a374 100644 --- a/processing/src/main/java/io/druid/segment/filter/OrFilter.java +++ b/processing/src/main/java/io/druid/segment/filter/OrFilter.java @@ -107,7 +107,8 @@ public class OrFilter implements BooleanFilter } - private ValueMatcher makeMatcher(final ValueMatcher[] baseMatchers){ + private ValueMatcher makeMatcher(final ValueMatcher[] baseMatchers) + { Preconditions.checkState(baseMatchers.length > 0); if (baseMatchers.length == 1) { diff --git a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexStorageAdapter.java b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexStorageAdapter.java index 6e189ecf197..895b1d37288 100644 --- a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexStorageAdapter.java +++ b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndexStorageAdapter.java @@ -380,7 +380,8 @@ public class IncrementalIndexStorageAdapter implements StorageAdapter done = !foundMatched && (emptyRange || !baseIter.hasNext()); } - private boolean beyondMaxRowIndex(int rowIndex) { + private boolean beyondMaxRowIndex(int rowIndex) + { // ignore rows whose rowIndex is beyond the maxRowIndex // rows are order by timestamp, not rowIndex, // so we still need to go through all rows to skip rows added after cursor created diff --git a/processing/src/main/java/io/druid/segment/incremental/OffheapIncrementalIndex.java b/processing/src/main/java/io/druid/segment/incremental/OffheapIncrementalIndex.java index fbf94ffa362..2c017d8c260 100644 --- a/processing/src/main/java/io/druid/segment/incremental/OffheapIncrementalIndex.java +++ b/processing/src/main/java/io/druid/segment/incremental/OffheapIncrementalIndex.java @@ -223,7 +223,8 @@ public class OffheapIncrementalIndex extends IncrementalIndex 
synchronized (agg) { try { agg.aggregate(aggBuffer, bufferOffset + aggOffsetInBuffer[i]); - } catch (ParseException e) { + } + catch (ParseException e) { // "aggregate" can throw ParseExceptions if a selector expects something but gets something else. if (reportParseExceptions) { throw new ParseException(e, "Encountered parse error for aggregator[%s]", getMetricAggs()[i].getName()); diff --git a/processing/src/main/java/io/druid/segment/serde/ComplexColumnPartSupplier.java b/processing/src/main/java/io/druid/segment/serde/ComplexColumnPartSupplier.java index 1a89f392b1b..631f4447f73 100644 --- a/processing/src/main/java/io/druid/segment/serde/ComplexColumnPartSupplier.java +++ b/processing/src/main/java/io/druid/segment/serde/ComplexColumnPartSupplier.java @@ -31,9 +31,8 @@ public class ComplexColumnPartSupplier implements Supplier private final GenericIndexed complexType; private final String typeName; - public ComplexColumnPartSupplier( - final String typeName, final GenericIndexed complexType - ) { + public ComplexColumnPartSupplier(final String typeName, final GenericIndexed complexType) + { this.complexType = complexType; this.typeName = typeName; } diff --git a/processing/src/main/java/io/druid/segment/serde/DictionaryEncodedColumnPartSerde.java b/processing/src/main/java/io/druid/segment/serde/DictionaryEncodedColumnPartSerde.java index 72b031c107c..cb86f47957c 100644 --- a/processing/src/main/java/io/druid/segment/serde/DictionaryEncodedColumnPartSerde.java +++ b/processing/src/main/java/io/druid/segment/serde/DictionaryEncodedColumnPartSerde.java @@ -65,9 +65,15 @@ public class DictionaryEncodedColumnPartSerde implements ColumnPartSerde MULTI_VALUE, MULTI_VALUE_V3; - public boolean isSet(int flags) { return (getMask() & flags) != 0; } + public boolean isSet(int flags) + { + return (getMask() & flags) != 0; + } - public int getMask() { return (1 << ordinal()); } + public int getMask() + { + return (1 << ordinal()); + } } enum VERSION diff --git 
a/processing/src/main/java/io/druid/segment/serde/DoubleGenericColumnPartSerde.java b/processing/src/main/java/io/druid/segment/serde/DoubleGenericColumnPartSerde.java index d1e85fe22ac..df34dfee3b0 100644 --- a/processing/src/main/java/io/druid/segment/serde/DoubleGenericColumnPartSerde.java +++ b/processing/src/main/java/io/druid/segment/serde/DoubleGenericColumnPartSerde.java @@ -66,8 +66,7 @@ public class DoubleGenericColumnPartSerde implements ColumnPartSerde @Override public Deserializer getDeserializer() { - return (Deserializer) (buffer, builder, columnConfig) -> - { + return (buffer, builder, columnConfig) -> { final CompressedDoublesIndexedSupplier column = CompressedDoublesIndexedSupplier.fromByteBuffer( buffer, byteOrder, @@ -75,7 +74,7 @@ public class DoubleGenericColumnPartSerde implements ColumnPartSerde ); builder.setType(ValueType.DOUBLE) .setHasMultipleValues(false) - .setGenericColumn(new DoubleGenericColumnSupplier(column, byteOrder)); + .setGenericColumn(new DoubleGenericColumnSupplier(column)); }; } diff --git a/processing/src/main/java/io/druid/segment/serde/DoubleGenericColumnSupplier.java b/processing/src/main/java/io/druid/segment/serde/DoubleGenericColumnSupplier.java index 73a729efb00..4d9c2b2f72c 100644 --- a/processing/src/main/java/io/druid/segment/serde/DoubleGenericColumnSupplier.java +++ b/processing/src/main/java/io/druid/segment/serde/DoubleGenericColumnSupplier.java @@ -24,17 +24,14 @@ import io.druid.segment.column.GenericColumn; import io.druid.segment.column.IndexedDoublesGenericColumn; import io.druid.segment.data.CompressedDoublesIndexedSupplier; -import java.nio.ByteOrder; - public class DoubleGenericColumnSupplier implements Supplier { private final CompressedDoublesIndexedSupplier column; - private final ByteOrder byteOrder; - public DoubleGenericColumnSupplier(CompressedDoublesIndexedSupplier column, ByteOrder byteOrder) { + public DoubleGenericColumnSupplier(CompressedDoublesIndexedSupplier column) + { this.column = 
column; - this.byteOrder = byteOrder; } @Override diff --git a/processing/src/main/java/io/druid/segment/serde/FloatGenericColumnSupplier.java b/processing/src/main/java/io/druid/segment/serde/FloatGenericColumnSupplier.java index 80b88cdbc82..81c9a91e467 100644 --- a/processing/src/main/java/io/druid/segment/serde/FloatGenericColumnSupplier.java +++ b/processing/src/main/java/io/druid/segment/serde/FloatGenericColumnSupplier.java @@ -33,10 +33,8 @@ public class FloatGenericColumnSupplier implements Supplier private final CompressedFloatsIndexedSupplier column; private final ByteOrder byteOrder; - public FloatGenericColumnSupplier( - CompressedFloatsIndexedSupplier column, - ByteOrder byteOrder - ) { + public FloatGenericColumnSupplier(CompressedFloatsIndexedSupplier column, ByteOrder byteOrder) + { this.column = column; this.byteOrder = byteOrder; } diff --git a/processing/src/main/java/io/druid/segment/serde/LongGenericColumnSupplier.java b/processing/src/main/java/io/druid/segment/serde/LongGenericColumnSupplier.java index 51960437b5f..170d19dcd55 100644 --- a/processing/src/main/java/io/druid/segment/serde/LongGenericColumnSupplier.java +++ b/processing/src/main/java/io/druid/segment/serde/LongGenericColumnSupplier.java @@ -30,9 +30,8 @@ public class LongGenericColumnSupplier implements Supplier { private final CompressedLongsIndexedSupplier column; - public LongGenericColumnSupplier( - CompressedLongsIndexedSupplier column - ) { + public LongGenericColumnSupplier(CompressedLongsIndexedSupplier column) + { this.column = column; } diff --git a/processing/src/test/java/io/druid/guice/GuiceInjectorsTest.java b/processing/src/test/java/io/druid/guice/GuiceInjectorsTest.java index 6be450b11b5..d3ea923e251 100644 --- a/processing/src/test/java/io/druid/guice/GuiceInjectorsTest.java +++ b/processing/src/test/java/io/druid/guice/GuiceInjectorsTest.java @@ -53,7 +53,8 @@ public class GuiceInjectorsTest Assert.assertEquals("Expected String", 
customEmitter.getOtherValue()); } - private static class Emitter { + private static class Emitter + { @JacksonInject private String value; @@ -64,13 +65,15 @@ public class GuiceInjectorsTest } } - private static class CustomEmitterFactory implements Provider { + private static class CustomEmitterFactory implements Provider + { private Emitter emitter; private Injector injector; @Inject - public void configure(Injector injector) { + public void configure(Injector injector) + { this.injector = injector; emitter = injector.getInstance(Emitter.class); } @@ -90,7 +93,8 @@ public class GuiceInjectorsTest private Emitter emitter; - public CustomEmitter(Emitter emitter){ + public CustomEmitter(Emitter emitter) + { this.emitter = emitter; } } diff --git a/processing/src/test/java/io/druid/query/AsyncQueryRunnerTest.java b/processing/src/test/java/io/druid/query/AsyncQueryRunnerTest.java index a5761454f63..83c205606b3 100644 --- a/processing/src/test/java/io/druid/query/AsyncQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/AsyncQueryRunnerTest.java @@ -19,7 +19,6 @@ package io.druid.query; -import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.util.concurrent.ListenableFuture; @@ -46,7 +45,8 @@ public class AsyncQueryRunnerTest private final ExecutorService executor; private final Query query; - public AsyncQueryRunnerTest() { + public AsyncQueryRunnerTest() + { this.executor = Executors.newSingleThreadExecutor(); query = Druids.newTimeseriesQueryBuilder() .dataSource("test") @@ -56,7 +56,8 @@ public class AsyncQueryRunnerTest } @Test(timeout = TEST_TIMEOUT) - public void testAsyncNature() throws Exception { + public void testAsyncNature() throws Exception + { final CountDownLatch latch = new CountDownLatch(1); QueryRunner baseRunner = new QueryRunner() { @@ -66,14 +67,18 @@ public class AsyncQueryRunnerTest try { latch.await(); return 
Sequences.simple(Lists.newArrayList(1)); - } catch(InterruptedException ex) { - throw Throwables.propagate(ex); + } + catch (InterruptedException ex) { + throw new RuntimeException(ex); } } }; - AsyncQueryRunner asyncRunner = new AsyncQueryRunner<>(baseRunner, executor, - QueryRunnerTestHelper.NOOP_QUERYWATCHER); + AsyncQueryRunner asyncRunner = new AsyncQueryRunner<>( + baseRunner, + executor, + QueryRunnerTestHelper.NOOP_QUERYWATCHER + ); Sequence lazy = asyncRunner.run(query, Collections.EMPTY_MAP); latch.countDown(); @@ -81,7 +86,8 @@ public class AsyncQueryRunnerTest } @Test(timeout = TEST_TIMEOUT) - public void testQueryTimeoutHonored() { + public void testQueryTimeoutHonored() + { QueryRunner baseRunner = new QueryRunner() { @Override @@ -90,22 +96,28 @@ public class AsyncQueryRunnerTest try { Thread.sleep(Long.MAX_VALUE); throw new RuntimeException("query should not have completed"); - } catch(InterruptedException ex) { - throw Throwables.propagate(ex); + } + catch (InterruptedException ex) { + throw new RuntimeException(ex); } } }; - AsyncQueryRunner asyncRunner = new AsyncQueryRunner<>(baseRunner, executor, - QueryRunnerTestHelper.NOOP_QUERYWATCHER); + AsyncQueryRunner asyncRunner = new AsyncQueryRunner<>( + baseRunner, + executor, + QueryRunnerTestHelper.NOOP_QUERYWATCHER + ); Sequence lazy = asyncRunner.run( query.withOverriddenContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, 1)), - Collections.EMPTY_MAP); + Collections.EMPTY_MAP + ); try { Sequences.toList(lazy, Lists.newArrayList()); - } catch(RuntimeException ex) { + } + catch (RuntimeException ex) { Assert.assertTrue(ex.getCause() instanceof TimeoutException); return; } @@ -113,19 +125,22 @@ public class AsyncQueryRunnerTest } @Test - public void testQueryRegistration() { + public void testQueryRegistration() + { QueryRunner baseRunner = new QueryRunner() { @Override - public Sequence run(QueryPlus queryPlus, Map responseContext) { return null; } + public Sequence run(QueryPlus queryPlus, Map 
responseContext) + { + return null; + } }; QueryWatcher mock = EasyMock.createMock(QueryWatcher.class); mock.registerQuery(EasyMock.eq(query), EasyMock.anyObject(ListenableFuture.class)); EasyMock.replay(mock); - AsyncQueryRunner asyncRunner = new AsyncQueryRunner<>(baseRunner, executor, - mock); + AsyncQueryRunner asyncRunner = new AsyncQueryRunner<>(baseRunner, executor, mock); asyncRunner.run(query, Collections.EMPTY_MAP); EasyMock.verify(mock); diff --git a/processing/src/test/java/io/druid/query/IntervalChunkingQueryRunnerTest.java b/processing/src/test/java/io/druid/query/IntervalChunkingQueryRunnerTest.java index 6ec306db2fc..550f9464af5 100644 --- a/processing/src/test/java/io/druid/query/IntervalChunkingQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/IntervalChunkingQueryRunnerTest.java @@ -42,14 +42,16 @@ public class IntervalChunkingQueryRunnerTest private final TimeseriesQueryBuilder queryBuilder; - public IntervalChunkingQueryRunnerTest() { + public IntervalChunkingQueryRunnerTest() + { queryBuilder = Druids.newTimeseriesQueryBuilder() .dataSource("test") .aggregators(Lists.newArrayList(new CountAggregatorFactory("count"))); } @Before - public void setup() { + public void setup() + { executors = EasyMock.createMock(ExecutorService.class); ServiceEmitter emitter = EasyMock.createNiceMock(ServiceEmitter.class); decorator = new IntervalChunkingQueryRunnerDecorator(executors, @@ -59,7 +61,8 @@ public class IntervalChunkingQueryRunnerTest } @Test - public void testDefaultNoChunking() { + public void testDefaultNoChunking() + { QueryPlus queryPlus = QueryPlus.wrap(queryBuilder.intervals("2014/2016").build()); EasyMock.expect(baseRunner.run(queryPlus, Collections.EMPTY_MAP)).andReturn(Sequences.empty()); @@ -72,7 +75,8 @@ public class IntervalChunkingQueryRunnerTest } @Test - public void testChunking() { + public void testChunking() + { Query query = 
queryBuilder.intervals("2015-01-01T00:00:00.000/2015-01-11T00:00:00.000").context(ImmutableMap.of("chunkPeriod", "P1D")).build(); executors.execute(EasyMock.anyObject(Runnable.class)); @@ -88,7 +92,8 @@ public class IntervalChunkingQueryRunnerTest } @Test - public void testChunkingOnMonths() { + public void testChunkingOnMonths() + { Query query = queryBuilder.intervals("2015-01-01T00:00:00.000/2015-02-11T00:00:00.000").context(ImmutableMap.of("chunkPeriod", "P1M")).build(); executors.execute(EasyMock.anyObject(Runnable.class)); diff --git a/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java b/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java index 559d12235f0..0196205bb4b 100644 --- a/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java +++ b/processing/src/test/java/io/druid/query/QueryRunnerTestHelper.java @@ -73,6 +73,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -611,4 +612,13 @@ public class QueryRunnerTestHelper QueryRunnerTestHelper.NOOP_QUERYWATCHER ); } + + public static Map orderedMap(Object... 
keyValues) + { + LinkedHashMap map = new LinkedHashMap<>(); + for (int i = 0; i < keyValues.length; i += 2) { + map.put(keyValues[i].toString(), keyValues[i + 1]); + } + return map; + } } diff --git a/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java b/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java index 2cc8c27d17b..f9862ab7d09 100644 --- a/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/RetryQueryRunnerTest.java @@ -43,6 +43,30 @@ import java.util.Map; public class RetryQueryRunnerTest { + private static class TestRetryQueryRunnerConfig extends RetryQueryRunnerConfig + { + private int numTries; + private boolean returnPartialResults; + + public TestRetryQueryRunnerConfig(int numTries, boolean returnPartialResults) + { + this.numTries = numTries; + this.returnPartialResults = returnPartialResults; + } + + @Override + public int getNumTries() + { + return numTries; + } + + @Override + public boolean isReturnPartialResults() + { + return returnPartialResults; + } + } + private final ObjectMapper jsonMapper = TestHelper.getJsonMapper(); final TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() @@ -160,16 +184,7 @@ public class RetryQueryRunnerTest (QueryToolChest) new TimeseriesQueryQueryToolChest( QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator() ), - new RetryQueryRunnerConfig() - { - private int numTries = 1; - private boolean returnPartialResults = true; - - @Override - public int getNumTries() { return numTries; } - - public boolean returnPartialResults() { return returnPartialResults; } - }, + new TestRetryQueryRunnerConfig(1, true), jsonMapper ); @@ -228,16 +243,7 @@ public class RetryQueryRunnerTest (QueryToolChest) new TimeseriesQueryQueryToolChest( QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator() ), - new RetryQueryRunnerConfig() - { - private int numTries = 4; - private boolean returnPartialResults = true; - - @Override - 
public int getNumTries() { return numTries; } - - public boolean returnPartialResults() { return returnPartialResults; } - }, + new TestRetryQueryRunnerConfig(4, true), jsonMapper ); @@ -281,16 +287,7 @@ public class RetryQueryRunnerTest (QueryToolChest) new TimeseriesQueryQueryToolChest( QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator() ), - new RetryQueryRunnerConfig() - { - private int numTries = 1; - private boolean returnPartialResults = false; - - @Override - public int getNumTries() { return numTries; } - - public boolean returnPartialResults() { return returnPartialResults; } - }, + new TestRetryQueryRunnerConfig(1, false), jsonMapper ); @@ -394,16 +391,7 @@ public class RetryQueryRunnerTest (QueryToolChest) new TimeseriesQueryQueryToolChest( QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator() ), - new RetryQueryRunnerConfig() - { - private int numTries = 2; - private boolean returnPartialResults = false; - - @Override - public int getNumTries() { return numTries; } - - public boolean returnPartialResults() { return returnPartialResults; } - }, + new TestRetryQueryRunnerConfig(2, false), jsonMapper ); diff --git a/processing/src/test/java/io/druid/query/aggregation/AggregationTestHelper.java b/processing/src/test/java/io/druid/query/aggregation/AggregationTestHelper.java index fd7da345cf5..cca5a62ae10 100644 --- a/processing/src/test/java/io/druid/query/aggregation/AggregationTestHelper.java +++ b/processing/src/test/java/io/druid/query/aggregation/AggregationTestHelper.java @@ -507,8 +507,9 @@ public class AggregationTestHelper try { return runQueryOnSegmentsObjs(segments, query); - } finally { - for(Segment segment: segments) { + } + finally { + for (Segment segment : segments) { CloseQuietly.close(segment); } } @@ -587,7 +588,8 @@ public class AggregationTestHelper ) ); return Sequences.simple(resultRows); - } catch(Exception ex) { + } + catch (Exception ex) { throw Throwables.propagate(ex); } } diff --git 
a/processing/src/test/java/io/druid/query/aggregation/FilteredAggregatorTest.java b/processing/src/test/java/io/druid/query/aggregation/FilteredAggregatorTest.java index d2eab1845e4..e2639fc1913 100644 --- a/processing/src/test/java/io/druid/query/aggregation/FilteredAggregatorTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/FilteredAggregatorTest.java @@ -88,7 +88,8 @@ public class FilteredAggregatorTest assertValues(agg, selector, expectedFirst, expectedSecond, expectedThird); } - private ColumnSelectorFactory makeColumnSelector(final TestFloatColumnSelector selector){ + private ColumnSelectorFactory makeColumnSelector(final TestFloatColumnSelector selector) + { return new ColumnSelectorFactory() { @@ -247,7 +248,8 @@ public class FilteredAggregatorTest }; } - private void assertValues(FilteredAggregator agg,TestFloatColumnSelector selector, double... expectedVals){ + private void assertValues(FilteredAggregator agg,TestFloatColumnSelector selector, double... expectedVals) + { Assert.assertEquals(0.0d, agg.get()); Assert.assertEquals(0.0d, agg.get()); Assert.assertEquals(0.0d, agg.get()); diff --git a/processing/src/test/java/io/druid/query/aggregation/HistogramAggregatorTest.java b/processing/src/test/java/io/druid/query/aggregation/HistogramAggregatorTest.java index 10b9ad7aca0..526e5ad04e5 100644 --- a/processing/src/test/java/io/druid/query/aggregation/HistogramAggregatorTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/HistogramAggregatorTest.java @@ -54,7 +54,8 @@ public class HistogramAggregatorTest } @Test - public void testAggregate() throws Exception { + public void testAggregate() throws Exception + { final float[] values = {0.55f, 0.27f, -0.3f, -.1f, -0.8f, -.7f, -.5f, 0.25f, 0.1f, 2f, -3f}; final float[] breaks = {-1f, -0.5f, 0.0f, 0.5f, 1f}; @@ -100,7 +101,8 @@ public class HistogramAggregatorTest } @Test - public void testBufferAggregate() throws Exception { + public void testBufferAggregate() throws Exception + 
{ final float[] values = {0.55f, 0.27f, -0.3f, -.1f, -0.8f, -.7f, -.5f, 0.25f, 0.1f, 2f, -3f}; final float[] breaks = {-1f, -0.5f, 0.0f, 0.5f, 1f}; diff --git a/processing/src/test/java/io/druid/query/aggregation/HistogramTest.java b/processing/src/test/java/io/druid/query/aggregation/HistogramTest.java index 78ec75fa7cf..ee30db6450f 100644 --- a/processing/src/test/java/io/druid/query/aggregation/HistogramTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/HistogramTest.java @@ -31,7 +31,8 @@ import java.util.Map; public class HistogramTest { @Test - public void testOffer() { + public void testOffer() + { final float[] values = {0.55f, 0.27f, -0.3f, -.1f, -0.8f, -.7f, -.5f, 0.25f, 0.1f, 2f, -3f}; final float[] breaks = {-1f, -0.5f, 0.0f, 0.5f, 1f}; @@ -50,7 +51,8 @@ public class HistogramTest * expose the issue of using Float's MIN_VALUE that is actually positive as initial value for {@link Histogram#max}. */ @Test - public void testOfferOnlyNegative() { + public void testOfferOnlyNegative() + { final float[] values = {-0.3f, -.1f, -0.8f, -.7f, -.5f, -3f}; final float[] breaks = {-1f, -0.5f, 0.0f, 0.5f, 1f}; @@ -65,7 +67,8 @@ public class HistogramTest } @Test - public void testToFromBytes() { + public void testToFromBytes() + { float[] breaks = {-1f, -0.5f, 0.0f, 0.5f, 1f}; long [] bins = { 23, 123, 4, 56, 7, 493210}; Histogram h = new Histogram(breaks, bins, -1f, 1f); @@ -74,7 +77,8 @@ public class HistogramTest } @Test - public void testAsVisual() throws Exception { + public void testAsVisual() throws Exception + { float[] breaks = {-1f, -0.5f, 0.0f, 0.5f, 1f}; long [] bins = { 23, 123, 4, 56, 7, 493210}; Histogram h = new Histogram(breaks, bins, -1f, 1f); diff --git a/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactoryTest.java b/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactoryTest.java index 1df8b7d7667..b9f7be621ec 100644 --- 
a/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactoryTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactoryTest.java @@ -128,7 +128,8 @@ public class HyperUniquesAggregatorFactoryTest } @Test - public void testCompareToShouldBehaveConsistentlyWithEstimatedCardinalitiesEvenInToughCases() throws Exception { + public void testCompareToShouldBehaveConsistentlyWithEstimatedCardinalitiesEvenInToughCases() throws Exception + { // given Random rand = new Random(0); HyperUniquesAggregatorFactory factory = new HyperUniquesAggregatorFactory("foo", "bar"); diff --git a/processing/src/test/java/io/druid/query/extraction/SubstringDimExtractionFnTest.java b/processing/src/test/java/io/druid/query/extraction/SubstringDimExtractionFnTest.java index 7f10d128797..40f1d0c578f 100644 --- a/processing/src/test/java/io/druid/query/extraction/SubstringDimExtractionFnTest.java +++ b/processing/src/test/java/io/druid/query/extraction/SubstringDimExtractionFnTest.java @@ -42,7 +42,8 @@ public class SubstringDimExtractionFnTest } @Test (expected = IllegalArgumentException.class) - public void testZeroLength() { + public void testZeroLength() + { ExtractionFn extractionFnNoLength = new SubstringDimExtractionFn(1,0); } diff --git a/processing/src/test/java/io/druid/query/filter/BoundDimFilterTest.java b/processing/src/test/java/io/druid/query/filter/BoundDimFilterTest.java index 3af43d67fef..b9e3cef41d2 100644 --- a/processing/src/test/java/io/druid/query/filter/BoundDimFilterTest.java +++ b/processing/src/test/java/io/druid/query/filter/BoundDimFilterTest.java @@ -60,14 +60,11 @@ import java.util.Arrays; @RunWith(Parameterized.class) public class BoundDimFilterTest { - public BoundDimFilterTest(BoundDimFilter boundDimFilter) {this.boundDimFilter = boundDimFilter;} - - private final BoundDimFilter boundDimFilter; - private static final ExtractionFn extractionFn = new RegexDimExtractionFn(".*", 
false, null); @Parameterized.Parameters - public static Iterable constructorFeeder(){ + public static Iterable constructorFeeder() + { return ImmutableList.of( new Object[]{new BoundDimFilter("dimension", "12", "15", null, null, null, null, @@ -91,6 +88,13 @@ public class BoundDimFilterTest ); } + private final BoundDimFilter boundDimFilter; + + public BoundDimFilterTest(BoundDimFilter boundDimFilter) + { + this.boundDimFilter = boundDimFilter; + } + @Test public void testSerDesBoundFilter() throws IOException { diff --git a/processing/src/test/java/io/druid/query/filter/DimFilterUtilsTest.java b/processing/src/test/java/io/druid/query/filter/DimFilterUtilsTest.java index fb881c097e2..421aa47bae3 100644 --- a/processing/src/test/java/io/druid/query/filter/DimFilterUtilsTest.java +++ b/processing/src/test/java/io/druid/query/filter/DimFilterUtilsTest.java @@ -52,7 +52,8 @@ public class DimFilterUtilsTest }; @Test - public void testFilterShards() { + public void testFilterShards() + { DimFilter filter1 = EasyMock.createMock(DimFilter.class); EasyMock.expect(filter1.getDimensionRangeSet("dim1")) .andReturn(rangeSet(ImmutableList.of(Range.lessThan("abc")))) @@ -87,7 +88,8 @@ public class DimFilterUtilsTest assertFilterResult(filter2, shards, expected2); } - private void assertFilterResult(DimFilter filter, Iterable input, Set expected) { + private void assertFilterResult(DimFilter filter, Iterable input, Set expected) + { Set result = DimFilterUtils.filterShards(filter, input, CONVERTER); Assert.assertEquals(expected, result); @@ -99,7 +101,8 @@ public class DimFilterUtilsTest Assert.assertEquals(expected, result); } - private static RangeSet rangeSet(List> ranges) { + private static RangeSet rangeSet(List> ranges) + { ImmutableRangeSet.Builder builder = ImmutableRangeSet.builder(); for (Range range : ranges) { builder.add(range); @@ -107,7 +110,8 @@ public class DimFilterUtilsTest return builder.build(); } - private static ShardSpec shardSpec(String dimension, Range 
range) { + private static ShardSpec shardSpec(String dimension, Range range) + { ShardSpec shard = EasyMock.createMock(ShardSpec.class); EasyMock.expect(shard.getDomain()) .andReturn(ImmutableMap.of(dimension, range)) diff --git a/processing/src/test/java/io/druid/query/filter/GetDimensionRangeSetTest.java b/processing/src/test/java/io/druid/query/filter/GetDimensionRangeSetTest.java index b0bb3902b0e..d335e770f27 100644 --- a/processing/src/test/java/io/druid/query/filter/GetDimensionRangeSetTest.java +++ b/processing/src/test/java/io/druid/query/filter/GetDimensionRangeSetTest.java @@ -85,7 +85,8 @@ public class GetDimensionRangeSetTest private static final RangeSet empty = rangeSet(ImmutableList.>of()); @Test - public void testSimpleFilter () { + public void testSimpleFilter() + { RangeSet expected1 = rangeSet(point("a")); Assert.assertEquals(expected1, selector1.getDimensionRangeSet("dim1")); Assert.assertNull(selector1.getDimensionRangeSet("dim2")); @@ -117,7 +118,8 @@ public class GetDimensionRangeSetTest } @Test - public void testAndFilter () { + public void testAndFilter() + { DimFilter and1 = new AndDimFilter(ImmutableList.of(selector1, selector2, in1)); Assert.assertEquals(empty, and1.getDimensionRangeSet("dim1")); Assert.assertNull(and1.getDimensionRangeSet("dim2")); @@ -141,7 +143,8 @@ public class GetDimensionRangeSetTest } @Test - public void testOrFilter () { + public void testOrFilter() + { DimFilter or1 = new OrDimFilter(ImmutableList.of(selector1, selector2, selector5)); RangeSet expected1 = rangeSet(ImmutableList.of(point(""), point("a"), point("z"))); Assert.assertEquals(expected1, or1.getDimensionRangeSet("dim1")); @@ -165,7 +168,8 @@ public class GetDimensionRangeSetTest } @Test - public void testNotFilter () { + public void testNotFilter() + { DimFilter not1 = new NotDimFilter(selector1); RangeSet expected1 = rangeSet(ImmutableList.of(Range.lessThan("a"), Range.greaterThan("a"))); Assert.assertEquals(expected1, 
not1.getDimensionRangeSet("dim1")); @@ -210,15 +214,18 @@ public class GetDimensionRangeSetTest } - private static Range point(String s) { + private static Range point(String s) + { return Range.singleton(s); } - private static RangeSet rangeSet (Range ranges) { + private static RangeSet rangeSet(Range ranges) + { return ImmutableRangeSet.of(ranges); } - private static RangeSet rangeSet (List> ranges) { + private static RangeSet rangeSet(List> ranges) + { ImmutableRangeSet.Builder builder = ImmutableRangeSet.builder(); for (Range range : ranges) { builder.add(range); diff --git a/processing/src/test/java/io/druid/query/filter/RegexDimFilterTest.java b/processing/src/test/java/io/druid/query/filter/RegexDimFilterTest.java index 7643861b256..043399011e9 100644 --- a/processing/src/test/java/io/druid/query/filter/RegexDimFilterTest.java +++ b/processing/src/test/java/io/druid/query/filter/RegexDimFilterTest.java @@ -60,7 +60,8 @@ public class RegexDimFilterTest } @Test - public void testHashcode() { + public void testHashcode() + { RegexDimFilter regexDimFilter = new RegexDimFilter("dim", "reg", null); RegexDimFilter regexDimFilter2 = new RegexDimFilter("di", "mreg", null); RegexDimFilter regexDimFilter3 = new RegexDimFilter("di", "mreg", null); diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFactoryTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFactoryTest.java index 89937637fca..ed378aebbcf 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFactoryTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerFactoryTest.java @@ -105,7 +105,8 @@ public class GroupByQueryRunnerFactoryTest ) ) ); - } catch (Exception e) { + } + catch (Exception e) { Throwables.propagate(e); return null; } diff --git a/processing/src/test/java/io/druid/query/groupby/having/HavingSpecTest.java b/processing/src/test/java/io/druid/query/groupby/having/HavingSpecTest.java index 
39543b48efc..762651ad9c3 100644 --- a/processing/src/test/java/io/druid/query/groupby/having/HavingSpecTest.java +++ b/processing/src/test/java/io/druid/query/groupby/having/HavingSpecTest.java @@ -43,7 +43,8 @@ public class HavingSpecTest private static final Row ROW = new MapBasedInputRow(0, new ArrayList(), ImmutableMap.of("metric", (Object)Float.valueOf(10))); @Test - public void testHavingClauseSerde() throws Exception { + public void testHavingClauseSerde() throws Exception + { List havings = Arrays.asList( new GreaterThanHavingSpec("agg", Double.valueOf(1.3)), new OrHavingSpec( @@ -101,7 +102,8 @@ public class HavingSpecTest } @Test - public void testGreaterThanHavingSpec() { + public void testGreaterThanHavingSpec() + { GreaterThanHavingSpec spec = new GreaterThanHavingSpec("metric", Long.valueOf(Long.MAX_VALUE - 10)); assertFalse(spec.eval(getTestRow(Long.valueOf(Long.MAX_VALUE - 10)))); assertFalse(spec.eval(getTestRow(Long.valueOf(Long.MAX_VALUE - 15)))); @@ -118,7 +120,8 @@ public class HavingSpecTest } @Test - public void testLessThanHavingSpec() { + public void testLessThanHavingSpec() + { LessThanHavingSpec spec = new LessThanHavingSpec("metric", Long.valueOf(Long.MAX_VALUE - 10)); assertFalse(spec.eval(getTestRow(Long.valueOf(Long.MAX_VALUE - 10)))); assertTrue(spec.eval(getTestRow(Long.valueOf(Long.MAX_VALUE - 15)))); @@ -140,7 +143,8 @@ public class HavingSpecTest } @Test - public void testEqualHavingSpec() { + public void testEqualHavingSpec() + { EqualToHavingSpec spec = new EqualToHavingSpec("metric", Long.valueOf(Long.MAX_VALUE - 10)); assertTrue(spec.eval(getTestRow(Long.valueOf(Long.MAX_VALUE - 10)))); assertFalse(spec.eval(getTestRow(Long.valueOf(Long.MAX_VALUE - 5)))); @@ -152,7 +156,8 @@ public class HavingSpecTest assertFalse(spec.eval(getTestRow(Long.MAX_VALUE))); } - private static class CountingHavingSpec extends BaseHavingSpec { + private static class CountingHavingSpec extends BaseHavingSpec + { private final AtomicInteger counter; 
private final boolean value; @@ -171,7 +176,8 @@ public class HavingSpecTest } @Test - public void testAndHavingSpecShouldSupportShortcutEvaluation () { + public void testAndHavingSpecShouldSupportShortcutEvaluation() + { AtomicInteger counter = new AtomicInteger(0); AndHavingSpec spec = new AndHavingSpec(ImmutableList.of( (HavingSpec)new CountingHavingSpec(counter, true), @@ -186,7 +192,8 @@ public class HavingSpecTest } @Test - public void testAndHavingSpec () { + public void testAndHavingSpec() + { AtomicInteger counter = new AtomicInteger(0); AndHavingSpec spec = new AndHavingSpec(ImmutableList.of( (HavingSpec)new CountingHavingSpec(counter, true), @@ -213,7 +220,8 @@ public class HavingSpecTest } @Test - public void testOrHavingSpecSupportsShortcutEvaluation() { + public void testOrHavingSpecSupportsShortcutEvaluation() + { AtomicInteger counter = new AtomicInteger(0); OrHavingSpec spec = new OrHavingSpec(ImmutableList.of( (HavingSpec)new CountingHavingSpec(counter, true), @@ -228,7 +236,8 @@ public class HavingSpecTest } @Test - public void testOrHavingSpec () { + public void testOrHavingSpec() + { AtomicInteger counter = new AtomicInteger(0); OrHavingSpec spec = new OrHavingSpec(ImmutableList.of( (HavingSpec)new CountingHavingSpec(counter, false), @@ -255,7 +264,8 @@ public class HavingSpecTest } @Test - public void testNotHavingSepc() { + public void testNotHavingSepc() + { NotHavingSpec spec = new NotHavingSpec(HavingSpec.NEVER); assertTrue(spec.eval(ROW)); diff --git a/processing/src/test/java/io/druid/query/groupby/orderby/TopNSequenceTest.java b/processing/src/test/java/io/druid/query/groupby/orderby/TopNSequenceTest.java index d08456b062a..49d23ca4e58 100644 --- a/processing/src/test/java/io/druid/query/groupby/orderby/TopNSequenceTest.java +++ b/processing/src/test/java/io/druid/query/groupby/orderby/TopNSequenceTest.java @@ -53,7 +53,8 @@ public class TopNSequenceTest private int limit; @Parameterized.Parameters - public static Collection 
makeTestData(){ + public static Collection makeTestData() + { Object[][] data = new Object[][] { { ASC, RAW_ASC, RAW_ASC.size() - 2}, { ASC, RAW_ASC, RAW_ASC.size()}, @@ -80,7 +81,8 @@ public class TopNSequenceTest return Arrays.asList(data); } - public TopNSequenceTest(Ordering ordering, List rawInput, int limit){ + public TopNSequenceTest(Ordering ordering, List rawInput, int limit) + { this.ordering = ordering; this.rawInput = rawInput; this.limit = limit; diff --git a/processing/src/test/java/io/druid/query/lookup/LookupExtractionFnTest.java b/processing/src/test/java/io/druid/query/lookup/LookupExtractionFnTest.java index 1ce099a8286..04b006704e1 100644 --- a/processing/src/test/java/io/druid/query/lookup/LookupExtractionFnTest.java +++ b/processing/src/test/java/io/druid/query/lookup/LookupExtractionFnTest.java @@ -80,7 +80,8 @@ public class LookupExtractionFnTest } @Test - public void testEqualsAndHash(){ + public void testEqualsAndHash() + { if (retainMissing && !Strings.isNullOrEmpty(replaceMissing)) { // skip return; diff --git a/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java b/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java index b6edd063092..4c7d112b314 100644 --- a/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java +++ b/processing/src/test/java/io/druid/query/search/SearchBinaryFnTest.java @@ -302,7 +302,8 @@ public class SearchBinaryFnTest } // merge function expects input to be sorted as per comparator - private List toHits(Comparator comparator, String... hits) { + private List toHits(Comparator comparator, String... 
hits) + { List result = new ArrayList<>(); for (String hit : hits) { int index = hit.indexOf(':'); @@ -337,7 +338,8 @@ public class SearchBinaryFnTest } @Test - public void testMergeLimit(){ + public void testMergeLimit() + { Result r1 = new Result( currTime, new SearchResultValue( @@ -368,7 +370,8 @@ public class SearchBinaryFnTest } @Test - public void testMergeCountWithNull() { + public void testMergeCountWithNull() + { Result r1 = new Result( currTime, new SearchResultValue( diff --git a/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java b/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java index 3e06f324aa2..e703deb5422 100644 --- a/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/select/SelectQueryRunnerTest.java @@ -148,7 +148,8 @@ public class SelectQueryRunnerTest this.descending = descending; } - private Druids.SelectQueryBuilder newTestQuery() { + private Druids.SelectQueryBuilder newTestQuery() + { return Druids.newSelectQueryBuilder() .dataSource(new TableDataSource(QueryRunnerTestHelper.dataSource)) .dimensionSpecs(DefaultDimensionSpec.toSpec(Arrays.asList())) @@ -566,7 +567,8 @@ public class SelectQueryRunnerTest } @Test - public void testSelectWithFilterLookupExtractionFn () { + public void testSelectWithFilterLookupExtractionFn() + { Map extractionMap = new HashMap<>(); extractionMap.put("total_market","replaced"); diff --git a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java index 34362fc5e6a..1cc0829d45c 100644 --- a/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java @@ -140,7 +140,8 @@ public class TimeBoundaryQueryRunnerTest ); } - private QueryRunner getCustomRunner() throws 
IOException { + private QueryRunner getCustomRunner() throws IOException + { CharSource v_0112 = CharSource.wrap(StringUtils.join(V_0112, "\n")); CharSource v_0113 = CharSource.wrap(StringUtils.join(V_0113, "\n")); diff --git a/processing/src/test/java/io/druid/query/topn/AlphaNumericTopNMetricSpecTest.java b/processing/src/test/java/io/druid/query/topn/AlphaNumericTopNMetricSpecTest.java index 34c102eb3a5..4e66f8094b3 100644 --- a/processing/src/test/java/io/druid/query/topn/AlphaNumericTopNMetricSpecTest.java +++ b/processing/src/test/java/io/druid/query/topn/AlphaNumericTopNMetricSpecTest.java @@ -92,7 +92,8 @@ public class AlphaNumericTopNMetricSpecTest } @Test - public void testSerdeAlphaNumericTopNMetricSpec() throws IOException{ + public void testSerdeAlphaNumericTopNMetricSpec() throws IOException + { AlphaNumericTopNMetricSpec expectedMetricSpec = new AlphaNumericTopNMetricSpec(null); AlphaNumericTopNMetricSpec expectedMetricSpec1 = new AlphaNumericTopNMetricSpec("test"); String jsonSpec = "{\n" diff --git a/processing/src/test/java/io/druid/query/topn/DimensionTopNMetricSpecTest.java b/processing/src/test/java/io/druid/query/topn/DimensionTopNMetricSpecTest.java index 50888159545..bc423b2464a 100644 --- a/processing/src/test/java/io/druid/query/topn/DimensionTopNMetricSpecTest.java +++ b/processing/src/test/java/io/druid/query/topn/DimensionTopNMetricSpecTest.java @@ -30,7 +30,8 @@ import java.io.IOException; public class DimensionTopNMetricSpecTest { @Test - public void testSerdeAlphaNumericDimensionTopNMetricSpec() throws IOException{ + public void testSerdeAlphaNumericDimensionTopNMetricSpec() throws IOException + { DimensionTopNMetricSpec expectedMetricSpec = new DimensionTopNMetricSpec(null, StringComparators.ALPHANUMERIC); DimensionTopNMetricSpec expectedMetricSpec1 = new DimensionTopNMetricSpec("test", StringComparators.ALPHANUMERIC); String jsonSpec = "{\n" @@ -50,7 +51,8 @@ public class DimensionTopNMetricSpecTest } @Test - public void 
testSerdeLexicographicDimensionTopNMetricSpec() throws IOException{ + public void testSerdeLexicographicDimensionTopNMetricSpec() throws IOException + { DimensionTopNMetricSpec expectedMetricSpec = new DimensionTopNMetricSpec(null, StringComparators.LEXICOGRAPHIC); DimensionTopNMetricSpec expectedMetricSpec1 = new DimensionTopNMetricSpec("test", StringComparators.LEXICOGRAPHIC); String jsonSpec = "{\n" @@ -70,7 +72,8 @@ public class DimensionTopNMetricSpecTest } @Test - public void testSerdeStrlenDimensionTopNMetricSpec() throws IOException{ + public void testSerdeStrlenDimensionTopNMetricSpec() throws IOException + { DimensionTopNMetricSpec expectedMetricSpec = new DimensionTopNMetricSpec(null, StringComparators.STRLEN); DimensionTopNMetricSpec expectedMetricSpec1 = new DimensionTopNMetricSpec("test", StringComparators.STRLEN); String jsonSpec = "{\n" @@ -90,7 +93,8 @@ public class DimensionTopNMetricSpecTest } @Test - public void testSerdeNumericDimensionTopNMetricSpec() throws IOException{ + public void testSerdeNumericDimensionTopNMetricSpec() throws IOException + { DimensionTopNMetricSpec expectedMetricSpec = new DimensionTopNMetricSpec(null, StringComparators.NUMERIC); DimensionTopNMetricSpec expectedMetricSpec1 = new DimensionTopNMetricSpec("test", StringComparators.NUMERIC); String jsonSpec = "{\n" diff --git a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java index 4a0b87b0997..62c523a7fb7 100644 --- a/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/topn/TopNQueryRunnerTest.java @@ -106,7 +106,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; -import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -1921,15 +1920,14 @@ public class TopNQueryRunnerTest new Result<>( new 
DateTime("2011-04-01T00:00:00.000Z"), new TopNResultValue( - Arrays.>asList( - new LinkedHashMap() - {{ - put("doesn't exist", null); - put("rows", 26L); - put("index", 12459.361190795898D); - put("addRowsIndexConstant", 12486.361190795898D); - put("uniques", QueryRunnerTestHelper.UNIQUES_9); - }} + Collections.>singletonList( + QueryRunnerTestHelper.orderedMap( + "doesn't exist", null, + "rows", 26L, + "index", 12459.361190795898D, + "addRowsIndexConstant", 12486.361190795898D, + "uniques", QueryRunnerTestHelper.UNIQUES_9 + ) ) ) ) @@ -1956,15 +1954,14 @@ public class TopNQueryRunnerTest new Result<>( new DateTime("2011-04-01T00:00:00.000Z"), new TopNResultValue( - Arrays.>asList( - new LinkedHashMap() - {{ - put("doesn't exist", null); - put("rows", 4L); - put("index", 4875.669692D); - put("addRowsIndexConstant", 4880.669692D); - put("uniques", QueryRunnerTestHelper.UNIQUES_2); - }} + Collections.>singletonList( + QueryRunnerTestHelper.orderedMap( + "doesn't exist", null, + "rows", 4L, + "index", 4875.669692D, + "addRowsIndexConstant", 4880.669692D, + "uniques", QueryRunnerTestHelper.UNIQUES_2 + ) ) ) ) @@ -1991,15 +1988,14 @@ public class TopNQueryRunnerTest new Result<>( new DateTime("2011-04-01T00:00:00.000Z"), new TopNResultValue( - Arrays.>asList( - new LinkedHashMap() - {{ - put("doesn't exist", null); - put("rows", 26L); - put("index", 12459.361190795898D); - put("addRowsIndexConstant", 12486.361190795898D); - put("uniques", QueryRunnerTestHelper.UNIQUES_9); - }} + Collections.>singletonList( + QueryRunnerTestHelper.orderedMap( + "doesn't exist", null, + "rows", 26L, + "index", 12459.361190795898D, + "addRowsIndexConstant", 12486.361190795898D, + "uniques", QueryRunnerTestHelper.UNIQUES_9 + ) ) ) ) @@ -3289,14 +3285,13 @@ public class TopNQueryRunnerTest "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), - new LinkedHashMap() - {{ - put(QueryRunnerTestHelper.marketDimension, null); - put("rows", 4L); - put("index", 
5351.814783D); - put("addRowsIndexConstant", 5356.814783D); - put("uniques", QueryRunnerTestHelper.UNIQUES_2); - }}, + QueryRunnerTestHelper.orderedMap( + QueryRunnerTestHelper.marketDimension, null, + "rows", 4L, + "index", 5351.814783D, + "addRowsIndexConstant", 5356.814783D, + "uniques", QueryRunnerTestHelper.UNIQUES_2 + ), ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "upfront", "rows", 4L, @@ -3378,14 +3373,13 @@ public class TopNQueryRunnerTest "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), - new LinkedHashMap() - {{ - put(QueryRunnerTestHelper.marketDimension, ""); - put("rows", 4L); - put("index", 5351.814783D); - put("addRowsIndexConstant", 5356.814783D); - put("uniques", QueryRunnerTestHelper.UNIQUES_2); - }}, + QueryRunnerTestHelper.orderedMap( + QueryRunnerTestHelper.marketDimension, "", + "rows", 4L, + "index", 5351.814783D, + "addRowsIndexConstant", 5356.814783D, + "uniques", QueryRunnerTestHelper.UNIQUES_2 + ), ImmutableMap.of( QueryRunnerTestHelper.marketDimension, "upfront", "rows", 4L, @@ -4200,7 +4194,8 @@ public class TopNQueryRunnerTest assertExpectedResults(expectedResults, topNQueryWithNULLValueExtraction); } - private Sequence> runWithPreMergeAndMerge(TopNQuery query){ + private Sequence> runWithPreMergeAndMerge(TopNQuery query) + { return runWithPreMergeAndMerge(query, ImmutableMap.of()); } diff --git a/processing/src/test/java/io/druid/segment/AppendTest.java b/processing/src/test/java/io/druid/segment/AppendTest.java index a53741bd0bb..dbd72079d76 100644 --- a/processing/src/test/java/io/druid/segment/AppendTest.java +++ b/processing/src/test/java/io/druid/segment/AppendTest.java @@ -27,6 +27,7 @@ import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.query.Druids; import io.druid.query.QueryRunner; +import io.druid.query.QueryRunnerTestHelper; import io.druid.query.Result; import io.druid.query.TestQueryRunners; 
import io.druid.query.aggregation.AggregatorFactory; @@ -326,16 +327,15 @@ public class AppendTest .put("maxIndex", 100.0) .put("minIndex", 100.0) .build(), - new HashMap() - {{ - put("market", null); - put("rows", 3L); - put("index", 200.0D); - put("addRowsIndexConstant", 204.0D); - put("uniques", 0.0D); - put("maxIndex", 100.0); - put("minIndex", 0.0); - }}, + QueryRunnerTestHelper.orderedMap( + "market", null, + "rows", 3L, + "index", 200.0D, + "addRowsIndexConstant", 204.0D, + "uniques", 0.0D, + "maxIndex", 100.0, + "minIndex", 0.0 + ), ImmutableMap.builder() .put("market", "total_market") .put("rows", 2L) @@ -373,16 +373,15 @@ public class AppendTest .put("maxIndex", 100.0D) .put("minIndex", 100.0D) .build(), - new HashMap() - {{ - put("market", null); - put("rows", 3L); - put("index", 100.0D); - put("addRowsIndexConstant", 104.0D); - put("uniques", 0.0D); - put("maxIndex", 100.0); - put("minIndex", 0.0); - }}, + QueryRunnerTestHelper.orderedMap( + "market", null, + "rows", 3L, + "index", 100.0D, + "addRowsIndexConstant", 104.0D, + "uniques", 0.0D, + "maxIndex", 100.0, + "minIndex", 0.0 + ), ImmutableMap.builder() .put("market", "spot") .put("rows", 1L) diff --git a/processing/src/test/java/io/druid/segment/CompressedVSizeIndexedV3SupplierTest.java b/processing/src/test/java/io/druid/segment/CompressedVSizeIndexedV3SupplierTest.java index 717c00a7c38..e807bf165e6 100644 --- a/processing/src/test/java/io/druid/segment/CompressedVSizeIndexedV3SupplierTest.java +++ b/processing/src/test/java/io/druid/segment/CompressedVSizeIndexedV3SupplierTest.java @@ -38,7 +38,8 @@ public class CompressedVSizeIndexedV3SupplierTest extends CompressedVSizeIndexed { @Override @Before - public void setUpSimple(){ + public void setUpSimple() + { vals = Arrays.asList( new int[1], new int[]{1, 2, 3, 4, 5}, @@ -64,7 +65,8 @@ public class CompressedVSizeIndexedV3SupplierTest extends CompressedVSizeIndexed @Override @After - public void teardown(){ + public void teardown() + { 
indexedSupplier = null; vals = null; } diff --git a/processing/src/test/java/io/druid/segment/NullDimensionSelectorTest.java b/processing/src/test/java/io/druid/segment/NullDimensionSelectorTest.java index 9b34cda11ef..dbbee144208 100644 --- a/processing/src/test/java/io/druid/segment/NullDimensionSelectorTest.java +++ b/processing/src/test/java/io/druid/segment/NullDimensionSelectorTest.java @@ -25,12 +25,14 @@ import org.junit.Test; import java.util.Iterator; -public class NullDimensionSelectorTest { +public class NullDimensionSelectorTest +{ private final NullDimensionSelector selector = NullDimensionSelector.instance(); @Test - public void testGetRow() throws Exception { + public void testGetRow() throws Exception + { IndexedInts row = selector.getRow(); Assert.assertEquals(1, row.size()); Assert.assertEquals(0, row.get(0)); @@ -42,17 +44,20 @@ public class NullDimensionSelectorTest { } @Test - public void testGetValueCardinality() throws Exception { + public void testGetValueCardinality() throws Exception + { Assert.assertEquals(1, selector.getValueCardinality()); } @Test - public void testLookupName() throws Exception { + public void testLookupName() throws Exception + { Assert.assertEquals(null, selector.lookupName(0)); } @Test - public void testLookupId() throws Exception { + public void testLookupId() throws Exception + { Assert.assertEquals(0, selector.idLookup().lookupId(null)); Assert.assertEquals(0, selector.idLookup().lookupId("")); Assert.assertEquals(-1, selector.idLookup().lookupId("billy")); diff --git a/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java b/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java index 6b0f3fe0912..a24b0eb92b2 100644 --- a/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java +++ b/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java @@ -29,6 +29,7 @@ import io.druid.java.util.common.granularity.Granularity; import 
io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.QueryRunner; +import io.druid.query.QueryRunnerTestHelper; import io.druid.query.Result; import io.druid.query.TestQueryRunners; import io.druid.query.aggregation.AggregatorFactory; @@ -58,8 +59,8 @@ import org.joda.time.Interval; import org.junit.Test; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; -import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -418,16 +419,15 @@ public class SchemalessTestFullTest .put("maxIndex", 100.0) .put("minIndex", 100.0) .build(), - new HashMap() - {{ - put("market", null); - put("rows", 1L); - put("index", 0.0D); - put("addRowsIndexConstant", 2.0D); - put("uniques", 0.0D); - put("maxIndex", 0.0); - put("minIndex", 0.0); - }} + QueryRunnerTestHelper.orderedMap( + "market", null, + "rows", 1L, + "index", 0.0D, + "addRowsIndexConstant", 2.0D, + "uniques", 0.0D, + "maxIndex", 0.0, + "minIndex", 0.0 + ) ) ) ) @@ -766,16 +766,15 @@ public class SchemalessTestFullTest .put("maxIndex", 100.0) .put("minIndex", 100.0) .build(), - new HashMap() - {{ - put("market", null); - put("rows", 1L); - put("index", 0.0D); - put("addRowsIndexConstant", 2.0D); - put("uniques", 0.0D); - put("maxIndex", 0.0); - put("minIndex", 0.0); - }} + QueryRunnerTestHelper.orderedMap( + "market", null, + "rows", 1L, + "index", 0.0D, + "addRowsIndexConstant", 2.0D, + "uniques", 0.0D, + "maxIndex", 0.0, + "minIndex", 0.0 + ) ) ) ) @@ -880,17 +879,16 @@ public class SchemalessTestFullTest new Result<>( new DateTime("2011-01-12T00:00:00.000Z"), new TopNResultValue( - Arrays.>asList( - new LinkedHashMap(){{ - put("market", null); - put("rows", 1L); - put("index", 0.0D); - put("addRowsIndexConstant", 2.0D); - put("uniques", 0.0D); - put("maxIndex", 0.0); - put("minIndex", 0.0); - - }} + Collections.singletonList( + QueryRunnerTestHelper.orderedMap( + "market", null, + "rows", 1L, + "index", 0.0D, + 
"addRowsIndexConstant", 2.0D, + "uniques", 0.0D, + "maxIndex", 0.0, + "minIndex", 0.0 + ) ) ) ) @@ -899,7 +897,7 @@ public class SchemalessTestFullTest new Result<>( new DateTime("2011-01-12T00:00:00.000Z"), new TopNResultValue( - Arrays.asList() + Collections.emptyList() ) ) ); @@ -908,7 +906,7 @@ public class SchemalessTestFullTest new Result<>( new DateTime("2011-01-12T00:00:00.000Z"), new SearchResultValue( - Arrays.asList() + Collections.emptyList() ) ) ); @@ -1068,16 +1066,15 @@ public class SchemalessTestFullTest new DateTime("2011-01-12T00:00:00.000Z"), new TopNResultValue( Arrays.>asList( - new HashMap() - {{ - put("market", null); - put("rows", 2L); - put("index", 200.0D); - put("addRowsIndexConstant", 203.0D); - put("uniques", 0.0D); - put("maxIndex", 100.0); - put("minIndex", 100.0); - }}, + QueryRunnerTestHelper.orderedMap( + "market", null, + "rows", 2L, + "index", 200.0D, + "addRowsIndexConstant", 203.0D, + "uniques", 0.0D, + "maxIndex", 100.0, + "minIndex", 100.0 + ), ImmutableMap.builder() .put("market", "spot") .put("rows", 1L) diff --git a/processing/src/test/java/io/druid/segment/StringDimensionHandlerTest.java b/processing/src/test/java/io/druid/segment/StringDimensionHandlerTest.java index 226df78ec1b..c98fe959ee9 100644 --- a/processing/src/test/java/io/druid/segment/StringDimensionHandlerTest.java +++ b/processing/src/test/java/io/druid/segment/StringDimensionHandlerTest.java @@ -60,7 +60,8 @@ public class StringDimensionHandlerTest List dims, Map event1, Map event2 - ) throws Exception { + ) throws Exception + { IncrementalIndex incrementalIndex1 = new IncrementalIndex.Builder() .setIndexSchema( new IncrementalIndexSchema.Builder() @@ -100,7 +101,8 @@ public class StringDimensionHandlerTest return new Pair<>(adapter1, adapter2); } - private static void validate(IncrementalIndexAdapter adapter1, IncrementalIndexAdapter adapter2) throws Exception { + private static void validate(IncrementalIndexAdapter adapter1, IncrementalIndexAdapter 
adapter2) throws Exception + { Map handlers = adapter1.getDimensionHandlers(); Indexed dimNames1 = adapter1.getDimensionNames(); Indexed dimNames2 = adapter2.getDimensionNames(); @@ -129,7 +131,8 @@ public class StringDimensionHandlerTest } @Test - public void testValidateSortedEncodedArrays() throws Exception { + public void testValidateSortedEncodedArrays() throws Exception + { Map event1 = ImmutableMap.of( "penguins", Arrays.asList("adelie", "emperor"), "predators", Arrays.asList("seal") @@ -150,7 +153,8 @@ public class StringDimensionHandlerTest public ExpectedException exception = ExpectedException.none(); @Test - public void testValidateSortedDifferentEncodedArrays() throws Exception { + public void testValidateSortedDifferentEncodedArrays() throws Exception + { Map event1 = ImmutableMap.of( "penguins", Arrays.asList("adelie", "emperor"), "predators", Collections.singletonList("seal") diff --git a/processing/src/test/java/io/druid/segment/TestHelper.java b/processing/src/test/java/io/druid/segment/TestHelper.java index 1282d80fe6a..c62b0259c9c 100644 --- a/processing/src/test/java/io/druid/segment/TestHelper.java +++ b/processing/src/test/java/io/druid/segment/TestHelper.java @@ -166,8 +166,7 @@ public class TestHelper // Special to allow a floating point delta to be used in result comparison due to legacy expected results assertTopNResultValue(failMsg, (Result) expectedNext, (Result) next); assertTopNResultValue(String.format("%s: Second iterator bad, multiple calls to iterator() should be safe", failMsg), (Result) expectedNext, (Result) next2); - } - else { + } else { assertResult(failMsg, (Result) expectedNext, (Result) next); assertResult( StringUtils.format("%s: Second iterator bad, multiple calls to iterator() should be safe", failMsg), diff --git a/processing/src/test/java/io/druid/segment/data/BenchmarkIndexibleWrites.java b/processing/src/test/java/io/druid/segment/data/BenchmarkIndexibleWrites.java index acdbd288acb..73d542ab2b7 100644 --- 
a/processing/src/test/java/io/druid/segment/data/BenchmarkIndexibleWrites.java +++ b/processing/src/test/java/io/druid/segment/data/BenchmarkIndexibleWrites.java @@ -95,7 +95,8 @@ public class BenchmarkIndexibleWrites extends AbstractBenchmark } @Override - public void clear(){ + public void clear() + { delegate.clear(); } } diff --git a/processing/src/test/java/io/druid/segment/data/CompressedLongsSerdeTest.java b/processing/src/test/java/io/druid/segment/data/CompressedLongsSerdeTest.java index 4a74be54392..3583706973d 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedLongsSerdeTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedLongsSerdeTest.java @@ -77,7 +77,8 @@ public class CompressedLongsSerdeTest private final long values8[] = {Long.MAX_VALUE, 0, 321, 15248425, 13523212136L, 63822, 3426, 96}; // built test value with enough unique values to not use table encoding for auto strategy - private static long[] addUniques(long[] val) { + private static long[] addUniques(long[] val) + { long[] ret = new long[val.length + CompressionFactory.MAX_TABLE_SIZE]; for (int i = 0; i < CompressionFactory.MAX_TABLE_SIZE; i++) { ret[i] = i; diff --git a/processing/src/test/java/io/druid/segment/data/CompressedVSizeIntsIndexedSupplierTest.java b/processing/src/test/java/io/druid/segment/data/CompressedVSizeIntsIndexedSupplierTest.java index 6111defe400..976c34d0460 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedVSizeIntsIndexedSupplierTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedVSizeIntsIndexedSupplierTest.java @@ -213,7 +213,8 @@ public class CompressedVSizeIntsIndexedSupplierTest extends CompressionStrategyT try { setupLargeChunks(maxChunkSize + 1, 10 * (maxChunkSize + 1), maxValue); Assert.fail(); - } catch(IllegalArgumentException e) { + } + catch (IllegalArgumentException e) { Assert.assertTrue("chunk too big for maxValue " + maxValue, true); } } diff --git 
a/processing/src/test/java/io/druid/segment/filter/InFilterTest.java b/processing/src/test/java/io/druid/segment/filter/InFilterTest.java index b28409bed0b..98d942ba600 100644 --- a/processing/src/test/java/io/druid/segment/filter/InFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/InFilterTest.java @@ -247,7 +247,8 @@ public class InFilterTest extends BaseFilterTest } @Test - public void testMatchWithLookupExtractionFn() { + public void testMatchWithLookupExtractionFn() + { final Map stringMap = ImmutableMap.of( "a", "HELLO", "10", "HELLO", diff --git a/processing/src/test/java/io/druid/segment/filter/RowboatTest.java b/processing/src/test/java/io/druid/segment/filter/RowboatTest.java index bc2bc94d273..3789e72ff52 100644 --- a/processing/src/test/java/io/druid/segment/filter/RowboatTest.java +++ b/processing/src/test/java/io/druid/segment/filter/RowboatTest.java @@ -30,7 +30,8 @@ import org.junit.Test; */ public class RowboatTest { - private static DimensionHandler[] getDefaultHandlers(int size) { + private static DimensionHandler[] getDefaultHandlers(int size) + { DimensionHandler[] handlers = new DimensionHandler[size]; for (int i = 0; i < size; i++) { handlers[i] = new StringDimensionHandler(String.valueOf(i), null); diff --git a/processing/src/test/java/io/druid/segment/loading/SegmentizerFactoryTest.java b/processing/src/test/java/io/druid/segment/loading/SegmentizerFactoryTest.java index aa462b37722..a05e4e98389 100644 --- a/processing/src/test/java/io/druid/segment/loading/SegmentizerFactoryTest.java +++ b/processing/src/test/java/io/druid/segment/loading/SegmentizerFactoryTest.java @@ -36,7 +36,8 @@ import java.nio.file.Files; public class SegmentizerFactoryTest { @Test - public void testFactory() throws IOException{ + public void testFactory() throws IOException + { File factoryFile = Files.createTempFile("", "factory.json").toFile(); FileOutputStream fos = new FileOutputStream(factoryFile); ObjectMapper mapper = new 
DefaultObjectMapper(); diff --git a/server/src/main/java/io/druid/client/AbstractCuratorServerInventoryView.java b/server/src/main/java/io/druid/client/AbstractCuratorServerInventoryView.java index 57f93b4e4cb..492347a523b 100644 --- a/server/src/main/java/io/druid/client/AbstractCuratorServerInventoryView.java +++ b/server/src/main/java/io/druid/client/AbstractCuratorServerInventoryView.java @@ -334,7 +334,8 @@ public abstract class AbstractCuratorServerInventoryView implemen segment.getIdentifier() ); return curator.checkExists().forPath(toServedSegPath) != null; - } catch (Exception ex) { + } + catch (Exception ex) { throw Throwables.propagate(ex); } } diff --git a/server/src/main/java/io/druid/client/CachingClusteredClient.java b/server/src/main/java/io/druid/client/CachingClusteredClient.java index 6cf2d27f6aa..900df483e3b 100644 --- a/server/src/main/java/io/druid/client/CachingClusteredClient.java +++ b/server/src/main/java/io/druid/client/CachingClusteredClient.java @@ -328,8 +328,7 @@ public class CachingClusteredClient implements QuerySegmentWalker final byte[] queryCacheKey; if ((populateCache || useCache) // implies strategy != null - && !isBySegment) // explicit bySegment queries are never cached - { + && !isBySegment) /* explicit bySegment queries are never cached */ { queryCacheKey = strategy.computeCacheKey(query); } else { queryCacheKey = null; diff --git a/server/src/main/java/io/druid/client/DruidServer.java b/server/src/main/java/io/druid/client/DruidServer.java index be8eb13c117..bf15d6a8987 100644 --- a/server/src/main/java/io/druid/client/DruidServer.java +++ b/server/src/main/java/io/druid/client/DruidServer.java @@ -106,12 +106,14 @@ public class DruidServer implements Comparable } @JsonProperty("host") - public String getHostAndPort() { + public String getHostAndPort() + { return metadata.getHostAndPort(); } @JsonProperty - public String getHostAndTlsPort() { + public String getHostAndTlsPort() + { return metadata.getHostAndTlsPort(); } 
diff --git a/server/src/main/java/io/druid/client/HttpServerInventoryView.java b/server/src/main/java/io/druid/client/HttpServerInventoryView.java index 68c7cac3d71..9419f55d0fb 100644 --- a/server/src/main/java/io/druid/client/HttpServerInventoryView.java +++ b/server/src/main/java/io/druid/client/HttpServerInventoryView.java @@ -215,7 +215,8 @@ public class HttpServerInventoryView implements ServerInventoryView, FilteredSer log.info("Started HttpServerInventoryView."); lifecycleLock.started(); - } finally { + } + finally { lifecycleLock.exitStart(); } } @@ -490,7 +491,8 @@ public class HttpServerInventoryView implements ServerInventoryView, FilteredSer if (!initializationLatch.await(serverHttpTimeout, TimeUnit.MILLISECONDS)) { log.warn("Await initialization timed out for server [%s].", druidServer.getName()); } - } catch (InterruptedException ex) { + } + catch (InterruptedException ex) { log.warn("Await initialization interrupted while waiting on server [%s].", druidServer.getName()); Thread.currentThread().interrupt(); } @@ -619,7 +621,8 @@ public class HttpServerInventoryView implements ServerInventoryView, FilteredSer // sleep for a bit so that retry does not happen immediately. try { Thread.sleep(5000); - } catch (InterruptedException ex) { + } + catch (InterruptedException ex) { Thread.currentThread().interrupt(); } } @@ -632,14 +635,16 @@ public class HttpServerInventoryView implements ServerInventoryView, FilteredSer ); return future; - } catch (Throwable th) { + } + catch (Throwable th) { queue.add(druidServer.getName()); log.makeAlert(th, "Fatal error while fetching segment list from server [%s].", druidServer.getName()).emit(); // sleep for a bit so that retry does not happen immediately. 
try { Thread.sleep(5000); - } catch (InterruptedException ex) { + } + catch (InterruptedException ex) { Thread.currentThread().interrupt(); } diff --git a/server/src/main/java/io/druid/client/HttpServerInventoryViewConfig.java b/server/src/main/java/io/druid/client/HttpServerInventoryViewConfig.java index 6375b875ec0..ecddbaa5e52 100644 --- a/server/src/main/java/io/druid/client/HttpServerInventoryViewConfig.java +++ b/server/src/main/java/io/druid/client/HttpServerInventoryViewConfig.java @@ -38,7 +38,8 @@ public class HttpServerInventoryViewConfig public HttpServerInventoryViewConfig( @JsonProperty("serverTimeout") Period serverTimeout, @JsonProperty("numThreads") Integer numThreads - ){ + ) + { this.serverTimeout = serverTimeout != null ? serverTimeout.toStandardDuration().getMillis() : 4*60*1000; //4 mins diff --git a/server/src/main/java/io/druid/client/cache/Cache.java b/server/src/main/java/io/druid/client/cache/Cache.java index 02047a72352..d4393a5ae76 100644 --- a/server/src/main/java/io/druid/client/cache/Cache.java +++ b/server/src/main/java/io/druid/client/cache/Cache.java @@ -60,14 +60,16 @@ public interface Cache final public String namespace; final public byte[] key; - public NamedKey(String namespace, byte[] key) { + public NamedKey(String namespace, byte[] key) + { Preconditions.checkArgument(namespace != null, "namespace must not be null"); Preconditions.checkArgument(key != null, "key must not be null"); this.namespace = namespace; this.key = key; } - public byte[] toByteArray() { + public byte[] toByteArray() + { final byte[] nsBytes = StringUtils.toUtf8(this.namespace); return ByteBuffer.allocate(Ints.BYTES + nsBytes.length + this.key.length) .putInt(nsBytes.length) diff --git a/server/src/main/java/io/druid/client/cache/CacheConfig.java b/server/src/main/java/io/druid/client/cache/CacheConfig.java index 07189e31ffc..acfe6a6153d 100644 --- a/server/src/main/java/io/druid/client/cache/CacheConfig.java +++ 
b/server/src/main/java/io/druid/client/cache/CacheConfig.java @@ -58,7 +58,8 @@ public class CacheConfig return useCache; } - public int getNumBackgroundThreads(){ + public int getNumBackgroundThreads() + { return numBackgroundThreads; } @@ -72,7 +73,8 @@ public class CacheConfig return isQueryCacheable(query.getType()); } - public boolean isQueryCacheable(String queryType) { + public boolean isQueryCacheable(String queryType) + { // O(n) impl, but I don't think we'll ever have a million query types here return !unCacheable.contains(queryType); } diff --git a/server/src/main/java/io/druid/client/cache/MemcachedCustomConnectionFactoryBuilder.java b/server/src/main/java/io/druid/client/cache/MemcachedCustomConnectionFactoryBuilder.java index ff765e7bfd7..9c7b36901cc 100644 --- a/server/src/main/java/io/druid/client/cache/MemcachedCustomConnectionFactoryBuilder.java +++ b/server/src/main/java/io/druid/client/cache/MemcachedCustomConnectionFactoryBuilder.java @@ -56,9 +56,11 @@ class MemcachedCustomConnectionFactoryBuilder extends ConnectionFactoryBuilder @Override public ConnectionFactory build() { - return new DefaultConnectionFactory() { + return new DefaultConnectionFactory() + { @Override - public NodeLocator createLocator(List nodes) { + public NodeLocator createLocator(List nodes) + { switch (locator) { case ARRAY_MOD: return new ArrayModNodeLocator(nodes, getHashAlg()); @@ -81,116 +83,134 @@ class MemcachedCustomConnectionFactoryBuilder extends ConnectionFactoryBuilder } @Override - public BlockingQueue createOperationQueue() { - return opQueueFactory == null ? super.createOperationQueue() - : opQueueFactory.create(); + public BlockingQueue createOperationQueue() + { + return opQueueFactory == null ? super.createOperationQueue() : opQueueFactory.create(); } @Override - public BlockingQueue createReadOperationQueue() { - return readQueueFactory == null ? 
super.createReadOperationQueue() - : readQueueFactory.create(); + public BlockingQueue createReadOperationQueue() + { + return readQueueFactory == null ? super.createReadOperationQueue() : readQueueFactory.create(); } @Override - public BlockingQueue createWriteOperationQueue() { - return writeQueueFactory == null ? super.createReadOperationQueue() - : writeQueueFactory.create(); + public BlockingQueue createWriteOperationQueue() + { + return writeQueueFactory == null ? super.createReadOperationQueue() : writeQueueFactory.create(); } @Override - public Transcoder getDefaultTranscoder() { + public Transcoder getDefaultTranscoder() + { return transcoder == null ? super.getDefaultTranscoder() : transcoder; } @Override - public FailureMode getFailureMode() { + public FailureMode getFailureMode() + { return failureMode == null ? super.getFailureMode() : failureMode; } @Override - public HashAlgorithm getHashAlg() { + public HashAlgorithm getHashAlg() + { return hashAlg == null ? super.getHashAlg() : hashAlg; } @Override - public Collection getInitialObservers() { + public Collection getInitialObservers() + { return initialObservers; } @Override - public OperationFactory getOperationFactory() { + public OperationFactory getOperationFactory() + { return opFact == null ? super.getOperationFactory() : opFact; } @Override - public long getOperationTimeout() { + public long getOperationTimeout() + { return opTimeout == -1 ? super.getOperationTimeout() : opTimeout; } @Override - public int getReadBufSize() { + public int getReadBufSize() + { return readBufSize == -1 ? 
super.getReadBufSize() : readBufSize; } @Override - public boolean isDaemon() { + public boolean isDaemon() + { return isDaemon; } @Override - public boolean shouldOptimize() { + public boolean shouldOptimize() + { return shouldOptimize; } @Override - public boolean useNagleAlgorithm() { + public boolean useNagleAlgorithm() + { return useNagle; } @Override - public long getMaxReconnectDelay() { + public long getMaxReconnectDelay() + { return maxReconnectDelay; } @Override - public AuthDescriptor getAuthDescriptor() { + public AuthDescriptor getAuthDescriptor() + { return authDescriptor; } @Override - public long getOpQueueMaxBlockTime() { - return opQueueMaxBlockTime > -1 ? opQueueMaxBlockTime - : super.getOpQueueMaxBlockTime(); + public long getOpQueueMaxBlockTime() + { + return opQueueMaxBlockTime > -1 ? opQueueMaxBlockTime : super.getOpQueueMaxBlockTime(); } @Override - public int getTimeoutExceptionThreshold() { + public int getTimeoutExceptionThreshold() + { return timeoutExceptionThreshold; } @Override - public MetricType enableMetrics() { + public MetricType enableMetrics() + { return metricType == null ? super.enableMetrics() : metricType; } @Override - public MetricCollector getMetricCollector() { + public MetricCollector getMetricCollector() + { return collector == null ? super.getMetricCollector() : collector; } @Override - public ExecutorService getListenerExecutorService() { + public ExecutorService getListenerExecutorService() + { return executorService == null ? 
super.getListenerExecutorService() : executorService; } @Override - public boolean isDefaultExecutorService() { + public boolean isDefaultExecutorService() + { return executorService == null; } @Override - public long getAuthWaitTime() { + public long getAuthWaitTime() + { return authWaitTime; } }; diff --git a/server/src/main/java/io/druid/client/selector/ServerSelector.java b/server/src/main/java/io/druid/client/selector/ServerSelector.java index d04b66bf1b2..321c6034fb3 100644 --- a/server/src/main/java/io/druid/client/selector/ServerSelector.java +++ b/server/src/main/java/io/druid/client/selector/ServerSelector.java @@ -91,7 +91,8 @@ public class ServerSelector implements DiscoverySelector } } - public List getCandidates(final int numCandidates) { + public List getCandidates(final int numCandidates) + { synchronized (this) { if (numCandidates > 0) { return strategy.pick(servers, segment.get(), numCandidates) diff --git a/server/src/main/java/io/druid/curator/discovery/CuratorServiceUtils.java b/server/src/main/java/io/druid/curator/discovery/CuratorServiceUtils.java index 225111b1f5a..c5acbb51a04 100644 --- a/server/src/main/java/io/druid/curator/discovery/CuratorServiceUtils.java +++ b/server/src/main/java/io/druid/curator/discovery/CuratorServiceUtils.java @@ -36,7 +36,8 @@ public class CuratorServiceUtils * @param serviceName * @return */ - protected static String makeCanonicalServiceName(String serviceName) { + protected static String makeCanonicalServiceName(String serviceName) + { return serviceName.replaceAll("/", ":"); } } diff --git a/server/src/main/java/io/druid/curator/discovery/DiscoveryModule.java b/server/src/main/java/io/druid/curator/discovery/DiscoveryModule.java index 710f6828482..50eb29f8e87 100644 --- a/server/src/main/java/io/druid/curator/discovery/DiscoveryModule.java +++ b/server/src/main/java/io/druid/curator/discovery/DiscoveryModule.java @@ -56,6 +56,7 @@ import org.apache.curator.x.discovery.details.ServiceCacheListener; import 
java.io.IOException; import java.lang.annotation.Annotation; import java.util.Collection; +import java.util.Collections; import java.util.List; import java.util.Set; import java.util.concurrent.Executor; @@ -351,33 +352,31 @@ public class DiscoveryModule implements Module @Override public void start() throws Exception { - + // nothing } @Override public void close() throws IOException { - + // nothing } @Override public void addListener(ServiceCacheListener listener) { - + // nothing } @Override - public void addListener( - ServiceCacheListener listener, Executor executor - ) + public void addListener(ServiceCacheListener listener, Executor executor) { - + // nothing } @Override public void removeListener(ServiceCacheListener listener) { - + // nothing } } } @@ -409,12 +408,14 @@ public class DiscoveryModule implements Module } @Override - public ServiceProviderBuilder downInstancePolicy(DownInstancePolicy downInstancePolicy) { + public ServiceProviderBuilder downInstancePolicy(DownInstancePolicy downInstancePolicy) + { return this; } @Override - public ServiceProviderBuilder additionalFilter(InstanceFilter tInstanceFilter) { + public ServiceProviderBuilder additionalFilter(InstanceFilter tInstanceFilter) + { return this; } } @@ -424,7 +425,7 @@ public class DiscoveryModule implements Module @Override public void start() throws Exception { - + // nothing } @Override @@ -436,18 +437,19 @@ public class DiscoveryModule implements Module @Override public Collection> getAllInstances() throws Exception { - return null; + return Collections.emptyList(); } @Override - public void noteError(ServiceInstance tServiceInstance) { - + public void noteError(ServiceInstance tServiceInstance) + { + // nothing } @Override public void close() throws IOException { - + // nothing } } } diff --git a/server/src/main/java/io/druid/curator/discovery/ServerDiscoveryFactory.java b/server/src/main/java/io/druid/curator/discovery/ServerDiscoveryFactory.java index 3ddaa2c0230..fbbbc1c4d46 
100644 --- a/server/src/main/java/io/druid/curator/discovery/ServerDiscoveryFactory.java +++ b/server/src/main/java/io/druid/curator/discovery/ServerDiscoveryFactory.java @@ -75,7 +75,8 @@ public class ServerDiscoveryFactory } @Override - public void noteError(ServiceInstance tServiceInstance) { + public void noteError(ServiceInstance tServiceInstance) + { // do nothing } diff --git a/server/src/main/java/io/druid/curator/inventory/CuratorInventoryManager.java b/server/src/main/java/io/druid/curator/inventory/CuratorInventoryManager.java index a5fd7c6512e..eab70c20bd6 100644 --- a/server/src/main/java/io/druid/curator/inventory/CuratorInventoryManager.java +++ b/server/src/main/java/io/druid/curator/inventory/CuratorInventoryManager.java @@ -175,10 +175,12 @@ public class CuratorInventoryManager ); } - private byte[] getZkDataForNode(String path) { + private byte[] getZkDataForNode(String path) + { try { return curatorFramework.getData().decompressed().forPath(path); - } catch(Exception ex) { + } + catch (Exception ex) { log.warn(ex, "Exception while getting data for node %s", path); return null; } diff --git a/server/src/main/java/io/druid/guice/CacheModule.java b/server/src/main/java/io/druid/guice/CacheModule.java index 2db373fb906..f73a3f8b88b 100644 --- a/server/src/main/java/io/druid/guice/CacheModule.java +++ b/server/src/main/java/io/druid/guice/CacheModule.java @@ -53,7 +53,8 @@ public class CacheModule implements Module binder.install(new HybridCacheModule(prefix)); } - public static class HybridCacheModule implements Module { + public static class HybridCacheModule implements Module + { private final String prefix; diff --git a/server/src/main/java/io/druid/initialization/Initialization.java b/server/src/main/java/io/druid/initialization/Initialization.java index 6c92dac3c8c..b4a86843ad4 100644 --- a/server/src/main/java/io/druid/initialization/Initialization.java +++ b/server/src/main/java/io/druid/initialization/Initialization.java @@ -330,7 +330,8 @@ 
public class Initialization } } return urls; - } catch (IOException ex) { + } + catch (IOException ex) { throw Throwables.propagate(ex); } } diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataConnector.java b/server/src/main/java/io/druid/metadata/SQLMetadataConnector.java index de913557e22..d4ca065153b 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataConnector.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataConnector.java @@ -113,7 +113,10 @@ public abstract class SQLMetadataConnector implements MetadataStorageConnector */ public abstract String getQuoteString(); - public String getValidationQuery() { return "SELECT 1"; } + public String getValidationQuery() + { + return "SELECT 1"; + } public abstract boolean tableExists(Handle handle, final String tableName); @@ -554,7 +557,10 @@ public abstract class SQLMetadataConnector implements MetadataStorageConnector return matched.get(0); } - public MetadataStorageConnectorConfig getConfig() { return config.get(); } + public MetadataStorageConnectorConfig getConfig() + { + return config.get(); + } protected BasicDataSource getDatasource() { diff --git a/server/src/main/java/io/druid/metadata/storage/derby/DerbyConnector.java b/server/src/main/java/io/druid/metadata/storage/derby/DerbyConnector.java index 38ce6e737e1..aba0e87423a 100644 --- a/server/src/main/java/io/druid/metadata/storage/derby/DerbyConnector.java +++ b/server/src/main/java/io/druid/metadata/storage/derby/DerbyConnector.java @@ -89,12 +89,16 @@ public class DerbyConnector extends SQLMetadataConnector } @Override - public String getQuoteString() { + public String getQuoteString() + { return QUOTE_STRING; } @Override - public DBI getDBI() { return dbi; } + public DBI getDBI() + { + return dbi; + } @Override protected int getStreamingFetchSize() @@ -104,7 +108,10 @@ public class DerbyConnector extends SQLMetadataConnector } @Override - public String getValidationQuery() { return "VALUES 1"; } + public String 
getValidationQuery() + { + return "VALUES 1"; + } @LifecycleStart public void start() diff --git a/server/src/main/java/io/druid/query/lookup/MapLookupExtractorFactory.java b/server/src/main/java/io/druid/query/lookup/MapLookupExtractorFactory.java index 5ac15478ca3..77b80df3a1f 100644 --- a/server/src/main/java/io/druid/query/lookup/MapLookupExtractorFactory.java +++ b/server/src/main/java/io/druid/query/lookup/MapLookupExtractorFactory.java @@ -147,6 +147,8 @@ public class MapLookupExtractorFactory implements LookupExtractorFactory @GET @Produces(MediaType.APPLICATION_JSON) public Response getMap() - {return Response.ok(map).build();} + { + return Response.ok(map).build(); + } } } diff --git a/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java b/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java index 44eb18dea8f..118b53221fe 100644 --- a/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java +++ b/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java @@ -178,7 +178,8 @@ public class ArbitraryGranularitySpec implements GranularitySpec } @Override - public GranularitySpec withIntervals(List inputIntervals) { + public GranularitySpec withIntervals(List inputIntervals) + { return new ArbitraryGranularitySpec(queryGranularity, rollup, inputIntervals); } } diff --git a/server/src/main/java/io/druid/segment/indexing/granularity/UniformGranularitySpec.java b/server/src/main/java/io/druid/segment/indexing/granularity/UniformGranularitySpec.java index bc3aee77958..7205f552422 100644 --- a/server/src/main/java/io/druid/segment/indexing/granularity/UniformGranularitySpec.java +++ b/server/src/main/java/io/druid/segment/indexing/granularity/UniformGranularitySpec.java @@ -180,7 +180,8 @@ public class UniformGranularitySpec implements GranularitySpec } @Override - public GranularitySpec withIntervals(List inputIntervals) { + public 
GranularitySpec withIntervals(List inputIntervals) + { return new UniformGranularitySpec(segmentGranularity, queryGranularity, rollup, inputIntervals); } } diff --git a/server/src/main/java/io/druid/segment/loading/LocalDataSegmentKiller.java b/server/src/main/java/io/druid/segment/loading/LocalDataSegmentKiller.java index 160e00e65a9..8c8715d1f9d 100644 --- a/server/src/main/java/io/druid/segment/loading/LocalDataSegmentKiller.java +++ b/server/src/main/java/io/druid/segment/loading/LocalDataSegmentKiller.java @@ -39,7 +39,8 @@ public class LocalDataSegmentKiller implements DataSegmentKiller private final File storageDirectory; @Inject - public LocalDataSegmentKiller(LocalDataSegmentPusherConfig config) { + public LocalDataSegmentKiller(LocalDataSegmentPusherConfig config) + { this.storageDirectory = config.getStorageDirectory(); } diff --git a/server/src/main/java/io/druid/segment/loading/SegmentLoaderConfig.java b/server/src/main/java/io/druid/segment/loading/SegmentLoaderConfig.java index 3684b2744cc..1b3fbf74bed 100644 --- a/server/src/main/java/io/druid/segment/loading/SegmentLoaderConfig.java +++ b/server/src/main/java/io/druid/segment/loading/SegmentLoaderConfig.java @@ -77,7 +77,8 @@ public class SegmentLoaderConfig return numLoadingThreads; } - public int getNumBootstrapThreads() { + public int getNumBootstrapThreads() + { return numBootstrapThreads == null ? 
numLoadingThreads : numBootstrapThreads; } diff --git a/server/src/main/java/io/druid/segment/realtime/FireDepartmentMetrics.java b/server/src/main/java/io/druid/segment/realtime/FireDepartmentMetrics.java index 87a262cb219..d61de1aee04 100644 --- a/server/src/main/java/io/druid/segment/realtime/FireDepartmentMetrics.java +++ b/server/src/main/java/io/druid/segment/realtime/FireDepartmentMetrics.java @@ -94,23 +94,28 @@ public class FireDepartmentMetrics mergeTimeMillis.addAndGet(millis); } - public void incrementMergeCpuTime(long mergeTime){ + public void incrementMergeCpuTime(long mergeTime) + { mergeCpuTime.addAndGet(mergeTime); } - public void incrementPersistCpuTime(long persistTime){ + public void incrementPersistCpuTime(long persistTime) + { persistCpuTime.addAndGet(persistTime); } - public void incrementHandOffCount(){ + public void incrementHandOffCount() + { handOffCount.incrementAndGet(); } - public void setSinkCount(long sinkCount){ + public void setSinkCount(long sinkCount) + { this.sinkCount.set(sinkCount); } - public void reportMessageMaxTimestamp(long messageMaxTimestamp){ + public void reportMessageMaxTimestamp(long messageMaxTimestamp) + { this.messageMaxTimestamp.set(Math.max(messageMaxTimestamp, this.messageMaxTimestamp.get())); } diff --git a/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorPlumber.java b/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorPlumber.java index 16a9c86f494..678fb2e7761 100644 --- a/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorPlumber.java +++ b/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorPlumber.java @@ -111,7 +111,8 @@ public class AppenderatorPlumber implements Plumber log.info("Creating plumber using rejectionPolicy[%s]", getRejectionPolicy()); } - public Map getSegmentsView() { + public Map getSegmentsView() + { return ImmutableMap.copyOf(segments); } diff --git 
a/server/src/main/java/io/druid/segment/realtime/appenderator/DefaultOfflineAppenderatorFactory.java b/server/src/main/java/io/druid/segment/realtime/appenderator/DefaultOfflineAppenderatorFactory.java index 2b81e4f4d85..65245252686 100644 --- a/server/src/main/java/io/druid/segment/realtime/appenderator/DefaultOfflineAppenderatorFactory.java +++ b/server/src/main/java/io/druid/segment/realtime/appenderator/DefaultOfflineAppenderatorFactory.java @@ -43,7 +43,8 @@ public class DefaultOfflineAppenderatorFactory implements AppenderatorFactory @JacksonInject ObjectMapper objectMapper, @JacksonInject IndexIO indexIO, @JacksonInject IndexMerger indexMerger - ) { + ) + { this.dataSegmentPusher = dataSegmentPusher; this.objectMapper = objectMapper; this.indexIO = indexIO; diff --git a/server/src/main/java/io/druid/segment/realtime/firehose/ServiceAnnouncingChatHandlerProvider.java b/server/src/main/java/io/druid/segment/realtime/firehose/ServiceAnnouncingChatHandlerProvider.java index 8b5f3d81bae..6c762e2ed95 100644 --- a/server/src/main/java/io/druid/segment/realtime/firehose/ServiceAnnouncingChatHandlerProvider.java +++ b/server/src/main/java/io/druid/segment/realtime/firehose/ServiceAnnouncingChatHandlerProvider.java @@ -77,8 +77,7 @@ public class ServiceAnnouncingChatHandlerProvider implements ChatHandlerProvider throw new ISE("handler already registered for service[%s]", service); } - if (announce) - { + if (announce) { try { serviceAnnouncer.announce(makeDruidNode(service)); if (!announcements.add(service)) { @@ -103,8 +102,7 @@ public class ServiceAnnouncingChatHandlerProvider implements ChatHandlerProvider return; } - if (announcements.contains(service)) - { + if (announcements.contains(service)) { try { serviceAnnouncer.unannounce(makeDruidNode(service)); } diff --git a/server/src/main/java/io/druid/segment/realtime/firehose/WikipediaIrcDecoder.java b/server/src/main/java/io/druid/segment/realtime/firehose/WikipediaIrcDecoder.java index a6e84dc44de..e4e3fb19b1c 
100644 --- a/server/src/main/java/io/druid/segment/realtime/firehose/WikipediaIrcDecoder.java +++ b/server/src/main/java/io/druid/segment/realtime/firehose/WikipediaIrcDecoder.java @@ -102,7 +102,8 @@ class WikipediaIrcDecoder implements IrcDecoder } } - private DatabaseReader openDefaultGeoIpDb() { + private DatabaseReader openDefaultGeoIpDb() + { File geoDb = new File(System.getProperty("java.io.tmpdir"), this.getClass().getCanonicalName() + ".GeoLite2-City.mmdb"); try { @@ -118,22 +119,26 @@ class WikipediaIrcDecoder implements IrcDecoder } } - private DatabaseReader openDefaultGeoIpDb(File geoDb) { + private DatabaseReader openDefaultGeoIpDb(File geoDb) + { downloadGeoLiteDbToFile(geoDb); return openGeoIpDb(geoDb); } - private DatabaseReader openGeoIpDb(File geoDb) { + private DatabaseReader openGeoIpDb(File geoDb) + { try { DatabaseReader reader = new DatabaseReader(geoDb); log.info("Using geo ip database at [%s].", geoDb); return reader; - } catch (IOException e) { + } + catch (IOException e) { throw new RuntimeException("Could not open geo db at ["+ geoDb.getAbsolutePath() +"].", e); } } - private void downloadGeoLiteDbToFile(File geoDb) { + private void downloadGeoLiteDbToFile(File geoDb) + { if (geoDb.exists()) { return; } diff --git a/server/src/main/java/io/druid/server/DruidNode.java b/server/src/main/java/io/druid/server/DruidNode.java index eb8c9f52acd..a67cf1015b9 100644 --- a/server/src/main/java/io/druid/server/DruidNode.java +++ b/server/src/main/java/io/druid/server/DruidNode.java @@ -213,7 +213,8 @@ public class DruidNode return null; } - public String getHostAndPortToUse() { + public String getHostAndPortToUse() + { return getHostAndTlsPort() != null ? 
getHostAndTlsPort() : getHostAndPort(); } diff --git a/server/src/main/java/io/druid/server/QueryResource.java b/server/src/main/java/io/druid/server/QueryResource.java index 6c1ebb82055..9f8e90c9579 100644 --- a/server/src/main/java/io/druid/server/QueryResource.java +++ b/server/src/main/java/io/druid/server/QueryResource.java @@ -284,11 +284,13 @@ public class QueryResource implements QueryCountStatsProvider os.close(); success = true; - } catch (Exception ex) { + } + catch (Exception ex) { exceptionStr = ex.toString(); log.error(ex, "Unable to send query response."); throw Throwables.propagate(ex); - } finally { + } + finally { try { if (success) { successfulQueryCount.incrementAndGet(); @@ -331,9 +333,11 @@ public class QueryResource implements QueryCountStatsProvider ) ) ); - } catch (Exception ex) { + } + catch (Exception ex) { log.error(ex, "Unable to log query [%s]!", theQuery); - } finally { + } + finally { Thread.currentThread().setName(currThreadName); } } diff --git a/server/src/main/java/io/druid/server/coordination/DruidServerMetadata.java b/server/src/main/java/io/druid/server/coordination/DruidServerMetadata.java index 082050b59b5..aa87780fc70 100644 --- a/server/src/main/java/io/druid/server/coordination/DruidServerMetadata.java +++ b/server/src/main/java/io/druid/server/coordination/DruidServerMetadata.java @@ -60,7 +60,8 @@ public class DruidServerMetadata return name; } - public String getHost() { + public String getHost() + { return getHostAndTlsPort() != null ? 
getHostAndTlsPort() : getHostAndPort(); } diff --git a/server/src/main/java/io/druid/server/coordination/SegmentChangeRequestHistory.java b/server/src/main/java/io/druid/server/coordination/SegmentChangeRequestHistory.java index 4015bf1ca39..85472c71b76 100644 --- a/server/src/main/java/io/druid/server/coordination/SegmentChangeRequestHistory.java +++ b/server/src/main/java/io/druid/server/coordination/SegmentChangeRequestHistory.java @@ -145,7 +145,8 @@ public class SegmentChangeRequestHistory } else { try { future.set(getRequestsSinceWithoutWait(counter)); - } catch (Exception ex) { + } + catch (Exception ex) { future.setException(ex); } } @@ -197,7 +198,8 @@ public class SegmentChangeRequestHistory for (Map.Entry e : waitingFuturesCopy.entrySet()) { try { e.getKey().set(getRequestsSinceWithoutWait(e.getValue())); - } catch (Exception ex) { + } + catch (Exception ex) { e.getKey().setException(ex); } } diff --git a/server/src/main/java/io/druid/server/coordinator/BalancerStrategyFactory.java b/server/src/main/java/io/druid/server/coordinator/BalancerStrategyFactory.java index 011b63f5908..4cf46d95e7d 100644 --- a/server/src/main/java/io/druid/server/coordinator/BalancerStrategyFactory.java +++ b/server/src/main/java/io/druid/server/coordinator/BalancerStrategyFactory.java @@ -26,7 +26,7 @@ import com.google.common.util.concurrent.ListeningExecutorService; @JsonSubTypes(value = { @JsonSubTypes.Type(name = "diskNormalized", value = DiskNormalizedCostBalancerStrategyFactory.class), @JsonSubTypes.Type(name = "cost", value = CostBalancerStrategyFactory.class), - @JsonSubTypes.Type(name = "random", value = RandomBalancerStrategyFactory.class), + @JsonSubTypes.Type(name = "random", value = RandomBalancerStrategyFactory.class), }) public interface BalancerStrategyFactory { diff --git a/server/src/main/java/io/druid/server/coordinator/DiskNormalizedCostBalancerStrategy.java b/server/src/main/java/io/druid/server/coordinator/DiskNormalizedCostBalancerStrategy.java index 
cae77359a50..b003c145e3a 100644 --- a/server/src/main/java/io/druid/server/coordinator/DiskNormalizedCostBalancerStrategy.java +++ b/server/src/main/java/io/druid/server/coordinator/DiskNormalizedCostBalancerStrategy.java @@ -45,8 +45,7 @@ public class DiskNormalizedCostBalancerStrategy extends CostBalancerStrategy } int nSegments = 1; - if(server.getServer().getSegments().size() > 0) - { + if(server.getServer().getSegments().size() > 0) { nSegments = server.getServer().getSegments().size(); } diff --git a/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java b/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java index 0b68a670b90..3d41517204e 100644 --- a/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java +++ b/server/src/main/java/io/druid/server/coordinator/DruidCoordinator.java @@ -738,8 +738,9 @@ public class DruidCoordinator } catch (Exception e) { log.makeAlert(e, "Caught exception, ignoring so that schedule keeps going.").emit(); - } finally { - if(balancerExec != null){ + } + finally { + if (balancerExec != null){ balancerExec.shutdownNow(); } } diff --git a/server/src/main/java/io/druid/server/coordinator/LoadQueuePeon.java b/server/src/main/java/io/druid/server/coordinator/LoadQueuePeon.java index 3ee0cdea538..59c45239597 100644 --- a/server/src/main/java/io/druid/server/coordinator/LoadQueuePeon.java +++ b/server/src/main/java/io/druid/server/coordinator/LoadQueuePeon.java @@ -191,7 +191,8 @@ public class LoadQueuePeon segmentsToDrop.put(segment, new SegmentHolder(segment, DROP, Collections.singletonList(callback))); } - private void processSegmentChangeRequest() { + private void processSegmentChangeRequest() + { if (currentlyProcessing == null) { if (!segmentsToDrop.isEmpty()) { currentlyProcessing = segmentsToDrop.firstEntry().getValue(); @@ -275,7 +276,8 @@ public class LoadQueuePeon entryRemoved(path); } - } catch (Exception e) { + } + catch (Exception e) { failAssign(e); } } else { diff --git 
a/server/src/main/java/io/druid/server/coordinator/RandomBalancerStrategy.java b/server/src/main/java/io/druid/server/coordinator/RandomBalancerStrategy.java index dd6255821d6..8c2aed3397a 100644 --- a/server/src/main/java/io/druid/server/coordinator/RandomBalancerStrategy.java +++ b/server/src/main/java/io/druid/server/coordinator/RandomBalancerStrategy.java @@ -29,19 +29,13 @@ public class RandomBalancerStrategy implements BalancerStrategy private final ReservoirSegmentSampler sampler = new ReservoirSegmentSampler(); @Override - public ServerHolder findNewSegmentHomeReplicator( - DataSegment proposalSegment, List serverHolders - ) + public ServerHolder findNewSegmentHomeReplicator(DataSegment proposalSegment, List serverHolders) { - if (serverHolders.size()==1) - { + if (serverHolders.size() == 1) { return null; - } - else - { + } else { ServerHolder holder = serverHolders.get(new Random().nextInt(serverHolders.size())); - while (holder.isServingSegment(proposalSegment)) - { + while (holder.isServingSegment(proposalSegment)) { holder = serverHolders.get(new Random().nextInt(serverHolders.size())); } return holder; @@ -49,9 +43,7 @@ public class RandomBalancerStrategy implements BalancerStrategy } @Override - public ServerHolder findNewSegmentHomeBalancer( - DataSegment proposalSegment, List serverHolders - ) + public ServerHolder findNewSegmentHomeBalancer(DataSegment proposalSegment, List serverHolders) { return null; //To change body of implemented methods use File | Settings | File Templates. 
} @@ -63,9 +55,7 @@ public class RandomBalancerStrategy implements BalancerStrategy } @Override - public void emitStats( - String tier, CoordinatorStats stats, List serverHolderList - ) + public void emitStats(String tier, CoordinatorStats stats, List serverHolderList) { } } diff --git a/server/src/main/java/io/druid/server/http/HostAndPortWithScheme.java b/server/src/main/java/io/druid/server/http/HostAndPortWithScheme.java index 8e6771c08ce..c3552e10d91 100644 --- a/server/src/main/java/io/druid/server/http/HostAndPortWithScheme.java +++ b/server/src/main/java/io/druid/server/http/HostAndPortWithScheme.java @@ -27,16 +27,19 @@ public class HostAndPortWithScheme private String scheme; private HostAndPort hostAndPort; - public HostAndPortWithScheme(String scheme, HostAndPort hostAndPort) { + public HostAndPortWithScheme(String scheme, HostAndPort hostAndPort) + { this.scheme = scheme; this.hostAndPort = hostAndPort; } - public static HostAndPortWithScheme fromParts(String scheme, String host, int port) { + public static HostAndPortWithScheme fromParts(String scheme, String host, int port) + { return new HostAndPortWithScheme(scheme, HostAndPort.fromParts(host, port)); } - public static HostAndPortWithScheme fromString(String hostPortMaybeSchemeString) { + public static HostAndPortWithScheme fromString(String hostPortMaybeSchemeString) + { if (hostPortMaybeSchemeString.startsWith("http")) { return HostAndPortWithScheme.fromString( hostPortMaybeSchemeString.substring(0, hostPortMaybeSchemeString.indexOf(':')), @@ -46,28 +49,34 @@ public class HostAndPortWithScheme return HostAndPortWithScheme.fromString("http", hostPortMaybeSchemeString); } - public static HostAndPortWithScheme fromString(String scheme, String hostPortString) { + public static HostAndPortWithScheme fromString(String scheme, String hostPortString) + { return new HostAndPortWithScheme(checkAndGetScheme(scheme), HostAndPort.fromString(hostPortString)); } - private static String checkAndGetScheme(String 
scheme) { + private static String checkAndGetScheme(String scheme) + { Preconditions.checkState(scheme.toLowerCase().equals("http") || scheme.toLowerCase().equals("https")); return scheme.toLowerCase(); } - public String getScheme() { + public String getScheme() + { return scheme; } - public String getHostText() { + public String getHostText() + { return hostAndPort.getHostText(); } - public int getPort() { + public int getPort() + { return hostAndPort.getPort(); } - public int getPortOrDefault(int defaultPort) { + public int getPortOrDefault(int defaultPort) + { return hostAndPort.getPortOrDefault(defaultPort); } @@ -77,7 +86,8 @@ public class HostAndPortWithScheme } @Override - public String toString() { + public String toString() + { return String.format("%s:%s", scheme, hostAndPort.toString()); } diff --git a/server/src/main/java/io/druid/server/http/IntervalsResource.java b/server/src/main/java/io/druid/server/http/IntervalsResource.java index 25642be9d5a..630531252c2 100644 --- a/server/src/main/java/io/druid/server/http/IntervalsResource.java +++ b/server/src/main/java/io/druid/server/http/IntervalsResource.java @@ -165,7 +165,9 @@ public class IntervalsResource private void setProperties( final Map>> retVal, - DruidDataSource dataSource, DataSegment dataSegment) { + DruidDataSource dataSource, DataSegment dataSegment + ) + { Map properties = retVal.get(dataSegment.getInterval()).get(dataSource.getName()); if (properties == null) { properties = Maps.newHashMap(); diff --git a/server/src/main/java/io/druid/server/http/LookupCoordinatorResource.java b/server/src/main/java/io/druid/server/http/LookupCoordinatorResource.java index dd6520eea6f..112d85cd9b7 100644 --- a/server/src/main/java/io/druid/server/http/LookupCoordinatorResource.java +++ b/server/src/main/java/io/druid/server/http/LookupCoordinatorResource.java @@ -335,7 +335,8 @@ public class LookupCoordinatorResource } return Response.ok(result).build(); - } catch (Exception ex) { + } + catch (Exception 
ex) { LOG.error(ex, "Error getting lookups status"); return Response.serverError().entity(ServletResourceUtils.sanitizeException(ex)).build(); } @@ -378,7 +379,8 @@ public class LookupCoordinatorResource } return Response.ok(lookupStatusMap).build(); - } catch (Exception ex) { + } + catch (Exception ex) { LOG.error(ex, "Error getting lookups status for tier [%s].", tier); return Response.serverError().entity(ServletResourceUtils.sanitizeException(ex)).build(); } @@ -424,7 +426,8 @@ public class LookupCoordinatorResource detailed ) ).build(); - } catch (Exception ex) { + } + catch (Exception ex) { LOG.error(ex, "Error getting lookups status for tier [%s] and lookup [%s].", tier, lookup); return Response.serverError().entity(ServletResourceUtils.sanitizeException(ex)).build(); } @@ -502,7 +505,8 @@ public class LookupCoordinatorResource } return Response.ok(result).build(); - } catch (Exception ex) { + } + catch (Exception ex) { LOG.error(ex, "Error getting node status."); return Response.serverError().entity(ServletResourceUtils.sanitizeException(ex)).build(); } @@ -532,7 +536,8 @@ public class LookupCoordinatorResource } return Response.ok(tierNodesStatus).build(); - } catch (Exception ex) { + } + catch (Exception ex) { LOG.error(ex, "Error getting node status for tier [%s].", tier); return Response.serverError().entity(ServletResourceUtils.sanitizeException(ex)).build(); } @@ -558,7 +563,8 @@ public class LookupCoordinatorResource return Response.ok(lookupsState).build(); } - } catch (Exception ex) { + } + catch (Exception ex) { LOG.error(ex, "Error getting node status for [%s].", hostAndPort); return Response.serverError().entity(ServletResourceUtils.sanitizeException(ex)).build(); } diff --git a/server/src/main/java/io/druid/server/http/RulesResource.java b/server/src/main/java/io/druid/server/http/RulesResource.java index 1d93d61df7d..e7eabccb577 100644 --- a/server/src/main/java/io/druid/server/http/RulesResource.java +++ 
b/server/src/main/java/io/druid/server/http/RulesResource.java @@ -123,9 +123,9 @@ public class RulesResource ) { try { - return Response.ok(getRuleHistory(dataSourceName, interval, count)) - .build(); - } catch (IllegalArgumentException e) { + return Response.ok(getRuleHistory(dataSourceName, interval, count)).build(); + } + catch (IllegalArgumentException e) { return Response.status(Response.Status.BAD_REQUEST) .entity(ImmutableMap.of("error", e.getMessage())) .build(); @@ -142,9 +142,9 @@ public class RulesResource ) { try { - return Response.ok(getRuleHistory(null, interval, count)) - .build(); - } catch (IllegalArgumentException e) { + return Response.ok(getRuleHistory(null, interval, count)).build(); + } + catch (IllegalArgumentException e) { return Response.status(Response.Status.BAD_REQUEST) .entity(ImmutableMap.of("error", e.getMessage())) .build(); @@ -157,18 +157,18 @@ public class RulesResource final Integer count ) throws IllegalArgumentException { - if (interval == null && count != null) { - if (dataSourceName != null) { - return auditManager.fetchAuditHistory(dataSourceName, "rules", count); - } - return auditManager.fetchAuditHistory("rules", count); - } - - Interval theInterval = interval == null ? null : new Interval(interval); + if (interval == null && count != null) { if (dataSourceName != null) { - return auditManager.fetchAuditHistory(dataSourceName, "rules", theInterval); + return auditManager.fetchAuditHistory(dataSourceName, "rules", count); } - return auditManager.fetchAuditHistory("rules", theInterval); + return auditManager.fetchAuditHistory("rules", count); + } + + Interval theInterval = interval == null ? 
null : new Interval(interval); + if (dataSourceName != null) { + return auditManager.fetchAuditHistory(dataSourceName, "rules", theInterval); + } + return auditManager.fetchAuditHistory("rules", theInterval); } } diff --git a/server/src/main/java/io/druid/server/initialization/HttpEmitterModule.java b/server/src/main/java/io/druid/server/initialization/HttpEmitterModule.java index b49be70226d..142fb45c7ca 100644 --- a/server/src/main/java/io/druid/server/initialization/HttpEmitterModule.java +++ b/server/src/main/java/io/druid/server/initialization/HttpEmitterModule.java @@ -21,7 +21,6 @@ package io.druid.server.initialization; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Supplier; -import com.google.common.base.Throwables; import com.google.inject.Binder; import com.google.inject.Module; import com.google.inject.Provides; @@ -53,8 +52,9 @@ public class HttpEmitterModule implements Module final SSLContext context; try { context = SSLContext.getDefault(); - } catch (NoSuchAlgorithmException e) { - throw Throwables.propagate(e); + } + catch (NoSuchAlgorithmException e) { + throw new RuntimeException(e); } binder.bind(SSLContext.class).toProvider(Providers.of(context)).in(LazySingleton.class); diff --git a/server/src/main/java/io/druid/server/initialization/ZkPathsConfig.java b/server/src/main/java/io/druid/server/initialization/ZkPathsConfig.java index 3335eb820ad..eb8e8c5380f 100644 --- a/server/src/main/java/io/druid/server/initialization/ZkPathsConfig.java +++ b/server/src/main/java/io/druid/server/initialization/ZkPathsConfig.java @@ -96,27 +96,26 @@ public class ZkPathsConfig } @Override - public boolean equals(Object other){ - if(null == other){ + public boolean equals(Object other) + { + if (null == other) { return false; } - if(this == other){ + if (this == other) { return true; } - if(!(other instanceof ZkPathsConfig)){ + if (!(other instanceof ZkPathsConfig)) { return false; } ZkPathsConfig otherConfig = (ZkPathsConfig) 
other; - if( - this.getBase().equals(otherConfig.getBase()) && + if (this.getBase().equals(otherConfig.getBase()) && this.getAnnouncementsPath().equals(otherConfig.getAnnouncementsPath()) && this.getConnectorPath().equals(otherConfig.getConnectorPath()) && this.getLiveSegmentsPath().equals(otherConfig.getLiveSegmentsPath()) && this.getCoordinatorPath().equals(otherConfig.getCoordinatorPath()) && this.getLoadQueuePath().equals(otherConfig.getLoadQueuePath()) && this.getPropertiesPath().equals(otherConfig.getPropertiesPath()) && - this.getServedSegmentsPath().equals(otherConfig.getServedSegmentsPath()) - ){ + this.getServedSegmentsPath().equals(otherConfig.getServedSegmentsPath())) { return true; } return false; diff --git a/server/src/main/java/io/druid/server/initialization/jetty/ResponseHeaderFilterHolder.java b/server/src/main/java/io/druid/server/initialization/jetty/ResponseHeaderFilterHolder.java index 4883e42bf48..b85e0e1d3e9 100644 --- a/server/src/main/java/io/druid/server/initialization/jetty/ResponseHeaderFilterHolder.java +++ b/server/src/main/java/io/druid/server/initialization/jetty/ResponseHeaderFilterHolder.java @@ -101,6 +101,8 @@ public class ResponseHeaderFilterHolder implements ServletFilterHolder } @Override - public void destroy() { } + public void destroy() + { + } } } diff --git a/server/src/main/java/io/druid/server/initialization/jetty/ServletFilterHolder.java b/server/src/main/java/io/druid/server/initialization/jetty/ServletFilterHolder.java index 0f9bfc7ebaa..6a22f2303e3 100644 --- a/server/src/main/java/io/druid/server/initialization/jetty/ServletFilterHolder.java +++ b/server/src/main/java/io/druid/server/initialization/jetty/ServletFilterHolder.java @@ -33,7 +33,8 @@ import java.util.Map; * Note that some of the druid nodes (router for example) use async servlets and your filter * implementation should be able to handle those requests properly. 
*/ -public interface ServletFilterHolder { +public interface ServletFilterHolder +{ /** * Get the Filter object that should be added to the servlet. diff --git a/server/src/main/java/io/druid/server/log/FileRequestLogger.java b/server/src/main/java/io/druid/server/log/FileRequestLogger.java index a35fc54b8eb..b0beed59445 100644 --- a/server/src/main/java/io/druid/server/log/FileRequestLogger.java +++ b/server/src/main/java/io/druid/server/log/FileRequestLogger.java @@ -106,10 +106,11 @@ public class FileRequestLogger implements RequestLogger } } - private OutputStreamWriter getFileWriter() throws FileNotFoundException { + private OutputStreamWriter getFileWriter() throws FileNotFoundException + { return new OutputStreamWriter( - new FileOutputStream(new File(baseDir, currentDay.toString("yyyy-MM-dd'.log'")), true), - Charsets.UTF_8 + new FileOutputStream(new File(baseDir, currentDay.toString("yyyy-MM-dd'.log'")), true), + Charsets.UTF_8 ); } diff --git a/server/src/main/java/io/druid/server/lookup/cache/LookupCoordinatorManager.java b/server/src/main/java/io/druid/server/lookup/cache/LookupCoordinatorManager.java index 95a3226ebd9..22221e6b73c 100644 --- a/server/src/main/java/io/druid/server/lookup/cache/LookupCoordinatorManager.java +++ b/server/src/main/java/io/druid/server/lookup/cache/LookupCoordinatorManager.java @@ -301,8 +301,9 @@ public class LookupCoordinatorManager } } ); - } catch (IOException e) { - throw Throwables.propagate(e); + } + catch (IOException e) { + throw new RuntimeException(e); } } @@ -571,12 +572,14 @@ public class LookupCoordinatorManager allFuture.cancel(true); Thread.currentThread().interrupt(); throw ex; - } catch (Exception ex) { + } + catch (Exception ex) { allFuture.cancel(true); throw ex; } - } catch (Exception ex) { + } + catch (Exception ex) { LOG.makeAlert(ex, "Failed to finish lookup management loop.").emit(); } @@ -760,10 +763,9 @@ public class LookupCoordinatorManager response ); return response; - } catch (IOException ex) 
{ - throw new IOE( - ex, "Failed to parse update response from [%s]. response [%s]", url, result - ); + } + catch (IOException ex) { + throw new IOE(ex, "Failed to parse update response from [%s]. response [%s]", url, result); } } else { final ByteArrayOutputStream baos = new ByteArrayOutputStream(); @@ -808,17 +810,16 @@ public class LookupCoordinatorManager LOOKUPS_STATE_TYPE_REFERENCE ); LOG.debug( - "Get on [%s], Status: %s reason: [%s], Response [%s].", url, returnCode.get(), reasonString.get(), + "Get on [%s], Status: [%s] reason: [%s], Response [%s].", + url, + returnCode.get(), + reasonString.get(), response ); return response; - } catch(IOException ex) { - throw new IOE( - ex, - "Failed to parser GET lookups response from [%s]. response [%s].", - url, - result - ); + } + catch (IOException ex) { + throw new IOE(ex, "Failed to parse GET lookups response from [%s]. response [%s].", url, result); } } else { final ByteArrayOutputStream baos = new ByteArrayOutputStream(); diff --git a/server/src/main/java/io/druid/server/lookup/cache/LookupExtractorFactoryMapContainer.java b/server/src/main/java/io/druid/server/lookup/cache/LookupExtractorFactoryMapContainer.java index b90af36fe68..2423f0d0c56 100644 --- a/server/src/main/java/io/druid/server/lookup/cache/LookupExtractorFactoryMapContainer.java +++ b/server/src/main/java/io/druid/server/lookup/cache/LookupExtractorFactoryMapContainer.java @@ -59,7 +59,8 @@ public class LookupExtractorFactoryMapContainer return lookupExtractorFactory; } - public boolean replaces(LookupExtractorFactoryMapContainer other) { + public boolean replaces(LookupExtractorFactoryMapContainer other) + { if (version == null && other.getVersion() == null) { return false; } diff --git a/server/src/main/java/io/druid/server/router/JavaScriptTieredBrokerSelectorStrategy.java b/server/src/main/java/io/druid/server/router/JavaScriptTieredBrokerSelectorStrategy.java index 0b8472f4e32..fb2d4ea7e35 100644 ---
a/server/src/main/java/io/druid/server/router/JavaScriptTieredBrokerSelectorStrategy.java +++ b/server/src/main/java/io/druid/server/router/JavaScriptTieredBrokerSelectorStrategy.java @@ -24,8 +24,6 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Optional; import com.google.common.base.Preconditions; -import com.google.common.base.Throwables; - import io.druid.java.util.common.ISE; import io.druid.js.JavaScriptConfig; import io.druid.query.Query; @@ -61,8 +59,9 @@ public class JavaScriptTieredBrokerSelectorStrategy implements TieredBrokerSelec final ScriptEngine engine = new ScriptEngineManager().getEngineByName("javascript"); try { ((Compilable)engine).compile("var apply = " + fn).eval(); - } catch(ScriptException e) { - Throwables.propagate(e); + } + catch (ScriptException e) { + throw new RuntimeException(e); } this.function = fn; this.fnSelector = ((Invocable)engine).getInterface(SelectorFunction.class); diff --git a/server/src/main/java/io/druid/server/security/Access.java b/server/src/main/java/io/druid/server/security/Access.java index 69fee1386bc..19f6a611575 100644 --- a/server/src/main/java/io/druid/server/security/Access.java +++ b/server/src/main/java/io/druid/server/security/Access.java @@ -26,16 +26,19 @@ public class Access private final boolean allowed; private String message; - public Access(boolean allowed) { + public Access(boolean allowed) + { this(allowed, ""); } - public Access(boolean allowed, String message) { + public Access(boolean allowed, String message) + { this.allowed = allowed; this.message = message; } - public boolean isAllowed() { + public boolean isAllowed() + { return allowed; } diff --git a/server/src/main/java/io/druid/server/security/AuthConfig.java b/server/src/main/java/io/druid/server/security/AuthConfig.java index 8ade4ce6c41..619ab158a75 100644 --- a/server/src/main/java/io/druid/server/security/AuthConfig.java +++ 
b/server/src/main/java/io/druid/server/security/AuthConfig.java @@ -30,14 +30,14 @@ public class AuthConfig * */ public static final String DRUID_AUTH_TOKEN = "Druid-Auth-Token"; - public AuthConfig() { + public AuthConfig() + { this(false); } @JsonCreator - public AuthConfig( - @JsonProperty("enabled") boolean enabled - ){ + public AuthConfig(@JsonProperty("enabled") boolean enabled) + { this.enabled = enabled; } /** diff --git a/server/src/main/java/io/druid/timeline/partition/LinearShardSpec.java b/server/src/main/java/io/druid/timeline/partition/LinearShardSpec.java index 593dad190d0..000f035921d 100644 --- a/server/src/main/java/io/druid/timeline/partition/LinearShardSpec.java +++ b/server/src/main/java/io/druid/timeline/partition/LinearShardSpec.java @@ -46,7 +46,8 @@ public class LinearShardSpec implements ShardSpec @JsonProperty("partitionNum") @Override - public int getPartitionNum() { + public int getPartitionNum() + { return partitionNum; } @@ -70,12 +71,14 @@ public class LinearShardSpec implements ShardSpec } @Override - public PartitionChunk createChunk(T obj) { + public PartitionChunk createChunk(T obj) + { return new LinearPartitionChunk(partitionNum, obj); } @Override - public boolean isInChunk(long timestamp, InputRow inputRow) { + public boolean isInChunk(long timestamp, InputRow inputRow) + { return true; } diff --git a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java index e72d54b98ec..002cea6a2f9 100644 --- a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java +++ b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java @@ -2908,7 +2908,8 @@ public class CachingClusteredClientTest { try { return baseSegment.getShardSpec(); - } catch (IllegalStateException e) { + } + catch (IllegalStateException e) { return NoneShardSpec.instance(); } } diff --git a/server/src/test/java/io/druid/client/cache/CacheConfigTest.java 
b/server/src/test/java/io/druid/client/cache/CacheConfigTest.java index cd143816777..10e9d6ee4c2 100644 --- a/server/src/test/java/io/druid/client/cache/CacheConfigTest.java +++ b/server/src/test/java/io/druid/client/cache/CacheConfigTest.java @@ -48,10 +48,12 @@ public class CacheConfigTest private static final String propertyPrefix = "io.druid.test.cache"; @BeforeClass - public static void populateStatics(){ + public static void populateStatics() + { injector = GuiceInjectors.makeStartupInjectorWithModules(ImmutableList.of(new CacheConfigTestModule())); configurator = injector.getBinding(JsonConfigurator.class).getProvider().get(); } + private static class CacheConfigTestModule implements DruidModule { @@ -67,10 +69,12 @@ public class CacheConfigTest JsonConfigProvider.bind(binder,propertyPrefix,CacheConfig.class); } } + private Properties properties = new Properties(); @Before - public void setupTest(){ + public void setupTest() + { properties.clear(); configProvider = JsonConfigProvider.of(propertyPrefix, CacheConfig.class); } diff --git a/server/src/test/java/io/druid/client/cache/CacheDistributionTest.java b/server/src/test/java/io/druid/client/cache/CacheDistributionTest.java index bf8878fb739..fbd37b537d3 100644 --- a/server/src/test/java/io/druid/client/cache/CacheDistributionTest.java +++ b/server/src/test/java/io/druid/client/cache/CacheDistributionTest.java @@ -53,7 +53,8 @@ public class CacheDistributionTest public static final int KEY_COUNT = 1_000_000; @Parameterized.Parameters(name = "repetitions={0}, hash={1}") - public static Iterable data() { + public static Iterable data() + { List hash = ImmutableList.of( DefaultHashAlgorithm.FNV1A_64_HASH, DefaultHashAlgorithm.KETAMA_HASH, MemcachedCache.MURMUR3_128 ); @@ -80,7 +81,8 @@ public class CacheDistributionTest final int reps; @BeforeClass - public static void header() { + public static void header() + { System.out.printf( Locale.ENGLISH, "%25s\t%5s\t%10s\t%10s\t%10s\t%10s\t%10s\t%7s\t%5s%n", @@ 
-145,7 +147,8 @@ public class CacheDistributionTest System.out.printf(Locale.ENGLISH, "%7.2f\t%5.0f%n", (double) min / (double) max, (double)t / KEY_COUNT); } - private static MemcachedNode dummyNode(String host, int port) { + private static MemcachedNode dummyNode(String host, int port) + { SocketAddress address = InetSocketAddress.createUnresolved(host, port); MemcachedNode node = EasyMock.createNiceMock(MemcachedNode.class); EasyMock.expect(node.getSocketAddress()).andReturn(address).anyTimes(); diff --git a/server/src/test/java/io/druid/curator/CuratorTestBase.java b/server/src/test/java/io/druid/curator/CuratorTestBase.java index c0a4aa67888..d50f65cb271 100644 --- a/server/src/test/java/io/druid/curator/CuratorTestBase.java +++ b/server/src/test/java/io/druid/curator/CuratorTestBase.java @@ -156,9 +156,9 @@ public class CuratorTestBase try { curator.close(); server.close(); - } catch(IOException ex) - { - throw Throwables.propagate(ex); + } + catch (IOException ex) { + throw new RuntimeException(ex); } } diff --git a/server/src/test/java/io/druid/curator/inventory/CuratorInventoryManagerTest.java b/server/src/test/java/io/druid/curator/inventory/CuratorInventoryManagerTest.java index 9acd78599bd..1e54b7a759b 100644 --- a/server/src/test/java/io/druid/curator/inventory/CuratorInventoryManagerTest.java +++ b/server/src/test/java/io/druid/curator/inventory/CuratorInventoryManagerTest.java @@ -114,14 +114,12 @@ public class CuratorInventoryManagerTest extends CuratorTestBase final CountDownLatch latch = new CountDownLatch(1); curator.getCuratorListenable().addListener( - new CuratorListener() - { + new CuratorListener() { @Override public void eventReceived(CuratorFramework client, CuratorEvent event) throws Exception { if (event.getType() == CuratorEventType.WATCHED - && event.getWatchedEvent().getState() == Watcher.Event.KeeperState.Disconnected) - { + && event.getWatchedEvent().getState() == Watcher.Event.KeeperState.Disconnected) { latch.countDown(); } } diff 
--git a/server/src/test/java/io/druid/guice/DruidProcessingModuleTest.java b/server/src/test/java/io/druid/guice/DruidProcessingModuleTest.java index 30ecef8709b..b3be5cfb161 100644 --- a/server/src/test/java/io/druid/guice/DruidProcessingModuleTest.java +++ b/server/src/test/java/io/druid/guice/DruidProcessingModuleTest.java @@ -27,7 +27,8 @@ public class DruidProcessingModuleTest { @Test(expected=ProvisionException.class) - public void testMemoryCheckThrowsException() { + public void testMemoryCheckThrowsException() + { DruidProcessingModule module = new DruidProcessingModule(); module.getIntermediateResultsPool(new DruidProcessingConfig() { diff --git a/server/src/test/java/io/druid/guice/JsonConfigTesterBase.java b/server/src/test/java/io/druid/guice/JsonConfigTesterBase.java index 87ef70454df..f9d1c71e94e 100644 --- a/server/src/test/java/io/druid/guice/JsonConfigTesterBase.java +++ b/server/src/test/java/io/druid/guice/JsonConfigTesterBase.java @@ -56,7 +56,8 @@ public abstract class JsonConfigTesterBase protected int assertions = 0; protected Properties testProperties = new Properties(); - protected static String getPropertyKey(String fieldName){ + protected static String getPropertyKey(String fieldName) + { return StringUtils.format( "%s.%s", configPrefix, fieldName diff --git a/server/src/test/java/io/druid/metadata/SQLMetadataConnectorTest.java b/server/src/test/java/io/druid/metadata/SQLMetadataConnectorTest.java index 660c985fa17..eb2c39c3c29 100644 --- a/server/src/test/java/io/druid/metadata/SQLMetadataConnectorTest.java +++ b/server/src/test/java/io/druid/metadata/SQLMetadataConnectorTest.java @@ -38,7 +38,8 @@ public class SQLMetadataConnectorTest private MetadataStorageTablesConfig tablesConfig; @Before - public void setUp() throws Exception { + public void setUp() throws Exception + { connector = derbyConnectorRule.getConnector(); tablesConfig = derbyConnectorRule.metadataTablesConfigSupplier().get(); } diff --git 
a/server/src/test/java/io/druid/query/lookup/LookupIntrospectionResourceTest.java b/server/src/test/java/io/druid/query/lookup/LookupIntrospectionResourceTest.java index 9bd895640e7..2d17a7d15af 100644 --- a/server/src/test/java/io/druid/query/lookup/LookupIntrospectionResourceTest.java +++ b/server/src/test/java/io/druid/query/lookup/LookupIntrospectionResourceTest.java @@ -84,7 +84,8 @@ public class LookupIntrospectionResourceTest LookupIntrospectHandler lookupIntrospectHandler = new LookupIntrospectHandler() { @POST - public Response postMock(InputStream inputStream){ + public Response postMock(InputStream inputStream) + { return Response.ok().build(); } }; diff --git a/server/src/test/java/io/druid/segment/indexing/granularity/ArbitraryGranularityTest.java b/server/src/test/java/io/druid/segment/indexing/granularity/ArbitraryGranularityTest.java index a53698ca121..957d04a8e0d 100644 --- a/server/src/test/java/io/druid/segment/indexing/granularity/ArbitraryGranularityTest.java +++ b/server/src/test/java/io/druid/segment/indexing/granularity/ArbitraryGranularityTest.java @@ -132,7 +132,8 @@ public class ArbitraryGranularityTest boolean thrown = false; try { final GranularitySpec spec = new ArbitraryGranularitySpec(Granularities.NONE, intervals); - } catch(IllegalArgumentException e) { + } + catch (IllegalArgumentException e) { thrown = true; } @@ -165,7 +166,8 @@ public class ArbitraryGranularityTest boolean thrown = false; try { final GranularitySpec spec = new ArbitraryGranularitySpec(Granularities.NONE, intervals); - } catch(IllegalArgumentException e) { + } + catch (IllegalArgumentException e) { thrown = true; } @@ -186,7 +188,8 @@ public class ArbitraryGranularityTest try { final GranularitySpec rtSpec = jsonMapper.readValue(jsonMapper.writeValueAsString(spec), GranularitySpec.class); Assert.assertEquals("Round-trip", spec.bucketIntervals(), rtSpec.bucketIntervals()); - } catch(Exception e) { + } + catch (Exception e) { throw Throwables.propagate(e); } } 
diff --git a/server/src/test/java/io/druid/segment/indexing/granularity/UniformGranularityTest.java b/server/src/test/java/io/druid/segment/indexing/granularity/UniformGranularityTest.java index 289fea66b93..cec80b3e7b0 100644 --- a/server/src/test/java/io/druid/segment/indexing/granularity/UniformGranularityTest.java +++ b/server/src/test/java/io/druid/segment/indexing/granularity/UniformGranularityTest.java @@ -177,7 +177,8 @@ public class UniformGranularityTest ); } - public void equalsCheck(GranularitySpec spec1, GranularitySpec spec2) { + public void equalsCheck(GranularitySpec spec1, GranularitySpec spec2) + { Assert.assertEquals(spec1, spec2); Assert.assertEquals(spec1.hashCode(), spec2.hashCode()); } @@ -235,7 +236,8 @@ public class UniformGranularityTest } @Test - public void testPeriodSegmentGranularity() { + public void testPeriodSegmentGranularity() + { final GranularitySpec spec = new UniformGranularitySpec( new PeriodGranularity(new Period("P1D"), null, DateTimeZone.forID("America/Los_Angeles")), null, @@ -274,7 +276,8 @@ public class UniformGranularityTest Assert.assertEquals(expectedIntervals, actualIntervals); } - private void notEqualsCheck(GranularitySpec spec1, GranularitySpec spec2) { + private void notEqualsCheck(GranularitySpec spec1, GranularitySpec spec2) + { Assert.assertNotEquals(spec1, spec2); Assert.assertNotEquals(spec1.hashCode(), spec2.hashCode()); } diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorPlumberTest.java b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorPlumberTest.java index 316e86b6a19..f84961a45ab 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorPlumberTest.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorPlumberTest.java @@ -42,7 +42,8 @@ public class AppenderatorPlumberTest private final AppenderatorPlumber plumber; private final AppenderatorTester appenderatorTester; - public 
AppenderatorPlumberTest() throws Exception { + public AppenderatorPlumberTest() throws Exception + { this.appenderatorTester = new AppenderatorTester(10); DataSegmentAnnouncer segmentAnnouncer = EasyMock .createMock(DataSegmentAnnouncer.class); @@ -92,7 +93,8 @@ public class AppenderatorPlumberTest } @Test - public void testSimpleIngestion() throws Exception { + public void testSimpleIngestion() throws Exception + { final ConcurrentMap commitMetadata = new ConcurrentHashMap<>(); diff --git a/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestHistoryTest.java b/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestHistoryTest.java index fe2c92306f7..a7719b89bd2 100644 --- a/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestHistoryTest.java +++ b/server/src/test/java/io/druid/server/coordination/SegmentChangeRequestHistoryTest.java @@ -93,7 +93,8 @@ public class SegmentChangeRequestHistoryTest try { history.getRequestsSince(new SegmentChangeRequestHistory.Counter(0, 1234)).get(); Assert.fail(); - } catch (ExecutionException ex) { + } + catch (ExecutionException ex) { Assert.assertTrue(ex.getCause() instanceof IllegalArgumentException); } @@ -106,7 +107,8 @@ public class SegmentChangeRequestHistoryTest try { history.getRequestsSince(new SegmentChangeRequestHistory.Counter(0, 1234)).get(); Assert.fail(); - } catch (ExecutionException ex) { + } + catch (ExecutionException ex) { Assert.assertTrue(ex.getCause() instanceof IllegalArgumentException); } @@ -117,7 +119,8 @@ public class SegmentChangeRequestHistoryTest try { history.getRequestsSince(new SegmentChangeRequestHistory.Counter(1, 1234)).get(); Assert.fail(); - } catch (ExecutionException ex) { + } + catch (ExecutionException ex) { Assert.assertTrue(ex.getCause() instanceof IllegalArgumentException); } @@ -134,7 +137,8 @@ public class SegmentChangeRequestHistoryTest try { history.getRequestsSince(new SegmentChangeRequestHistory.Counter(2, 1234)).get(); 
Assert.fail(); - } catch (ExecutionException ex) { + } + catch (ExecutionException ex) { Assert.assertTrue(ex.getCause() instanceof IllegalArgumentException); } } diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java index 051fc5a5d32..fa2c7fcfc5e 100644 --- a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorTest.java @@ -261,7 +261,8 @@ public class DruidCoordinatorTest extends CuratorTestBase } @Test(timeout = 60_000L) - public void testCoordinatorRun() throws Exception{ + public void testCoordinatorRun() throws Exception + { String dataSource = "dataSource1"; String tier= "hot"; diff --git a/server/src/test/java/io/druid/server/coordinator/rules/ForeverLoadRuleTest.java b/server/src/test/java/io/druid/server/coordinator/rules/ForeverLoadRuleTest.java index 04f6bfc2af8..720643f6c2e 100644 --- a/server/src/test/java/io/druid/server/coordinator/rules/ForeverLoadRuleTest.java +++ b/server/src/test/java/io/druid/server/coordinator/rules/ForeverLoadRuleTest.java @@ -49,7 +49,8 @@ public class ForeverLoadRuleTest } @Test - public void testMappingNullTieredReplicants() throws Exception{ + public void testMappingNullTieredReplicants() throws Exception + { String inputJson = "{\n" + " \"type\": \"loadForever\"\n" + "}"; diff --git a/server/src/test/java/io/druid/server/coordinator/rules/IntervalLoadRuleTest.java b/server/src/test/java/io/druid/server/coordinator/rules/IntervalLoadRuleTest.java index 297977fc99d..4460e32c239 100644 --- a/server/src/test/java/io/druid/server/coordinator/rules/IntervalLoadRuleTest.java +++ b/server/src/test/java/io/druid/server/coordinator/rules/IntervalLoadRuleTest.java @@ -60,7 +60,8 @@ import org.junit.Test; } @Test - public void testMappingNullTieredReplicants() throws Exception{ + public void testMappingNullTieredReplicants() throws 
Exception + { String inputJson = " {\n" + " \"interval\": \"0000-01-01T00:00:00.000-05:50:36/3000-01-01T00:00:00.000-06:00\",\n" + " \"type\": \"loadByInterval\"\n" diff --git a/server/src/test/java/io/druid/server/coordinator/rules/PeriodLoadRuleTest.java b/server/src/test/java/io/druid/server/coordinator/rules/PeriodLoadRuleTest.java index e0b7d0bfb08..a487630b714 100644 --- a/server/src/test/java/io/druid/server/coordinator/rules/PeriodLoadRuleTest.java +++ b/server/src/test/java/io/druid/server/coordinator/rules/PeriodLoadRuleTest.java @@ -96,7 +96,8 @@ public class PeriodLoadRuleTest } @Test - public void testMappingNullTieredReplicants() throws Exception{ + public void testMappingNullTieredReplicants() throws Exception + { String inputJson = "{\n" + " \"period\": \"P1D\",\n" + " \"type\": \"loadByPeriod\"\n" diff --git a/server/src/test/java/io/druid/server/http/CoordinatorDynamicConfigTest.java b/server/src/test/java/io/druid/server/http/CoordinatorDynamicConfigTest.java index c0050e781e8..5f17c37bd2b 100644 --- a/server/src/test/java/io/druid/server/http/CoordinatorDynamicConfigTest.java +++ b/server/src/test/java/io/druid/server/http/CoordinatorDynamicConfigTest.java @@ -132,7 +132,8 @@ public class CoordinatorDynamicConfigTest ); Assert.fail("deserialization should fail."); - } catch (JsonMappingException e) { + } + catch (JsonMappingException e) { Assert.assertTrue(e.getCause() instanceof IAE); } } diff --git a/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java b/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java index 74f98197488..b9e31b48603 100644 --- a/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java +++ b/server/src/test/java/io/druid/server/http/DatasourcesResourceTest.java @@ -464,7 +464,8 @@ public class DatasourcesResourceTest } @Test - public void testDeleteDataSource() { + public void testDeleteDataSource() + { IndexingServiceClient indexingServiceClient = 
EasyMock.createStrictMock(IndexingServiceClient.class); EasyMock.replay(indexingServiceClient, server); DatasourcesResource datasourcesResource = new DatasourcesResource(inventoryView, null, indexingServiceClient, new AuthConfig()); diff --git a/server/src/test/java/io/druid/server/http/IntervalsResourceTest.java b/server/src/test/java/io/druid/server/http/IntervalsResourceTest.java index 3e7e69b86d2..3c437cc8f2a 100644 --- a/server/src/test/java/io/druid/server/http/IntervalsResourceTest.java +++ b/server/src/test/java/io/druid/server/http/IntervalsResourceTest.java @@ -189,7 +189,8 @@ public class IntervalsResourceTest } @After - public void tearDown() { + public void tearDown() + { EasyMock.verify(inventoryView); } diff --git a/server/src/test/java/io/druid/server/lookup/cache/LookupCoordinatorManagerTest.java b/server/src/test/java/io/druid/server/lookup/cache/LookupCoordinatorManagerTest.java index 7194bce4c69..6a9dde6bd73 100644 --- a/server/src/test/java/io/druid/server/lookup/cache/LookupCoordinatorManagerTest.java +++ b/server/src/test/java/io/druid/server/lookup/cache/LookupCoordinatorManagerTest.java @@ -789,7 +789,8 @@ public class LookupCoordinatorManagerTest try { manager.updateLookups(TIERED_LOOKUP_MAP_V0, auditInfo); Assert.fail(); - } catch (IAE ex) { + } + catch (IAE ex) { } } @@ -1436,9 +1437,11 @@ public class LookupCoordinatorManagerTest discoverer, mapper, configManager, - new LookupCoordinatorManagerConfig(){ + new LookupCoordinatorManagerConfig() + { @Override - public long getPeriod(){ + public long getPeriod() + { return 1; } } diff --git a/services/src/main/java/io/druid/cli/CliInternalHadoopIndexer.java b/services/src/main/java/io/druid/cli/CliInternalHadoopIndexer.java index c31a5f15c4f..f036235d3ef 100644 --- a/services/src/main/java/io/druid/cli/CliInternalHadoopIndexer.java +++ b/services/src/main/java/io/druid/cli/CliInternalHadoopIndexer.java @@ -154,7 +154,8 @@ public class CliInternalHadoopIndexer extends GuiceRunnable // File 
URI. localConfigFile = new File(argumentSpecUri.getPath()); } - } catch (URISyntaxException e) { + } + catch (URISyntaxException e) { // Not a URI, assume it's a local file. localConfigFile = new File(argumentSpec); } diff --git a/services/src/main/java/io/druid/cli/ResetCluster.java b/services/src/main/java/io/druid/cli/ResetCluster.java index b11e66f97b5..7f2fb6a9583 100644 --- a/services/src/main/java/io/druid/cli/ResetCluster.java +++ b/services/src/main/java/io/druid/cli/ResetCluster.java @@ -163,7 +163,8 @@ public class ResetCluster extends GuiceRunnable DataSegmentKiller segmentKiller = injector.getInstance(DataSegmentKiller.class); segmentKiller.killAll(); - } catch (Exception ex) { + } + catch (Exception ex) { log.error(ex, "Failed to cleanup Segment Files."); } } @@ -177,7 +178,8 @@ public class ResetCluster extends GuiceRunnable TaskLogKiller taskLogKiller = injector.getInstance(TaskLogKiller.class);; taskLogKiller.killAll(); - } catch (Exception ex) { + } + catch (Exception ex) { log.error(ex, "Failed to cleanup TaskLogs."); } } diff --git a/services/src/main/java/io/druid/cli/ValidateSegments.java b/services/src/main/java/io/druid/cli/ValidateSegments.java index 760d09148de..4c910c04fd2 100644 --- a/services/src/main/java/io/druid/cli/ValidateSegments.java +++ b/services/src/main/java/io/druid/cli/ValidateSegments.java @@ -47,7 +47,10 @@ public class ValidateSegments extends GuiceRunnable { private static final Logger log = new Logger(ValidateSegments.class); - public ValidateSegments() { super(log); } + public ValidateSegments() + { + super(log); + } @Arguments( description = "Two directories where each directory contains segment files to validate.", @@ -55,7 +58,8 @@ public class ValidateSegments extends GuiceRunnable public List directories; @Override - public void run() { + public void run() + { if (directories.size() != 2) { throw new IAE("Please provide two segment directories to compare"); } @@ -66,7 +70,8 @@ public class ValidateSegments 
extends GuiceRunnable String dir2 = directories.get(1); indexIO.validateTwoSegments(new File(dir1), new File(dir2)); log.info("Segments [%s] and [%s] are identical", dir1, dir2); - } catch (Exception e) { + } + catch (Exception e) { throw Throwables.propagate(e); } } diff --git a/services/src/main/java/io/druid/cli/convert/DataSegmentPusherDefaultConverter.java b/services/src/main/java/io/druid/cli/convert/DataSegmentPusherDefaultConverter.java index 9ff567c6636..eba62944770 100644 --- a/services/src/main/java/io/druid/cli/convert/DataSegmentPusherDefaultConverter.java +++ b/services/src/main/java/io/druid/cli/convert/DataSegmentPusherDefaultConverter.java @@ -45,11 +45,9 @@ public class DataSegmentPusherDefaultConverter implements PropertyConverter String type = null; if (Boolean.parseBoolean(props.getProperty("druid.pusher.local", "false"))) { type = "local"; - } - else if (Boolean.parseBoolean(props.getProperty("druid.pusher.cassandra", "false"))) { + } else if (Boolean.parseBoolean(props.getProperty("druid.pusher.cassandra", "false"))) { type = "c*"; - } - else if (Boolean.parseBoolean(props.getProperty("druid.pusher.hdfs", "false"))) { + } else if (Boolean.parseBoolean(props.getProperty("druid.pusher.hdfs", "false"))) { type = "hdfs"; } diff --git a/services/src/main/java/io/druid/cli/convert/DatabasePropertiesConverter.java b/services/src/main/java/io/druid/cli/convert/DatabasePropertiesConverter.java index b5cf21869c0..fd29e11042e 100644 --- a/services/src/main/java/io/druid/cli/convert/DatabasePropertiesConverter.java +++ b/services/src/main/java/io/druid/cli/convert/DatabasePropertiesConverter.java @@ -58,8 +58,7 @@ public class DatabasePropertiesConverter implements PropertyConverter if (tablePrefix == null) { tablePrefix = "druid"; - } - else { + } else { tablePrefix = tablePrefix.split("_")[0]; } diff --git a/services/src/main/java/io/druid/cli/convert/ValueConverter.java b/services/src/main/java/io/druid/cli/convert/ValueConverter.java deleted file 
mode 100644 index 2a1b10bec67..00000000000 --- a/services/src/main/java/io/druid/cli/convert/ValueConverter.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package io.druid.cli.convert; - -import com.google.common.collect.ImmutableMap; - -import java.util.Map; -import java.util.Properties; - -/** - * - */ -public class ValueConverter implements PropertyConverter -{ - private final Map valueMap; - private final String property; - public ValueConverter(String property, Map valueMap){ - this.property = property; - this.valueMap = valueMap; - } - - @Override - public boolean canHandle(String property) - { - return this.property.equals(property); - } - - @Override - public Map convert(Properties properties) - { - final String oldValue = properties.getProperty(this.property); - if(null == oldValue){ - return ImmutableMap.of(); - } - final String newValue = valueMap.get(oldValue); - if(null == newValue){ - return ImmutableMap.of(); - } - return ImmutableMap.of(this.property, newValue); - } -} diff --git a/sql/src/test/java/io/druid/sql/avatica/DruidAvaticaHandlerTest.java b/sql/src/test/java/io/druid/sql/avatica/DruidAvaticaHandlerTest.java index 212d9a4ffa6..47e874a8527 
100644 --- a/sql/src/test/java/io/druid/sql/avatica/DruidAvaticaHandlerTest.java +++ b/sql/src/test/java/io/druid/sql/avatica/DruidAvaticaHandlerTest.java @@ -560,7 +560,7 @@ public class DruidAvaticaHandlerTest Frame frame = super.fetch(statement, offset, fetchMaxRowCount); frames.add(frame); return frame; - }; + } }; final DruidAvaticaHandler handler = new DruidAvaticaHandler(