From 9ae457f7ad1a4771bc5aacab3005d91d881b365e Mon Sep 17 00:00:00 2001
From: Roman Leventov
Date: Thu, 29 Jun 2017 16:06:19 -0500
Subject: [PATCH] Avoid using the default system Locale and printing to
 System.out in production code (#4409)

* Avoid usages of Default system Locale and printing to System.out or
  System.err in production code
* Fix Charset in DruidKerberosUtil
* Remove redundant string format in GenericIndexed
* Rename StringUtils.safeFormat() to unimportantSafeFormat(); add
  StringUtils.format() which fails as well as String.format()
* Fix testSafeFormat()
* More fixes of redundant StringUtils.format() inside ISE
* Rename unimportantSafeFormat() to nonStrictFormat()
---
 .../data/input/impl/DimensionSchema.java | 9 +-
 .../data/input/impl/JSONPathFieldType.java | 5 +-
 .../data/input/impl/MapInputRowParser.java | 3 +-
 .../java/io/druid/guice/JsonConfigurator.java | 11 +-
 .../main/java/io/druid/guice/PolyBind.java | 5 +-
 .../java/io/druid/segment/SegmentUtils.java | 8 +-
 .../segment/loading/DataSegmentPusher.java | 9 +-
 .../loading/SegmentLoadingException.java | 6 +-
 .../io/druid/timeline/DataSegmentUtils.java | 5 +-
 ...loatCompressionBenchmarkFileGenerator.java | 6 +-
 .../IncrementalIndexRowTypeBenchmark.java | 15 +-
 ...LongCompressionBenchmarkFileGenerator.java | 6 +-
 .../druid/benchmark/VSizeSerdeBenchmark.java | 4 +-
 .../bitmap/ConciseBitmapFactory.java | 3 +-
 .../bitmap/RoaringBitmapFactory.java | 3 +-
 .../bitmap/WrappedBitSetBitmap.java | 42 +--
 .../bitmap/WrappedRoaringBitmap.java | 2 +-
 .../druid/collections/spatial/RTreeUtils.java | 178 ----------
 .../collections/bitmap/BitmapBenchmark.java | 30 +-
 .../bitmap/RangeBitmapBenchmarkTest.java | 17 +-
 .../bitmap/UniformBitmapBenchmarkTest.java | 17 +-
 .../spatial/ImmutableRTreeTest.java | 21 +-
 codestyle/checkstyle-suppressions.xml | 4 +
 .../io/druid/common/config/Log4jShutdown.java | 5 +-
 .../io/druid/common/guava/GuavaUtils.java | 3 +-
 .../common/utils/ServletResourceUtils.java | 2 +-
 .../java/io/druid/common/utils/VMUtils.java | 9 +-
 .../io/druid/math/expr/ExprMacroTable.java | 6 +-
 .../java/io/druid/math/expr/Function.java | 3 +-
 .../main/java/io/druid/math/expr/Parser.java | 5 +-
 .../druid/metadata/EntryExistsException.java | 4 +-
 .../MetadataStorageConnectorConfig.java | 3 +-
 .../metadata/MetadataStorageTablesConfig.java | 3 +-
 .../timeline/VersionedIntervalTimeline.java | 13 +-
 .../TwitterSpritzerFirehoseFactory.java | 11 +-
 extendedset/pom.xml | 7 +
 .../druid/extendedset/intset/ConciseSet.java | 4 +-
 .../druid/extendedset/utilities/BitCount.java | 333 ------------------
 .../intset/ImmutableConciseSetTest.java | 3 +-
 .../ambari/metrics/AmbariMetricsEmitter.java | 3 +-
 .../storage/azure/AzureDataSegmentPusher.java | 5 +-
 .../azure/AzureStorageDruidModule.java | 3 +-
 .../io/druid/storage/azure/AzureTaskLogs.java | 7 +-
 .../azure/AzureDataSegmentPusherTest.java | 5 +-
 .../rocketmq/RocketMQFirehoseFactory.java | 3 +-
 .../google/GoogleDataSegmentPusher.java | 3 +-
 .../druid/storage/google/GoogleTaskLogs.java | 5 +-
 ...fkaEightSimpleConsumerFirehoseFactory.java | 3 +-
 .../firehose/kafka/KafkaSimpleConsumer.java | 3 +-
 .../input/orc/OrcHadoopInputRowParser.java | 21 +-
 .../input/orc/OrcIndexGeneratorJobTest.java | 2 +-
 .../rabbitmq/RabbitMQProducerMain.java | 11 +-
 .../storage/sqlserver/SQLServerConnector.java | 27 +-
 .../input/AvroHadoopInputRowParserTest.java | 3 +-
 .../client/cache/CacheExecutorFactory.java | 4 +-
 .../security/kerberos/DruidKerberosUtil.java | 4 +-
 .../storage/hdfs/HdfsDataSegmentFinder.java | 7 +-
 .../storage/hdfs/HdfsDataSegmentKiller.java | 5 +-
 .../storage/hdfs/HdfsDataSegmentPuller.java | 3 +-
 .../storage/hdfs/HdfsDataSegmentPusher.java | 20 +-
 .../storage/hdfs/tasklog/HdfsTaskLogs.java | 3 +-
 .../common/tasklogs/HdfsTaskLogsTest.java | 2 +-
 .../loading/HdfsDataSegmentFinderTest.java | 3 +-
 .../loading/HdfsDataSegmentPullerTest.java | 3 +-
 .../HdfsFileTimestampVersionFinderTest.java | 3 +-
 .../hdfs/HdfsDataSegmentKillerTest.java | 5 +-
 .../hdfs/HdfsDataSegmentPusherTest.java | 23 +-
 .../histogram/sql/QuantileSqlAggregator.java | 3 +-
 .../ApproximateHistogramErrorBenchmark.java | 26 +-
 .../ApproximateHistogramGroupByQueryTest.java | 3 +-
 .../histogram/ApproximateHistogramTest.java | 3 +-
 .../lookup/TestKafkaExtractionCluster.java | 2 +-
 .../druid/indexing/kafka/KafkaIndexTask.java | 11 +-
 .../indexing/kafka/KafkaIndexTaskClient.java | 16 +-
 .../druid/indexing/kafka/KafkaPartitions.java | 3 +-
 .../kafka/supervisor/KafkaSupervisor.java | 7 +-
 .../supervisor/KafkaSupervisorIOConfig.java | 3 +-
 .../kafka/KafkaIndexTaskClientTest.java | 22 +-
 .../indexing/kafka/KafkaIndexTaskTest.java | 4 +-
 .../kafka/supervisor/KafkaSupervisorTest.java | 9 +-
 .../druid/indexing/kafka/test/TestBroker.java | 7 +-
 .../NamespaceLookupExtractorFactory.java | 2 +-
 .../namespace/JdbcExtractionNamespace.java | 3 +-
 .../namespace/UriExtractionNamespace.java | 7 +-
 .../lookup/namespace/JdbcCacheGenerator.java | 7 +-
 .../lookup/namespace/UriCacheGenerator.java | 3 +-
 .../namespace/cache/CacheScheduler.java | 3 +-
 .../namespace/UriExtractionNamespaceTest.java | 25 +-
 .../namespace/cache/CacheSchedulerTest.java | 3 +-
 .../cache/JdbcExtractionNamespaceTest.java | 11 +-
 .../cache/polling/OffHeapPollingCache.java | 5 +-
 .../server/lookup/jdbc/JdbcDataFetcher.java | 7 +-
 .../lookup/jdbc/JdbcDataFetcherTest.java | 9 +-
 .../storage/mysql/MySQLConnector.java | 3 +-
 .../postgresql/PostgreSQLConnector.java | 5 +-
 .../protobuf/ProtobufInputRowParser.java | 3 +-
 .../druid/storage/s3/S3DataSegmentMover.java | 2 +-
 .../druid/storage/s3/S3DataSegmentPuller.java | 16 +-
 .../druid/storage/s3/S3DataSegmentPusher.java | 5 +-
 .../java/io/druid/storage/s3/S3TaskLogs.java | 6 +-
 .../s3/S3TimestampVersionedDataFinder.java | 3 +-
 .../storage/s3/S3DataSegmentPullerTest.java | 5 +-
 .../S3TimestampVersionedDataFinderTest.java | 15 +-
 .../druid/hll/HyperLogLogCollectorTest.java | 12 +-
 .../indexer/DetermineHashedPartitionsJob.java | 3 +-
 .../druid/indexer/DeterminePartitionsJob.java | 5 +-
 .../indexer/HadoopDruidIndexerConfig.java | 9 +-
 .../io/druid/indexer/IndexGeneratorJob.java | 7 +-
 .../main/java/io/druid/indexer/JobHelper.java | 48 ++-
 .../path/GranularUnprocessedPathSpec.java | 5 +-
 .../indexer/updater/HadoopConverterJob.java | 5 +-
 .../indexer/BatchDeltaIngestionTest.java | 3 +-
 .../druid/indexer/HdfsClasspathSetupTest.java | 7 +-
 .../druid/indexer/IndexGeneratorJobTest.java | 7 +-
 .../indexer/path/GranularityPathSpecTest.java | 15 +-
 .../actions/RemoteTaskActionClient.java | 12 +-
 .../common/index/YeOldePlumberSchool.java | 3 +-
 .../common/task/ConvertSegmentTask.java | 3 +-
 .../indexing/common/task/HadoopIndexTask.java | 3 +-
 .../druid/indexing/common/task/IndexTask.java | 5 +-
 .../indexing/common/task/MergeTaskBase.java | 7 +-
 .../druid/indexing/common/task/NoopTask.java | 5 +-
 .../common/task/RealtimeIndexTask.java | 5 +-
 .../common/tasklogs/FileTaskLogs.java | 8 +-
 .../indexing/overlord/ForkingTaskRunner.java | 26 +-
 .../overlord/MetadataTaskStorage.java | 3 +-
 .../indexing/overlord/RemoteTaskRunner.java | 9 +-
 .../overlord/http/OverlordResource.java | 7 +-
 .../security/SupervisorResourceFilter.java | 5 +-
 .../http/security/TaskResourceFilter.java | 5 +-
 .../supervisor/SupervisorResource.java | 11 +-
 .../indexing/worker/http/WorkerResource.java | 3 +-
 .../indexing/common/task/HadoopTaskTest.java | 3 +-
 .../common/task/RealtimeIndexTaskTest.java | 2 +-
 .../common/tasklogs/FileTaskLogsTest.java | 2 +-
 .../IngestSegmentFirehoseFactoryTest.java | 6 +-
 .../overlord/RemoteTaskRunnerTestUtils.java | 7 +-
 .../indexing/overlord/TaskLifecycleTest.java | 6 +-
 .../worker/WorkerTaskMonitorTest.java | 5 +-
 .../worker/http/WorkerResourceTest.java | 3 +-
 .../initialization/IndexerZkConfigTest.java | 17 +-
 .../testing/ConfigFileConfigProvider.java | 11 +-
 .../clients/ClientInfoResourceTestClient.java | 5 +-
 .../CoordinatorResourceTestClient.java | 11 +-
 .../EventReceiverFirehoseTestClient.java | 3 +-
 .../clients/OverlordResourceTestClient.java | 10 +-
 .../clients/QueryResourceTestClient.java | 3 +-
 .../testing/utils/ServerDiscoveryUtil.java | 3 +-
 .../druid/testing/utils/TestQueryHelper.java | 3 +-
 .../org/testng/DruidTestRunnerFactory.java | 9 +-
 .../indexer/ITKafkaIndexingServiceTest.java | 3 +-
 .../io/druid/tests/indexer/ITKafkaTest.java | 3 +-
 .../indexer/ITRealtimeIndexTaskTest.java | 3 +-
 .../java/util/common/CompressionUtils.java | 4 +-
 .../java/io/druid/java/util/common/IAE.java | 4 +-
 .../java/io/druid/java/util/common/IOE.java | 4 +-
 .../java/io/druid/java/util/common/ISE.java | 4 +-
 .../java/io/druid/java/util/common/RE.java | 4 +-
 .../druid/java/util/common/StringUtils.java | 25 +-
 .../io/druid/java/util/common/Timing.java | 49 ---
 .../java/io/druid/java/util/common/UOE.java | 4 +-
 .../util/common/granularity/Granularity.java | 3 +-
 .../util/common/io/smoosh/FileSmoosher.java | 12 +-
 .../druid/java/util/common/logger/Logger.java | 24 +-
 .../common/parsers/JSONToLowerParser.java | 9 +-
 .../util/common/parsers/ParseException.java | 6 +-
 .../java/util/common/parsers/ParserUtils.java | 3 +-
 .../common/parsers/ToLowerCaseParser.java | 3 +-
 .../util/common/CompressionUtilsTest.java | 10 +-
 .../java/util/common/GranularityTest.java | 41 ++-
 .../java/util/common/StringUtilsTest.java | 6 +-
 .../common/guava/FilteredSequenceTest.java | 3 +-
 .../util/common/guava/MappedSequenceTest.java | 3 +-
 .../io/smoosh/SmooshedFileMapperTest.java | 25 +-
 .../druid/query/ReflectionLoaderThingy.java | 3 +-
 .../query/ResourceLimitExceededException.java | 2 +-
 .../query/aggregation/AggregatorFactory.java | 6 +-
 ...ggregatorFactoryNotMergeableException.java | 6 +-
 .../query/extraction/BucketExtractionFn.java | 3 +-
 .../extraction/MatchingDimExtractionFn.java | 2 +-
 .../extraction/RegexDimExtractionFn.java | 2 +-
 .../extraction/StringFormatExtractionFn.java | 6 +-
 .../extraction/SubstringDimExtractionFn.java | 3 +-
 .../extraction/TimeFormatExtractionFn.java | 2 +-
 .../io/druid/query/filter/AndDimFilter.java | 3 +-
 .../io/druid/query/filter/BoundDimFilter.java | 4 +-
 .../query/filter/ExtractionDimFilter.java | 2 +-
 .../io/druid/query/filter/OrDimFilter.java | 3 +-
 .../druid/query/filter/SelectorDimFilter.java | 4 +-
 .../query/groupby/GroupByQueryHelper.java | 10 +-
 .../epinephelinae/ByteBufferIntList.java | 3 +-
 .../GroupByMergingQueryRunnerV2.java | 3 +-
 .../epinephelinae/GroupByRowProcessor.java | 3 +-
 .../LimitedTemporaryStorage.java | 3 +-
 .../TemporaryStorageFullException.java | 4 +-
 .../groupby/orderby/OrderByColumnSpec.java | 4 +-
 .../druid/query/metadata/SegmentAnalyzer.java | 4 +-
 .../metadata/SegmentMetadataQuery.java | 5 +-
 .../query/ordering/StringComparator.java | 3 +-
 .../query/search/search/SearchSortSpec.java | 3 +-
 .../spec/SpecificSegmentQueryRunner.java | 3 +-
 .../CompressedVSizeIndexedSupplier.java | 2 +-
 .../druid/segment/FloatColumnSerializer.java | 3 +-
 .../main/java/io/druid/segment/IndexIO.java | 18 +-
 .../druid/segment/LongColumnSerializer.java | 3 +-
 .../segment/SegmentMissingException.java | 3 +-
 .../segment/SegmentValidationException.java | 6 +-
 .../segment/StringDimensionMergerV9.java | 9 +-
 .../io/druid/segment/column/ValueType.java | 3 +-
 .../data/BlockLayoutIndexedFloatSupplier.java | 3 +-
 .../data/BlockLayoutIndexedLongSupplier.java | 3 +-
 .../druid/segment/data/ByteBufferWriter.java | 3 +-
 .../data/CompressedIntsIndexedSupplier.java | 3 +-
 .../data/CompressedObjectStrategy.java | 5 +-
 .../data/CompressedVSizeIndexedV3Writer.java | 5 +-
 .../segment/data/CompressionFactory.java | 5 +-
 .../EntireLayoutIndexedFloatSupplier.java | 3 +-
 .../data/EntireLayoutIndexedLongSupplier.java | 3 +-
 .../io/druid/segment/data/GenericIndexed.java | 6 +-
 .../segment/data/GenericIndexedWriter.java | 6 +-
 .../io/druid/segment/data/VSizeIndexed.java | 2 +-
 .../segment/data/VSizeIndexedIntsWriter.java | 3 +-
 .../segment/data/VSizeIndexedWriter.java | 7 +-
 .../io/druid/segment/filter/AndFilter.java | 3 +-
 .../filter/DimensionPredicateFilter.java | 5 +-
 .../io/druid/segment/filter/OrFilter.java | 3 +-
 .../druid/segment/filter/SelectorFilter.java | 3 +-
 .../segment/incremental/IncrementalIndex.java | 3 +-
 .../IndexSizeExceededException.java | 6 +-
 .../incremental/OffheapIncrementalIndex.java | 3 +-
 .../incremental/OnheapIncrementalIndex.java | 3 +-
 .../serde/ComplexColumnSerializer.java | 3 +-
 ...olumnSupportedComplexColumnSerializer.java | 3 +-
 .../granularity/QueryGranularityTest.java | 9 +-
 .../jackson/DefaultObjectMapperTest.java | 3 +-
 .../aggregation/JavaScriptAggregatorTest.java | 9 +-
 .../aggregation/MetricManipulatorFnsTest.java | 3 +-
 .../HyperUniquesAggregatorFactoryTest.java | 11 +-
 .../dimension/LegacyDimensionSpecTest.java | 3 +-
 .../MapLookupExtractionFnSerDeTest.java | 5 +-
 .../query/groupby/GroupByQueryRunnerTest.java | 9 +-
 .../GroupByTimeseriesQueryRunnerTest.java | 3 +-
 .../search/SearchQueryRunnerWithCaseTest.java | 7 +-
 .../timeseries/TimeseriesQueryRunnerTest.java | 9 +-
 .../java/io/druid/segment/IndexBuilder.java | 7 +-
 .../druid/segment/SchemalessTestFullTest.java | 9 +-
 .../java/io/druid/segment/TestHelper.java | 35 +-
 .../data/BenchmarkIndexibleWrites.java | 5 +-
 .../data/CompressedFloatsSerdeTest.java | 5 +-
 .../CompressedIntsIndexedSupplierTest.java | 5 +-
 .../data/CompressedIntsIndexedWriterTest.java | 3 +-
 .../data/CompressedLongsSerdeTest.java | 5 +-
 .../CompressedVSizeIndexedV3WriterTest.java | 3 +-
 ...ompressedVSizeIntsIndexedSupplierTest.java | 5 +-
 .../druid/segment/data/IOPeonForTesting.java | 3 +-
 .../segment/data/IncrementalIndexTest.java | 55 +--
 .../segment/data/UnioningOffsetTest.java | 5 +-
 .../druid/segment/filter/BaseFilterTest.java | 3 +-
 .../filter/SpatialFilterBonusTest.java | 5 +-
 .../OnheapIncrementalIndexBenchmark.java | 27 +-
 .../druid/client/CachingClusteredClient.java | 5 +-
 .../io/druid/client/DirectDruidClient.java | 10 +-
 .../druid/client/HttpServerInventoryView.java | 5 +-
 .../client/cache/BytesBoundedLinkedQueue.java | 6 +-
 .../io/druid/client/cache/CacheMonitor.java | 19 +-
 .../client/coordinator/CoordinatorClient.java | 3 +-
 .../indexing/IndexingServiceClient.java | 3 +-
 .../inventory/CuratorInventoryManager.java | 3 +-
 .../io/druid/guice/DruidProcessingModule.java | 3 +-
 .../io/druid/guice/http/HttpClientModule.java | 5 +-
 .../SQLMetadataStorageUpdaterJobHandler.java | 3 +-
 .../druid/initialization/Initialization.java | 10 +-
 .../IndexerSQLMetadataStorageCoordinator.java | 28 +-
 .../druid/metadata/SQLMetadataConnector.java | 49 +--
 .../metadata/SQLMetadataRuleManager.java | 17 +-
 .../metadata/SQLMetadataSegmentManager.java | 17 +-
 .../metadata/SQLMetadataSegmentPublisher.java | 5 +-
 .../SQLMetadataStorageActionHandler.java | 22 +-
 .../SQLMetadataSupervisorManager.java | 7 +-
 .../storage/derby/DerbyConnector.java | 3 +-
 .../granularity/ArbitraryGranularitySpec.java | 9 +-
 .../loading/LocalDataSegmentPuller.java | 3 +-
 .../appenderator/AppenderatorImpl.java | 5 +-
 .../appenderator/AppenderatorPlumber.java | 3 +-
 .../SegmentNotWritableException.java | 4 +-
 .../realtime/plumber/FlushingPlumber.java | 3 +-
 .../MessageTimeRejectionPolicyFactory.java | 3 +-
 .../realtime/plumber/RealtimePlumber.java | 7 +-
 .../ServerTimeRejectionPolicyFactory.java | 3 +-
 .../java/io/druid/server/QueryResource.java | 3 +-
 .../java/io/druid/server/StatusResource.java | 7 +-
 .../druid/server/audit/SQLAuditManager.java | 9 +-
 .../BatchDataSegmentAnnouncer.java | 3 +-
 .../SegmentChangeRequestDrop.java | 3 +-
 .../SegmentChangeRequestHistory.java | 2 +-
 .../SegmentChangeRequestLoad.java | 3 +-
 .../druid/server/coordination/ServerType.java | 5 +-
 .../coordinator/ReplicationThrottler.java | 3 +-
 .../helper/DruidCoordinatorBalancer.java | 3 +-
 .../server/http/CoordinatorRedirectInfo.java | 5 +-
 .../http/security/ConfigResourceFilter.java | 3 +-
 .../security/DatasourceResourceFilter.java | 3 +-
 .../http/security/RulesResourceFilter.java | 3 +-
 .../http/security/StateResourceFilter.java | 3 +-
 .../druid/server/log/FileRequestLogger.java | 3 +-
 .../java/io/druid/server/security/Access.java | 4 +-
 .../client/CachingClusteredClientTest.java | 5 +-
 .../client/cache/CacheDistributionTest.java | 10 +-
 .../ServerDiscoverySelectorTest.java | 3 +-
 .../io/druid/guice/JsonConfigTesterBase.java | 7 +-
 .../initialization/ZkPathsConfigTest.java | 19 +-
 ...exerSQLMetadataStorageCoordinatorTest.java | 3 +-
 .../metadata/SQLMetadataConnectorTest.java | 4 +-
 .../metadata/SQLMetadataRuleManagerTest.java | 3 +-
 .../SQLMetadataSupervisorManagerTest.java | 3 +-
 .../io/druid/metadata/TestDerbyConnector.java | 3 +-
 .../loading/LocalDataSegmentPusherTest.java | 3 +-
 .../appenderator/AppenderatorDriverTest.java | 3 +-
 .../server/audit/SQLAuditManagerTest.java | 3 +-
 .../DruidCoordinatorBalancerTester.java | 3 +-
 .../security/ResourceFilterTestHelper.java | 3 +-
 .../shard/SingleDimensionShardSpecTest.java | 3 +-
 .../main/java/io/druid/cli/DumpSegment.java | 3 +-
 .../java/io/druid/cli/PullDependencies.java | 13 +-
 .../convert/DatabasePropertiesConverter.java | 5 +-
 .../cli/convert/IndexCacheConverter.java | 3 +-
 .../cli/validate/DruidJsonValidator.java | 4 +-
 .../io/druid/cli/PullDependenciesTest.java | 5 +-
 .../java/io/druid/sql/avatica/DruidMeta.java | 3 +-
 .../io/druid/sql/avatica/DruidStatement.java | 3 +-
 .../sql/calcite/expression/Expressions.java | 11 +-
 .../sql/calcite/expression/RowExtraction.java | 3 +-
 .../druid/sql/calcite/planner/Calcites.java | 6 +-
 .../calcite/planner/DruidOperatorTable.java | 14 +-
 .../druid/sql/calcite/rel/DruidSemiJoin.java | 3 +-
 .../druid/sql/calcite/schema/DruidSchema.java | 3 +-
 .../sql/avatica/DruidAvaticaHandlerTest.java | 7 +-
 .../druid/sql/calcite/CalciteQueryTest.java | 9 +-
 .../SpecificSegmentsQuerySegmentWalker.java | 5 +-
339 files changed, 1323 insertions(+), 1561 deletions(-) delete mode 100755 extendedset/src/main/java/io/druid/extendedset/utilities/BitCount.java delete mode 100644 java-util/src/main/java/io/druid/java/util/common/Timing.java diff --git a/api/src/main/java/io/druid/data/input/impl/DimensionSchema.java b/api/src/main/java/io/druid/data/input/impl/DimensionSchema.java index 69dbb0cd0cb..b7800839712 100644 --- a/api/src/main/java/io/druid/data/input/impl/DimensionSchema.java +++ b/api/src/main/java/io/druid/data/input/impl/DimensionSchema.java @@ -26,6 +26,7 @@ import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonValue; import com.google.common.base.Preconditions; +import io.druid.java.util.common.StringUtils; /** */ @@ -57,13 +58,13 @@ public abstract class DimensionSchema @Override public String toString() { - return this.name().toUpperCase(); + return StringUtils.toUpperCase(this.name()); } @JsonCreator public static ValueType fromString(String name) { - return valueOf(name.toUpperCase()); + return valueOf(StringUtils.toUpperCase(name)); } } @@ -85,13 +86,13 @@ public abstract class DimensionSchema @JsonValue public String toString() { - return name().toUpperCase(); + return StringUtils.toUpperCase(name()); } @JsonCreator public static MultiValueHandling fromString(String name) { - return name == null ? ofDefault() : valueOf(name.toUpperCase()); + return name == null ? ofDefault() : valueOf(StringUtils.toUpperCase(name)); } // this can be system configuration diff --git a/api/src/main/java/io/druid/data/input/impl/JSONPathFieldType.java b/api/src/main/java/io/druid/data/input/impl/JSONPathFieldType.java index d99ad77c44e..14bfcb50116 100644 --- a/api/src/main/java/io/druid/data/input/impl/JSONPathFieldType.java +++ b/api/src/main/java/io/druid/data/input/impl/JSONPathFieldType.java @@ -21,6 +21,7 @@ package io.druid.data.input.impl; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonValue; +import io.druid.java.util.common.StringUtils; public enum JSONPathFieldType { @@ -31,12 +32,12 @@ public enum JSONPathFieldType @Override public String toString() { - return this.name().toLowerCase(); + return StringUtils.toLowerCase(this.name()); } @JsonCreator public static JSONPathFieldType fromString(String name) { - return valueOf(name.toUpperCase()); + return valueOf(StringUtils.toUpperCase(name)); } } diff --git a/api/src/main/java/io/druid/data/input/impl/MapInputRowParser.java b/api/src/main/java/io/druid/data/input/impl/MapInputRowParser.java index 1fafa37a624..acfa0e3e3d8 100644 --- a/api/src/main/java/io/druid/data/input/impl/MapInputRowParser.java +++ b/api/src/main/java/io/druid/data/input/impl/MapInputRowParser.java @@ -25,6 +25,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Sets; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.parsers.ParseException; import org.joda.time.DateTime; @@ -62,7 +63,7 @@ public class MapInputRowParser implements InputRowParser> if (timestamp == null) { final String input = theMap.toString(); throw new NullPointerException( - String.format( + StringUtils.format( "Null timestamp in input: %s", input.length() < 100 ? input : input.substring(0, 100) + "..." 
) diff --git a/api/src/main/java/io/druid/guice/JsonConfigurator.java b/api/src/main/java/io/druid/guice/JsonConfigurator.java index dd829505db0..ef903320744 100644 --- a/api/src/main/java/io/druid/guice/JsonConfigurator.java +++ b/api/src/main/java/io/druid/guice/JsonConfigurator.java @@ -34,6 +34,7 @@ import com.google.common.collect.Maps; import com.google.inject.Inject; import com.google.inject.ProvisionException; import com.google.inject.spi.Message; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import javax.validation.ConstraintViolation; @@ -102,7 +103,7 @@ public class JsonConfigurator } catch (IllegalArgumentException e) { throw new ProvisionException( - String.format("Problem parsing object at prefix[%s]: %s.", propertyPrefix, e.getMessage()), e + StringUtils.format("Problem parsing object at prefix[%s]: %s.", propertyPrefix, e.getMessage()), e ); } @@ -122,7 +123,7 @@ public class JsonConfigurator final Field theField = beanClazz.getDeclaredField(fieldName); if (theField.getAnnotation(JacksonInject.class) != null) { - path = String.format(" -- Injected field[%s] not bound!?", fieldName); + path = StringUtils.format(" -- Injected field[%s] not bound!?", fieldName); break; } @@ -142,7 +143,7 @@ public class JsonConfigurator throw Throwables.propagate(e); } - messages.add(String.format("%s - %s", path, violation.getMessage())); + messages.add(StringUtils.format("%s - %s", path, violation.getMessage())); } throw new ProvisionException( @@ -153,7 +154,7 @@ public class JsonConfigurator @Override public Message apply(String input) { - return new Message(String.format("%s%s", propertyBase, input)); + return new Message(StringUtils.format("%s%s", propertyBase, input)); } } ) @@ -175,7 +176,7 @@ public class JsonConfigurator final AnnotatedField field = beanDef.getField(); if (field == null || !field.hasAnnotation(JsonProperty.class)) { throw new ProvisionException( - String.format( + StringUtils.format( "JsonConfigurator requires Jackson-annotated Config objects to have field annotations. 
%s doesn't", clazz ) diff --git a/api/src/main/java/io/druid/guice/PolyBind.java b/api/src/main/java/io/druid/guice/PolyBind.java index a76decabc38..f6466e69365 100644 --- a/api/src/main/java/io/druid/guice/PolyBind.java +++ b/api/src/main/java/io/druid/guice/PolyBind.java @@ -30,6 +30,7 @@ import com.google.inject.TypeLiteral; import com.google.inject.binder.ScopedBindingBuilder; import com.google.inject.multibindings.MapBinder; import com.google.inject.util.Types; +import io.druid.java.util.common.StringUtils; import javax.annotation.Nullable; import java.lang.reflect.ParameterizedType; @@ -188,7 +189,7 @@ public class PolyBind if (implName == null) { if (defaultPropertyValue == null) { if (defaultKey == null) { - throw new ProvisionException(String.format("Some value must be configured for [%s]", key)); + throw new ProvisionException(StringUtils.format("Some value must be configured for [%s]", key)); } return injector.getInstance(defaultKey); } @@ -198,7 +199,7 @@ public class PolyBind if (provider == null) { throw new ProvisionException( - String.format("Unknown provider[%s] of %s, known options[%s]", implName, key, implsMap.keySet()) + StringUtils.format("Unknown provider[%s] of %s, known options[%s]", implName, key, implsMap.keySet()) ); } diff --git a/api/src/main/java/io/druid/segment/SegmentUtils.java b/api/src/main/java/io/druid/segment/SegmentUtils.java index 88a28095f43..f1b495f9a13 100644 --- a/api/src/main/java/io/druid/segment/SegmentUtils.java +++ b/api/src/main/java/io/druid/segment/SegmentUtils.java @@ -21,6 +21,7 @@ package io.druid.segment; import com.google.common.io.Files; import com.google.common.primitives.Ints; +import io.druid.java.util.common.IOE; import java.io.File; import java.io.FileInputStream; @@ -47,11 +48,6 @@ public class SegmentUtils return version; } - throw new IOException( - String.format( - "Invalid segment dir [%s]. Can't find either of version.bin or index.drd.", - inDir - ) - ); + throw new IOE("Invalid segment dir [%s]. 
Can't find either of version.bin or index.drd.", inDir); } } diff --git a/api/src/main/java/io/druid/segment/loading/DataSegmentPusher.java b/api/src/main/java/io/druid/segment/loading/DataSegmentPusher.java index 9638ed48b15..07386469ae4 100644 --- a/api/src/main/java/io/druid/segment/loading/DataSegmentPusher.java +++ b/api/src/main/java/io/druid/segment/loading/DataSegmentPusher.java @@ -20,6 +20,7 @@ package io.druid.segment.loading; import com.google.common.base.Joiner; +import io.druid.java.util.common.StringUtils; import io.druid.timeline.DataSegment; import java.io.File; @@ -41,7 +42,7 @@ public interface DataSegmentPusher return getDefaultStorageDir(dataSegment); } default String makeIndexPathName(DataSegment dataSegment, String indexName) { - return String.format("./%s/%s", getStorageDir(dataSegment),indexName); + return StringUtils.format("./%s/%s", getStorageDir(dataSegment), indexName); } // Note: storage directory structure format = .../dataSource/interval/version/partitionNumber/ @@ -51,11 +52,7 @@ public interface DataSegmentPusher static String getDefaultStorageDir(DataSegment segment) { return JOINER.join( segment.getDataSource(), - String.format( - "%s_%s", - segment.getInterval().getStart(), - segment.getInterval().getEnd() - ), + StringUtils.format("%s_%s", segment.getInterval().getStart(), segment.getInterval().getEnd()), segment.getVersion(), segment.getShardSpec().getPartitionNum() ); diff --git a/api/src/main/java/io/druid/segment/loading/SegmentLoadingException.java b/api/src/main/java/io/druid/segment/loading/SegmentLoadingException.java index 061375c9c18..2ac69944a74 100644 --- a/api/src/main/java/io/druid/segment/loading/SegmentLoadingException.java +++ b/api/src/main/java/io/druid/segment/loading/SegmentLoadingException.java @@ -19,6 +19,8 @@ package io.druid.segment.loading; +import io.druid.java.util.common.StringUtils; + /** */ public class SegmentLoadingException extends Exception @@ -28,7 +30,7 @@ public class SegmentLoadingException extends Exception Object... objs ) { - super(String.format(formatString, objs)); + super(StringUtils.nonStrictFormat(formatString, objs)); } public SegmentLoadingException( @@ -37,6 +39,6 @@ public class SegmentLoadingException extends Exception Object... 
objs ) { - super(String.format(formatString, objs), cause); + super(StringUtils.nonStrictFormat(formatString, objs), cause); } } diff --git a/api/src/main/java/io/druid/timeline/DataSegmentUtils.java b/api/src/main/java/io/druid/timeline/DataSegmentUtils.java index aa110d11d02..357892a776d 100644 --- a/api/src/main/java/io/druid/timeline/DataSegmentUtils.java +++ b/api/src/main/java/io/druid/timeline/DataSegmentUtils.java @@ -20,10 +20,9 @@ package io.druid.timeline; import com.google.common.base.Function; - import io.druid.java.util.common.IAE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; - import org.joda.time.DateTime; import org.joda.time.Interval; import org.joda.time.format.DateTimeFormatter; @@ -73,7 +72,7 @@ public class DataSegmentUtils */ public static SegmentIdentifierParts valueOf(String dataSource, String identifier) { - if (!identifier.startsWith(String.format("%s_", dataSource))) { + if (!identifier.startsWith(StringUtils.format("%s_", dataSource))) { return null; } diff --git a/benchmarks/src/main/java/io/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java b/benchmarks/src/main/java/io/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java index 65c2d32609b..f66a14f420b 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java +++ b/benchmarks/src/main/java/io/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java @@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.io.ByteSink; import io.druid.benchmark.datagen.BenchmarkColumnSchema; import io.druid.benchmark.datagen.BenchmarkColumnValueGenerator; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.column.ValueType; import io.druid.segment.data.CompressedObjectStrategy; import io.druid.segment.data.CompressionFactory; @@ -48,6 +49,7 @@ import java.util.Map; public class FloatCompressionBenchmarkFileGenerator { + private static final Logger log = new Logger(FloatCompressionBenchmarkFileGenerator.class); public static final int ROW_NUM = 5000000; public static final List compressions = ImmutableList.of( @@ -143,7 +145,7 @@ public class FloatCompressionBenchmarkFileGenerator for (Map.Entry entry : generators.entrySet()) { for (CompressedObjectStrategy.CompressionStrategy compression : compressions) { String name = entry.getKey() + "-" + compression.toString(); - System.out.print(name + ": "); + log.info("%s: ", name); File compFile = new File(dir, name); compFile.delete(); File dataFile = new File(dir, entry.getKey()); @@ -184,7 +186,7 @@ public class FloatCompressionBenchmarkFileGenerator iopeon.close(); br.close(); } - System.out.print(compFile.length() / 1024 + "\n"); + log.info("%d", compFile.length() / 1024); } } } diff --git a/benchmarks/src/main/java/io/druid/benchmark/IncrementalIndexRowTypeBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/IncrementalIndexRowTypeBenchmark.java index 02c08a3b3f8..924fc2e553f 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/IncrementalIndexRowTypeBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/IncrementalIndexRowTypeBenchmark.java @@ -22,6 +22,7 @@ package io.druid.benchmark; import com.google.common.collect.ImmutableMap; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; +import io.druid.java.util.common.StringUtils; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import 
io.druid.query.aggregation.DoubleSumAggregatorFactory; @@ -66,14 +67,14 @@ public class IncrementalIndexRowTypeBenchmark for (int i = 0; i < dimensionCount; ++i) { ingestAggregatorFactories.add( new LongSumAggregatorFactory( - String.format("sumResult%s", i), - String.format("Dim_%s", i) + StringUtils.format("sumResult%s", i), + StringUtils.format("Dim_%s", i) ) ); ingestAggregatorFactories.add( new DoubleSumAggregatorFactory( - String.format("doubleSumResult%s", i), - String.format("Dim_%s", i) + StringUtils.format("doubleSumResult%s", i), + StringUtils.format("Dim_%s", i) ) ); } @@ -85,7 +86,7 @@ public class IncrementalIndexRowTypeBenchmark List dimensionList = new ArrayList(dimensionCount); ImmutableMap.Builder builder = ImmutableMap.builder(); for (int i = 0; i < dimensionCount; i++) { - String dimName = String.format("Dim_%d", i); + String dimName = StringUtils.format("Dim_%d", i); dimensionList.add(dimName); builder.put(dimName, rng.nextLong()); } @@ -97,7 +98,7 @@ public class IncrementalIndexRowTypeBenchmark List dimensionList = new ArrayList(dimensionCount); ImmutableMap.Builder builder = ImmutableMap.builder(); for (int i = 0; i < dimensionCount; i++) { - String dimName = String.format("Dim_%d", i); + String dimName = StringUtils.format("Dim_%d", i); dimensionList.add(dimName); builder.put(dimName, rng.nextFloat()); } @@ -109,7 +110,7 @@ public class IncrementalIndexRowTypeBenchmark List dimensionList = new ArrayList(dimensionCount); ImmutableMap.Builder builder = ImmutableMap.builder(); for (int i = 0; i < dimensionCount; i++) { - String dimName = String.format("Dim_%d", i); + String dimName = StringUtils.format("Dim_%d", i); dimensionList.add(dimName); builder.put(dimName, String.valueOf(rng.nextLong())); } diff --git a/benchmarks/src/main/java/io/druid/benchmark/LongCompressionBenchmarkFileGenerator.java b/benchmarks/src/main/java/io/druid/benchmark/LongCompressionBenchmarkFileGenerator.java index 08d646560a8..bd00dfa62c0 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/LongCompressionBenchmarkFileGenerator.java +++ b/benchmarks/src/main/java/io/druid/benchmark/LongCompressionBenchmarkFileGenerator.java @@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.io.ByteSink; import io.druid.benchmark.datagen.BenchmarkColumnSchema; import io.druid.benchmark.datagen.BenchmarkColumnValueGenerator; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.column.ValueType; import io.druid.segment.data.CompressedObjectStrategy; import io.druid.segment.data.CompressionFactory; @@ -48,6 +49,7 @@ import java.util.Map; public class LongCompressionBenchmarkFileGenerator { + private static final Logger log = new Logger(LongCompressionBenchmarkFileGenerator.class); public static final int ROW_NUM = 5000000; public static final List compressions = ImmutableList.of(CompressedObjectStrategy.CompressionStrategy.LZ4, @@ -135,7 +137,7 @@ public class LongCompressionBenchmarkFileGenerator for (CompressedObjectStrategy.CompressionStrategy compression : compressions) { for (CompressionFactory.LongEncodingStrategy encoding : encodings) { String name = entry.getKey() + "-" + compression.toString() + "-" + encoding.toString(); - System.out.print(name + ": "); + log.info("%s: ", name); File compFile = new File(dir, name); compFile.delete(); File dataFile = new File(dir, entry.getKey()); @@ -177,7 +179,7 @@ public class LongCompressionBenchmarkFileGenerator iopeon.close(); br.close(); } - System.out.print(compFile.length() / 1024 + "\n"); + 
log.info("%d", compFile.length() / 1024); } } } diff --git a/benchmarks/src/main/java/io/druid/benchmark/VSizeSerdeBenchmark.java b/benchmarks/src/main/java/io/druid/benchmark/VSizeSerdeBenchmark.java index 6c54162b169..d0e5a2174d9 100644 --- a/benchmarks/src/main/java/io/druid/benchmark/VSizeSerdeBenchmark.java +++ b/benchmarks/src/main/java/io/druid/benchmark/VSizeSerdeBenchmark.java @@ -20,6 +20,7 @@ package io.druid.benchmark; import com.google.common.io.Files; +import io.druid.java.util.common.logger.Logger; import io.druid.segment.data.VSizeLongSerde; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; @@ -50,6 +51,7 @@ import java.util.concurrent.TimeUnit; @OutputTimeUnit(TimeUnit.MILLISECONDS) public class VSizeSerdeBenchmark { + private static final Logger log = new Logger(VSizeSerdeBenchmark.class); @Param({"500000"}) private int values; @@ -102,7 +104,7 @@ public class VSizeSerdeBenchmark public void tearDown() { dummy.delete(); - System.out.println(sum); + log.info("%d", sum); } @Benchmark diff --git a/bytebuffer-collections/src/main/java/io/druid/collections/bitmap/ConciseBitmapFactory.java b/bytebuffer-collections/src/main/java/io/druid/collections/bitmap/ConciseBitmapFactory.java index 3416cd82786..4798b890483 100755 --- a/bytebuffer-collections/src/main/java/io/druid/collections/bitmap/ConciseBitmapFactory.java +++ b/bytebuffer-collections/src/main/java/io/druid/collections/bitmap/ConciseBitmapFactory.java @@ -21,6 +21,7 @@ package io.druid.collections.bitmap; import com.google.common.collect.Iterables; import io.druid.extendedset.intset.ImmutableConciseSet; +import io.druid.java.util.common.ISE; import java.nio.ByteBuffer; import java.util.Collection; @@ -92,7 +93,7 @@ public class ConciseBitmapFactory implements BitmapFactory public ImmutableBitmap makeImmutableBitmap(MutableBitmap mutableBitmap) { if (!(mutableBitmap instanceof WrappedConciseBitmap)) { - throw new IllegalStateException(String.format("Cannot convert [%s]", mutableBitmap.getClass())); + throw new ISE("Cannot convert [%s]", mutableBitmap.getClass()); } return new WrappedImmutableConciseBitmap( ImmutableConciseSet.newImmutableFromMutable( diff --git a/bytebuffer-collections/src/main/java/io/druid/collections/bitmap/RoaringBitmapFactory.java b/bytebuffer-collections/src/main/java/io/druid/collections/bitmap/RoaringBitmapFactory.java index ea093b3a235..3d184fa06b3 100755 --- a/bytebuffer-collections/src/main/java/io/druid/collections/bitmap/RoaringBitmapFactory.java +++ b/bytebuffer-collections/src/main/java/io/druid/collections/bitmap/RoaringBitmapFactory.java @@ -21,6 +21,7 @@ package io.druid.collections.bitmap; import com.google.common.base.Throwables; import com.google.common.collect.Iterables; +import io.druid.java.util.common.ISE; import org.roaringbitmap.RoaringBitmap; import org.roaringbitmap.buffer.BufferFastAggregation; import org.roaringbitmap.buffer.ImmutableRoaringBitmap; @@ -125,7 +126,7 @@ public class RoaringBitmapFactory implements BitmapFactory public ImmutableBitmap makeImmutableBitmap(MutableBitmap mutableBitmap) { if (!(mutableBitmap instanceof WrappedRoaringBitmap)) { - throw new IllegalStateException(String.format("Cannot convert [%s]", mutableBitmap.getClass())); + throw new ISE("Cannot convert [%s]", mutableBitmap.getClass()); } try { return ((WrappedRoaringBitmap) mutableBitmap).toImmutableBitmap(); diff --git a/bytebuffer-collections/src/main/java/io/druid/collections/bitmap/WrappedBitSetBitmap.java 
b/bytebuffer-collections/src/main/java/io/druid/collections/bitmap/WrappedBitSetBitmap.java index 3781f335aaf..21d070d0421 100755 --- a/bytebuffer-collections/src/main/java/io/druid/collections/bitmap/WrappedBitSetBitmap.java +++ b/bytebuffer-collections/src/main/java/io/druid/collections/bitmap/WrappedBitSetBitmap.java @@ -19,6 +19,8 @@ package io.druid.collections.bitmap; +import io.druid.java.util.common.IAE; + import java.nio.ByteBuffer; import java.util.BitSet; @@ -61,12 +63,10 @@ public class WrappedBitSetBitmap extends WrappedImmutableBitSetBitmap implements WrappedBitSetBitmap bitSet = (WrappedBitSetBitmap) mutableBitmap; this.bitmap.or(bitSet.bitmap); } else { - throw new IllegalArgumentException( - String.format( - "Unknown class type: %s expected %s", - mutableBitmap.getClass().getCanonicalName(), - WrappedBitSetBitmap.class.getCanonicalName() - ) + throw new IAE( + "Unknown class type: %s expected %s", + mutableBitmap.getClass().getCanonicalName(), + WrappedBitSetBitmap.class.getCanonicalName() ); } } @@ -78,12 +78,10 @@ public class WrappedBitSetBitmap extends WrappedImmutableBitSetBitmap implements WrappedBitSetBitmap bitSet = (WrappedBitSetBitmap) mutableBitmap; this.bitmap.and(bitSet.bitmap); } else { - throw new IllegalArgumentException( - String.format( - "Unknown class type: %s expected %s", - mutableBitmap.getClass().getCanonicalName(), - WrappedBitSetBitmap.class.getCanonicalName() - ) + throw new IAE( + "Unknown class type: %s expected %s", + mutableBitmap.getClass().getCanonicalName(), + WrappedBitSetBitmap.class.getCanonicalName() ); } } @@ -95,12 +93,10 @@ public class WrappedBitSetBitmap extends WrappedImmutableBitSetBitmap implements WrappedBitSetBitmap bitSet = (WrappedBitSetBitmap) mutableBitmap; this.bitmap.xor(bitSet.bitmap); } else { - throw new IllegalArgumentException( - String.format( - "Unknown class type: %s expected %s", - mutableBitmap.getClass().getCanonicalName(), - WrappedBitSetBitmap.class.getCanonicalName() - ) + throw new IAE( + "Unknown class type: %s expected %s", + mutableBitmap.getClass().getCanonicalName(), + WrappedBitSetBitmap.class.getCanonicalName() ); } } @@ -112,12 +108,10 @@ public class WrappedBitSetBitmap extends WrappedImmutableBitSetBitmap implements WrappedBitSetBitmap bitSet = (WrappedBitSetBitmap) mutableBitmap; this.bitmap.andNot(bitSet.bitmap); } else { - throw new IllegalArgumentException( - String.format( - "Unknown class type: %s expected %s", - mutableBitmap.getClass().getCanonicalName(), - WrappedBitSetBitmap.class.getCanonicalName() - ) + throw new IAE( + "Unknown class type: %s expected %s", + mutableBitmap.getClass().getCanonicalName(), + WrappedBitSetBitmap.class.getCanonicalName() ); } } diff --git a/bytebuffer-collections/src/main/java/io/druid/collections/bitmap/WrappedRoaringBitmap.java b/bytebuffer-collections/src/main/java/io/druid/collections/bitmap/WrappedRoaringBitmap.java index eeb7bdb0e26..0ca43c7427f 100755 --- a/bytebuffer-collections/src/main/java/io/druid/collections/bitmap/WrappedRoaringBitmap.java +++ b/bytebuffer-collections/src/main/java/io/druid/collections/bitmap/WrappedRoaringBitmap.java @@ -205,7 +205,7 @@ public class WrappedRoaringBitmap implements MutableBitmap ); } catch (IOException e) { - e.printStackTrace(); // impossible in theory + throw new RuntimeException(e); // impossible in theory } } diff --git a/bytebuffer-collections/src/main/java/io/druid/collections/spatial/RTreeUtils.java b/bytebuffer-collections/src/main/java/io/druid/collections/spatial/RTreeUtils.java index 
e5c00b64285..572c0e28f5a 100755 --- a/bytebuffer-collections/src/main/java/io/druid/collections/spatial/RTreeUtils.java +++ b/bytebuffer-collections/src/main/java/io/druid/collections/spatial/RTreeUtils.java @@ -19,17 +19,12 @@ package io.druid.collections.spatial; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Function; import com.google.common.base.Preconditions; -import com.google.common.base.Throwables; -import com.google.common.collect.Iterables; /** */ public class RTreeUtils { - private static ObjectMapper jsonMapper = new ObjectMapper(); public static double getEnclosingArea(Node a, Node b) { @@ -76,177 +71,4 @@ public class RTreeUtils } } - public static Iterable getBitmaps(ImmutableRTree tree) - { - return depthFirstSearch(tree.getRoot()); - } - - public static Iterable depthFirstSearch(ImmutableNode node) - { - if (node.isLeaf()) { - return Iterables.transform( - node.getChildren(), - new Function() - { - @Override - public ImmutablePoint apply(ImmutableNode tNode) - { - return new ImmutablePoint(tNode); - } - } - ); - } else { - return Iterables.concat( - Iterables.transform( - - node.getChildren(), - new Function>() - { - @Override - public Iterable apply(ImmutableNode child) - { - return depthFirstSearch(child); - } - } - ) - ); - } - } - - public static void print(RTree tree) - { - System.out.printf("numDims : %d%n", tree.getNumDims()); - try { - printRTreeNode(tree.getRoot(), 0); - } - catch (Exception e) { - throw Throwables.propagate(e); - } - } - - public static void print(ImmutableRTree tree) - { - System.out.printf("numDims : %d%n", tree.getNumDims()); - try { - printNode(tree.getRoot(), 0); - } - catch (Exception e) { - throw Throwables.propagate(e); - } - } - - public static void printRTreeNode(Node node, int level) throws Exception - { - System.out.printf( - "%sminCoords: %s, maxCoords: %s, numChildren: %d, isLeaf:%s%n", - makeDashes(level), - jsonMapper.writeValueAsString(node.getMinCoordinates()), - jsonMapper.writeValueAsString( - node.getMaxCoordinates() - ), - node.getChildren().size(), - node.isLeaf() - ); - if (node.isLeaf()) { - for (Node child : node.getChildren()) { - Point point = (Point) (child); - System.out - .printf( - "%scoords: %s, conciseSet: %s%n", - makeDashes(level), - jsonMapper.writeValueAsString(point.getCoords()), - point.getBitmap() - ); - } - } else { - level++; - for (Node child : node.getChildren()) { - printRTreeNode(child, level); - } - } - } - - public static boolean verifyEnclose(Node node) - { - for (Node child : node.getChildren()) { - for (int i = 0; i < node.getNumDims(); i++) { - if (child.getMinCoordinates()[i] < node.getMinCoordinates()[i] - || child.getMaxCoordinates()[i] > node.getMaxCoordinates()[i]) { - return false; - } - } - } - - if (!node.isLeaf()) { - for (Node child : node.getChildren()) { - if (!verifyEnclose(child)) { - return false; - } - } - } - - return true; - } - - public static boolean verifyEnclose(ImmutableNode node) - { - for (ImmutableNode child : node.getChildren()) { - for (int i = 0; i < node.getNumDims(); i++) { - if (child.getMinCoordinates()[i] < node.getMinCoordinates()[i] - || child.getMaxCoordinates()[i] > node.getMaxCoordinates()[i]) { - return false; - } - } - } - - if (!node.isLeaf()) { - for (ImmutableNode child : node.getChildren()) { - if (!verifyEnclose(child)) { - return false; - } - } - } - - return true; - } - - private static void printNode(ImmutableNode node, int level) throws Exception - { - System.out.printf( - "%sminCoords: %s, maxCoords: %s, 
numChildren: %d, isLeaf: %s%n", - makeDashes(level), - jsonMapper.writeValueAsString(node.getMinCoordinates()), - jsonMapper.writeValueAsString( - node.getMaxCoordinates() - ), - node.getNumChildren(), - node.isLeaf() - ); - if (node.isLeaf()) { - for (ImmutableNode immutableNode : node.getChildren()) { - ImmutablePoint point = new ImmutablePoint(immutableNode); - System.out - .printf( - "%scoords: %s, conciseSet: %s%n", - makeDashes(level), - jsonMapper.writeValueAsString(point.getCoords()), - point.getImmutableBitmap() - ); - } - } else { - level++; - for (ImmutableNode immutableNode : node.getChildren()) { - printNode(immutableNode, level); - } - } - } - - private static String makeDashes(int level) - { - String retVal = ""; - for (int i = 0; i < level; i++) { - retVal += "-"; - } - return retVal; - } } diff --git a/bytebuffer-collections/src/test/java/io/druid/collections/bitmap/BitmapBenchmark.java b/bytebuffer-collections/src/test/java/io/druid/collections/bitmap/BitmapBenchmark.java index 617e4ea6de0..93ee6eb9c90 100755 --- a/bytebuffer-collections/src/test/java/io/druid/collections/bitmap/BitmapBenchmark.java +++ b/bytebuffer-collections/src/test/java/io/druid/collections/bitmap/BitmapBenchmark.java @@ -19,12 +19,11 @@ package io.druid.collections.bitmap; -import java.io.ByteArrayOutputStream; -import java.io.DataOutputStream; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.Random; - +import com.carrotsearch.junitbenchmarks.BenchmarkOptions; +import com.carrotsearch.junitbenchmarks.BenchmarkRule; +import com.carrotsearch.junitbenchmarks.Clock; +import com.google.common.collect.Lists; +import io.druid.extendedset.intset.ImmutableConciseSet; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; @@ -33,12 +32,12 @@ import org.roaringbitmap.buffer.BufferFastAggregation; import org.roaringbitmap.buffer.ImmutableRoaringBitmap; import org.roaringbitmap.buffer.MutableRoaringBitmap; -import com.carrotsearch.junitbenchmarks.BenchmarkOptions; -import com.carrotsearch.junitbenchmarks.BenchmarkRule; -import com.carrotsearch.junitbenchmarks.Clock; -import com.google.common.collect.Lists; - -import io.druid.extendedset.intset.ImmutableConciseSet; +import java.io.ByteArrayOutputStream; +import java.io.DataOutputStream; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.Locale; +import java.util.Random; @BenchmarkOptions(clock = Clock.NANO_TIME, benchmarkRounds = 50) @@ -102,11 +101,12 @@ public class BitmapBenchmark System.out.println(""); System.out.println("## " + name); System.out.println(""); - System.out.printf(" d = %06.5f | Concise | Roaring" + System.lineSeparator(), density); + System.out.printf(Locale.ENGLISH, " d = %06.5f | Concise | Roaring%n", density); System.out.println("-------------|---------|---------"); - System.out.printf("Count | %5d | %5d " + System.lineSeparator(), conciseCount, roaringCount); + System.out.printf(Locale.ENGLISH, "Count | %5d | %5d %n", conciseCount, roaringCount); System.out.printf( - "Average size | %5d | %5d " + System.lineSeparator(), + Locale.ENGLISH, + "Average size | %5d | %5d %n", totalConciseBytes / conciseCount, totalRoaringBytes / roaringCount ); diff --git a/bytebuffer-collections/src/test/java/io/druid/collections/bitmap/RangeBitmapBenchmarkTest.java b/bytebuffer-collections/src/test/java/io/druid/collections/bitmap/RangeBitmapBenchmarkTest.java index 5549c85dac0..4c5d3769c9a 100755 --- 
a/bytebuffer-collections/src/test/java/io/druid/collections/bitmap/RangeBitmapBenchmarkTest.java +++ b/bytebuffer-collections/src/test/java/io/druid/collections/bitmap/RangeBitmapBenchmarkTest.java @@ -19,18 +19,17 @@ package io.druid.collections.bitmap; -import java.util.BitSet; - +import com.carrotsearch.junitbenchmarks.annotation.BenchmarkHistoryChart; +import com.carrotsearch.junitbenchmarks.annotation.LabelType; +import io.druid.extendedset.intset.ConciseSet; +import io.druid.extendedset.intset.ImmutableConciseSet; +import io.druid.java.util.common.StringUtils; +import io.druid.test.annotation.Benchmark; import org.junit.BeforeClass; import org.junit.experimental.categories.Category; import org.roaringbitmap.buffer.MutableRoaringBitmap; -import com.carrotsearch.junitbenchmarks.annotation.BenchmarkHistoryChart; -import com.carrotsearch.junitbenchmarks.annotation.LabelType; - -import io.druid.test.annotation.Benchmark; -import io.druid.extendedset.intset.ConciseSet; -import io.druid.extendedset.intset.ImmutableConciseSet; +import java.util.BitSet; @Category({Benchmark.class}) @BenchmarkHistoryChart(labelWith = LabelType.CUSTOM_KEY, maxRuns = 20) @@ -43,7 +42,7 @@ public class RangeBitmapBenchmarkTest extends BitmapBenchmark @BeforeClass public static void prepareRandomRanges() throws Exception { - System.setProperty("jub.customkey", String.format("%06.5f", DENSITY)); + System.setProperty("jub.customkey", StringUtils.format("%06.5f", DENSITY)); reset(); final BitSet expectedUnion = new BitSet(); diff --git a/bytebuffer-collections/src/test/java/io/druid/collections/bitmap/UniformBitmapBenchmarkTest.java b/bytebuffer-collections/src/test/java/io/druid/collections/bitmap/UniformBitmapBenchmarkTest.java index 6707a6c015a..6a043817889 100755 --- a/bytebuffer-collections/src/test/java/io/druid/collections/bitmap/UniformBitmapBenchmarkTest.java +++ b/bytebuffer-collections/src/test/java/io/druid/collections/bitmap/UniformBitmapBenchmarkTest.java @@ -19,18 +19,17 @@ package io.druid.collections.bitmap; -import java.util.BitSet; - +import com.carrotsearch.junitbenchmarks.annotation.BenchmarkHistoryChart; +import com.carrotsearch.junitbenchmarks.annotation.LabelType; +import io.druid.extendedset.intset.ConciseSet; +import io.druid.extendedset.intset.ImmutableConciseSet; +import io.druid.java.util.common.StringUtils; +import io.druid.test.annotation.Benchmark; import org.junit.BeforeClass; import org.junit.experimental.categories.Category; import org.roaringbitmap.buffer.MutableRoaringBitmap; -import com.carrotsearch.junitbenchmarks.annotation.BenchmarkHistoryChart; -import com.carrotsearch.junitbenchmarks.annotation.LabelType; - -import io.druid.test.annotation.Benchmark; -import io.druid.extendedset.intset.ConciseSet; -import io.druid.extendedset.intset.ImmutableConciseSet; +import java.util.BitSet; @Category({Benchmark.class}) @BenchmarkHistoryChart(labelWith = LabelType.CUSTOM_KEY, maxRuns = 20) @@ -43,7 +42,7 @@ public class UniformBitmapBenchmarkTest extends BitmapBenchmark @BeforeClass public static void prepareMostlyUniform() throws Exception { - System.setProperty("jub.customkey", String.format("%05.4f", DENSITY)); + System.setProperty("jub.customkey", StringUtils.format("%05.4f", DENSITY)); reset(); final BitSet expectedUnion = new BitSet(); diff --git a/bytebuffer-collections/src/test/java/io/druid/collections/spatial/ImmutableRTreeTest.java b/bytebuffer-collections/src/test/java/io/druid/collections/spatial/ImmutableRTreeTest.java index 4ae8bc74431..bbc571f03fd 100755 --- 
a/bytebuffer-collections/src/test/java/io/druid/collections/spatial/ImmutableRTreeTest.java +++ b/bytebuffer-collections/src/test/java/io/druid/collections/spatial/ImmutableRTreeTest.java @@ -36,6 +36,7 @@ import org.junit.Test; import org.roaringbitmap.IntIterator; import java.nio.ByteBuffer; +import java.util.Locale; import java.util.Random; import java.util.Set; import java.util.concurrent.TimeUnit; @@ -563,13 +564,13 @@ public class ImmutableRTreeTest tree.insert(new float[]{(float) (rand.nextDouble() * 100), (float) (rand.nextDouble() * 100)}, i); } long stop = stopwatch.elapsed(TimeUnit.MILLISECONDS); - System.out.printf("[%,d]: insert = %,d ms%n", numPoints, stop); + System.out.printf(Locale.ENGLISH, "[%,d]: insert = %,d ms%n", numPoints, stop); stopwatch.reset().start(); ImmutableRTree searchTree = ImmutableRTree.newImmutableFromMutable(tree); stop = stopwatch.elapsed(TimeUnit.MILLISECONDS); - System.out.printf("[%,d]: size = %,d bytes%n", numPoints, searchTree.toBytes().length); - System.out.printf("[%,d]: buildImmutable = %,d ms%n", numPoints, stop); + System.out.printf(Locale.ENGLISH, "[%,d]: size = %,d bytes%n", numPoints, searchTree.toBytes().length); + System.out.printf(Locale.ENGLISH, "[%,d]: buildImmutable = %,d ms%n", numPoints, stop); stopwatch.reset().start(); @@ -578,14 +579,14 @@ public class ImmutableRTreeTest Iterables.size(points); stop = stopwatch.elapsed(TimeUnit.MILLISECONDS); - System.out.printf("[%,d]: search = %,dms%n", numPoints, stop); + System.out.printf(Locale.ENGLISH, "[%,d]: search = %,dms%n", numPoints, stop); stopwatch.reset().start(); ImmutableBitmap finalSet = bf.union(points); stop = stopwatch.elapsed(TimeUnit.MILLISECONDS); - System.out.printf("[%,d]: union of %,d points in %,d ms%n", numPoints, finalSet.size(), stop); + System.out.printf(Locale.ENGLISH, "[%,d]: union of %,d points in %,d ms%n", numPoints, finalSet.size(), stop); } catch (Exception e) { throw Throwables.propagate(e); @@ -613,13 +614,13 @@ public class ImmutableRTreeTest tree.insert(new float[]{(float) (rand.nextDouble() * 100), (float) (rand.nextDouble() * 100)}, i); } long stop = stopwatch.elapsed(TimeUnit.MILLISECONDS); - System.out.printf("[%,d]: insert = %,d ms%n", numPoints, stop); + System.out.printf(Locale.ENGLISH, "[%,d]: insert = %,d ms%n", numPoints, stop); stopwatch.reset().start(); ImmutableRTree searchTree = ImmutableRTree.newImmutableFromMutable(tree); stop = stopwatch.elapsed(TimeUnit.MILLISECONDS); - System.out.printf("[%,d]: size = %,d bytes%n", numPoints, searchTree.toBytes().length); - System.out.printf("[%,d]: buildImmutable = %,d ms%n", numPoints, stop); + System.out.printf(Locale.ENGLISH, "[%,d]: size = %,d bytes%n", numPoints, searchTree.toBytes().length); + System.out.printf(Locale.ENGLISH, "[%,d]: buildImmutable = %,d ms%n", numPoints, stop); stopwatch.reset().start(); @@ -634,14 +635,14 @@ public class ImmutableRTreeTest Iterables.size(points); stop = stopwatch.elapsed(TimeUnit.MILLISECONDS); - System.out.printf("[%,d]: search = %,dms%n", numPoints, stop); + System.out.printf(Locale.ENGLISH, "[%,d]: search = %,dms%n", numPoints, stop); stopwatch.reset().start(); ImmutableBitmap finalSet = bf.union(points); stop = stopwatch.elapsed(TimeUnit.MILLISECONDS); - System.out.printf("[%,d]: union of %,d points in %,d ms%n", numPoints, finalSet.size(), stop); + System.out.printf(Locale.ENGLISH, "[%,d]: union of %,d points in %,d ms%n", numPoints, finalSet.size(), stop); } catch (Exception e) { throw Throwables.propagate(e); diff --git 
a/codestyle/checkstyle-suppressions.xml b/codestyle/checkstyle-suppressions.xml index 30eca237c0b..57f6482954b 100644 --- a/codestyle/checkstyle-suppressions.xml +++ b/codestyle/checkstyle-suppressions.xml @@ -33,6 +33,10 @@ + + + + diff --git a/common/src/main/java/io/druid/common/config/Log4jShutdown.java b/common/src/main/java/io/druid/common/config/Log4jShutdown.java index 3aa41e372ac..fc30acfe572 100644 --- a/common/src/main/java/io/druid/common/config/Log4jShutdown.java +++ b/common/src/main/java/io/druid/common/config/Log4jShutdown.java @@ -20,6 +20,7 @@ package io.druid.common.config; import com.google.common.base.Throwables; +import io.druid.java.util.common.ISE; import org.apache.logging.log4j.core.util.Cancellable; import org.apache.logging.log4j.core.util.ShutdownCallbackRegistry; @@ -90,7 +91,7 @@ public class Log4jShutdown implements ShutdownCallbackRegistry, org.apache.loggi public void start() { if (!state.compareAndSet(State.INITIALIZED, State.STARTED)) { // Skip STARTING - throw new IllegalStateException(String.format("Expected state [%s] found [%s]", State.INITIALIZED, state.get())); + throw new ISE("Expected state [%s] found [%s]", State.INITIALIZED, state.get()); } } @@ -100,7 +101,7 @@ public class Log4jShutdown implements ShutdownCallbackRegistry, org.apache.loggi if (!state.compareAndSet(State.STARTED, State.STOPPING)) { State current = state.waitForTransition(State.STOPPING, State.STOPPED, SHUTDOWN_WAIT_TIMEOUT); if (current != State.STOPPED) { - throw new IllegalStateException(String.format("Expected state [%s] found [%s]", State.STARTED, current)); + throw new ISE("Expected state [%s] found [%s]", State.STARTED, current); } return; } diff --git a/common/src/main/java/io/druid/common/guava/GuavaUtils.java b/common/src/main/java/io/druid/common/guava/GuavaUtils.java index d81ea621df1..4302d26149e 100644 --- a/common/src/main/java/io/druid/common/guava/GuavaUtils.java +++ b/common/src/main/java/io/druid/common/guava/GuavaUtils.java @@ -26,6 +26,7 @@ import com.google.common.collect.Iterables; import com.google.common.io.CharStreams; import com.google.common.io.InputSupplier; import com.google.common.primitives.Longs; +import io.druid.java.util.common.StringUtils; import javax.annotation.Nullable; import java.io.BufferedReader; @@ -49,7 +50,7 @@ public class GuavaUtils @Override public String apply(@Nullable String input) { - return String.format(formatString, input); + return StringUtils.format(formatString, input); } }; } diff --git a/common/src/main/java/io/druid/common/utils/ServletResourceUtils.java b/common/src/main/java/io/druid/common/utils/ServletResourceUtils.java index 120c4536b57..48e105426e2 100644 --- a/common/src/main/java/io/druid/common/utils/ServletResourceUtils.java +++ b/common/src/main/java/io/druid/common/utils/ServletResourceUtils.java @@ -47,6 +47,6 @@ public class ServletResourceUtils */ public static Map jsonize(String msgFormat, Object... 
args) { - return ImmutableMap.of("error", StringUtils.safeFormat(msgFormat, args)); + return ImmutableMap.of("error", StringUtils.nonStrictFormat(msgFormat, args)); } } diff --git a/common/src/main/java/io/druid/common/utils/VMUtils.java b/common/src/main/java/io/druid/common/utils/VMUtils.java index 6fd5596f522..1f09ede3bf7 100644 --- a/common/src/main/java/io/druid/common/utils/VMUtils.java +++ b/common/src/main/java/io/druid/common/utils/VMUtils.java @@ -19,6 +19,8 @@ package io.druid.common.utils; +import io.druid.java.util.common.UOE; + import java.lang.management.ManagementFactory; import java.lang.management.ThreadMXBean; import java.lang.reflect.InvocationTargetException; @@ -63,12 +65,7 @@ public class VMUtils Object maxDirectMemoryObj = vmClass.getMethod("maxDirectMemory").invoke(null); if (maxDirectMemoryObj == null || !(maxDirectMemoryObj instanceof Number)) { - throw new UnsupportedOperationException( - String.format( - "Cannot determine maxDirectMemory from [%s]", - maxDirectMemoryObj - ) - ); + throw new UOE("Cannot determine maxDirectMemory from [%s]", maxDirectMemoryObj); } else { return ((Number) maxDirectMemoryObj).longValue(); } diff --git a/common/src/main/java/io/druid/math/expr/ExprMacroTable.java b/common/src/main/java/io/druid/math/expr/ExprMacroTable.java index ae41629e50a..6a93510b81c 100644 --- a/common/src/main/java/io/druid/math/expr/ExprMacroTable.java +++ b/common/src/main/java/io/druid/math/expr/ExprMacroTable.java @@ -19,6 +19,8 @@ package io.druid.math.expr; +import io.druid.java.util.common.StringUtils; + import javax.annotation.Nullable; import java.util.Collections; import java.util.List; @@ -35,7 +37,7 @@ public class ExprMacroTable { this.macroMap = macros.stream().collect( Collectors.toMap( - m -> m.name().toLowerCase(), + m -> StringUtils.toLowerCase(m.name()), m -> m ) ); @@ -58,7 +60,7 @@ public class ExprMacroTable @Nullable public Expr get(final String functionName, final List args) { - final ExprMacro exprMacro = macroMap.get(functionName.toLowerCase()); + final ExprMacro exprMacro = macroMap.get(StringUtils.toLowerCase(functionName)); if (exprMacro == null) { return null; } diff --git a/common/src/main/java/io/druid/math/expr/Function.java b/common/src/main/java/io/druid/math/expr/Function.java index 52739d42f3c..df226372080 100644 --- a/common/src/main/java/io/druid/math/expr/Function.java +++ b/common/src/main/java/io/druid/math/expr/Function.java @@ -21,6 +21,7 @@ package io.druid.math.expr; import com.google.common.base.Strings; import io.druid.java.util.common.IAE; +import io.druid.java.util.common.StringUtils; import org.joda.time.DateTime; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; @@ -793,7 +794,7 @@ interface Function { ExprType castTo; try { - castTo = ExprType.valueOf(y.asString().toUpperCase()); + castTo = ExprType.valueOf(StringUtils.toUpperCase(y.asString())); } catch (IllegalArgumentException e) { throw new IAE("invalid type '%s'", y.asString()); diff --git a/common/src/main/java/io/druid/math/expr/Parser.java b/common/src/main/java/io/druid/math/expr/Parser.java index 27ea8b64037..7d01961171d 100644 --- a/common/src/main/java/io/druid/math/expr/Parser.java +++ b/common/src/main/java/io/druid/math/expr/Parser.java @@ -26,6 +26,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; +import io.druid.java.util.common.StringUtils; import 
io.druid.java.util.common.logger.Logger; import io.druid.math.expr.antlr.ExprLexer; import io.druid.math.expr.antlr.ExprParser; @@ -50,7 +51,7 @@ public class Parser if (!Modifier.isAbstract(clazz.getModifiers()) && Function.class.isAssignableFrom(clazz)) { try { Function function = (Function) clazz.newInstance(); - functionMap.put(function.name().toLowerCase(), function); + functionMap.put(StringUtils.toLowerCase(function.name()), function); } catch (Exception e) { log.info("failed to instantiate " + clazz.getName() + ".. ignoring", e); @@ -62,7 +63,7 @@ public class Parser public static Function getFunction(String name) { - return FUNCTIONS.get(name.toLowerCase()); + return FUNCTIONS.get(StringUtils.toLowerCase(name)); } public static Expr parse(String in, ExprMacroTable macroTable) diff --git a/common/src/main/java/io/druid/metadata/EntryExistsException.java b/common/src/main/java/io/druid/metadata/EntryExistsException.java index f80f139d75d..7f65f1a495f 100644 --- a/common/src/main/java/io/druid/metadata/EntryExistsException.java +++ b/common/src/main/java/io/druid/metadata/EntryExistsException.java @@ -19,13 +19,15 @@ package io.druid.metadata; +import io.druid.java.util.common.StringUtils; + public class EntryExistsException extends Exception { private final String entryId; public EntryExistsException(String entryId, Throwable t) { - super(String.format("Entry already exists: %s", entryId), t); + super(StringUtils.format("Entry already exists: %s", entryId), t); this.entryId = entryId; } diff --git a/common/src/main/java/io/druid/metadata/MetadataStorageConnectorConfig.java b/common/src/main/java/io/druid/metadata/MetadataStorageConnectorConfig.java index c837ceefb1a..60f87e924a7 100644 --- a/common/src/main/java/io/druid/metadata/MetadataStorageConnectorConfig.java +++ b/common/src/main/java/io/druid/metadata/MetadataStorageConnectorConfig.java @@ -20,6 +20,7 @@ package io.druid.metadata; import com.fasterxml.jackson.annotation.JsonProperty; +import io.druid.java.util.common.StringUtils; /** */ @@ -61,7 +62,7 @@ public class MetadataStorageConnectorConfig public String getConnectURI() { if (connectURI == null) { - return String.format("jdbc:derby://%s:%s/druid;create=true", host, port); + return StringUtils.format("jdbc:derby://%s:%s/druid;create=true", host, port); } return connectURI; } diff --git a/common/src/main/java/io/druid/metadata/MetadataStorageTablesConfig.java b/common/src/main/java/io/druid/metadata/MetadataStorageTablesConfig.java index af8054ff67b..83430dd4da8 100644 --- a/common/src/main/java/io/druid/metadata/MetadataStorageTablesConfig.java +++ b/common/src/main/java/io/druid/metadata/MetadataStorageTablesConfig.java @@ -22,6 +22,7 @@ package io.druid.metadata; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.Maps; +import io.druid.java.util.common.StringUtils; import java.util.Map; @@ -113,7 +114,7 @@ public class MetadataStorageTablesConfig if (base == null) { return null; } - return String.format("%s_%s", base, defaultSuffix); + return StringUtils.format("%s_%s", base, defaultSuffix); } return explicitTableName; diff --git a/common/src/main/java/io/druid/timeline/VersionedIntervalTimeline.java b/common/src/main/java/io/druid/timeline/VersionedIntervalTimeline.java index 9e2205a4fc2..85d1f08e713 100644 --- a/common/src/main/java/io/druid/timeline/VersionedIntervalTimeline.java +++ b/common/src/main/java/io/druid/timeline/VersionedIntervalTimeline.java @@ -26,6 +26,7 @@ 
import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import com.google.common.collect.Sets; import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.UOE; import io.druid.java.util.common.guava.Comparators; import io.druid.timeline.partition.ImmutablePartitionHolder; import io.druid.timeline.partition.PartitionChunk; @@ -415,13 +416,11 @@ public class VersionedIntervalTimeline implements Timel // This occurs when restoring segments timeline.remove(currKey); } else { - throw new UnsupportedOperationException( - String.format( - "Cannot add overlapping segments [%s and %s] with the same version [%s]", - currKey, - entryInterval, - entry.getVersion() - ) + throw new UOE( + "Cannot add overlapping segments [%s and %s] with the same version [%s]", + currKey, + entryInterval, + entry.getVersion() ); } } diff --git a/examples/src/main/java/io/druid/examples/twitter/TwitterSpritzerFirehoseFactory.java b/examples/src/main/java/io/druid/examples/twitter/TwitterSpritzerFirehoseFactory.java index c7be90b8a2d..cc7eea323a1 100644 --- a/examples/src/main/java/io/druid/examples/twitter/TwitterSpritzerFirehoseFactory.java +++ b/examples/src/main/java/io/druid/examples/twitter/TwitterSpritzerFirehoseFactory.java @@ -25,12 +25,13 @@ import com.fasterxml.jackson.annotation.JsonTypeName; import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; -import com.metamx.common.logger.Logger; import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseFactory; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.impl.InputRowParser; +import io.druid.java.util.common.StringUtils; +import io.druid.java.util.common.logger.Logger; import twitter4j.ConnectionLifeCycleListener; import twitter4j.GeoLocation; import twitter4j.HashtagEntity; @@ -195,13 +196,13 @@ public class TwitterSpritzerFirehoseFactory implements FirehoseFactory contributors = new ArrayList<>(); for (long contrib : lcontrobutors) { - contributors.add(String.format("%d", contrib)); + contributors.add(StringUtils.format("%d", contrib)); } theMap.put("contributors", contributors); @@ -346,7 +347,7 @@ public class TwitterSpritzerFirehoseFactory implements FirehoseFactory + + + io.druid + java-util + ${project.parent.version} + + com.google.guava guava diff --git a/extendedset/src/main/java/io/druid/extendedset/intset/ConciseSet.java b/extendedset/src/main/java/io/druid/extendedset/intset/ConciseSet.java index 40fa797dde3..d859c77e74c 100755 --- a/extendedset/src/main/java/io/druid/extendedset/intset/ConciseSet.java +++ b/extendedset/src/main/java/io/druid/extendedset/intset/ConciseSet.java @@ -20,6 +20,8 @@ package io.druid.extendedset.intset; +import io.druid.java.util.common.StringUtils; + import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; @@ -2276,7 +2278,7 @@ public class ConciseSet extends AbstractIntSet implements java.io.Serializable if (bit == 0) { s.append("none"); } else { - s.append(String.format("%4d", bit - 1)); + s.append(StringUtils.format("%4d", bit - 1)); } s.append(')'); } diff --git a/extendedset/src/main/java/io/druid/extendedset/utilities/BitCount.java b/extendedset/src/main/java/io/druid/extendedset/utilities/BitCount.java deleted file mode 100755 index b6c7fa9b869..00000000000 --- a/extendedset/src/main/java/io/druid/extendedset/utilities/BitCount.java +++ /dev/null @@ -1,333 +0,0 @@ -/* - * (c) 2010 Alessandro 
Colantonio - * - * - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package io.druid.extendedset.utilities; - -import java.util.Random; - -/** - * Population count (a.k.a. Hamming distance) of a bitmap represented by an - * array of int. - *
- * Derived from http - * ://dalkescientific.com/writings/diary/popcnt.c - * - * @author Alessandro Colantonio - * @version $Id: BitCount.java 157 2011-11-14 14:25:15Z cocciasik $ - */ -public class BitCount -{ - /** - * Population count - * - * @param buffer array of int - * - * @return population count - */ - public static int count(int[] buffer) - { - return count(buffer, buffer.length); - } - - /** - * Population count - *
- * It counts 24 words at a time, then 3 at a time, then 1 at a time - * - * @param buffer array of int - * @param n number of elements of buffer to count - * - * @return population count - */ - public static int count(int[] buffer, int n) - { - final int n1 = n - n % 24; - final int n2 = n - n % 3; - - int cnt = 0; - int i; - for (i = 0; i < n1; i += 24) { - cnt += merging3(buffer, i); - } - for (; i < n2; i += 3) { - cnt += merging2(buffer, i); - } - cnt += popcount_fbsd2(buffer, i, n); - return cnt; - } - - // used by count() - private static int merging3(int[] buffer, int x) - { - int cnt1; - int cnt2; - int cnt = 0; - for (int i = x; i < x + 24; i += 3) { - cnt1 = buffer[i]; - cnt2 = buffer[i + 1]; - final int w = buffer[i + 2]; - cnt1 = cnt1 - ((cnt1 >>> 1) & 0x55555555) + (w & 0x55555555); - cnt2 = cnt2 - ((cnt2 >>> 1) & 0x55555555) + ((w >>> 1) & 0x55555555); - cnt1 = (cnt1 & 0x33333333) + ((cnt1 >>> 2) & 0x33333333); - cnt1 += (cnt2 & 0x33333333) + ((cnt2 >>> 2) & 0x33333333); - cnt += (cnt1 & 0x0F0F0F0F) + ((cnt1 >>> 4) & 0x0F0F0F0F); - } - cnt = (cnt & 0x00FF00FF) + ((cnt >>> 8) & 0x00FF00FF); - cnt += cnt >>> 16; - return cnt & 0x00000FFFF; - } - - // used by count() - private static int merging2(int[] buffer, int x) - { - int cnt1 = buffer[x]; - int cnt2 = buffer[x + 1]; - final int w = buffer[x + 2]; - cnt1 = cnt1 - ((cnt1 >>> 1) & 0x55555555) + (w & 0x55555555); - cnt2 = cnt2 - ((cnt2 >>> 1) & 0x55555555) + ((w >>> 1) & 0x55555555); - cnt1 = (cnt1 & 0x33333333) + ((cnt1 >>> 2) & 0x33333333); - cnt2 = (cnt2 & 0x33333333) + ((cnt2 >>> 2) & 0x33333333); - cnt1 += cnt2; - cnt1 = (cnt1 & 0x0F0F0F0F) + ((cnt1 >>> 4) & 0x0F0F0F0F); - cnt1 += cnt1 >>> 8; - cnt1 += cnt1 >>> 16; - return cnt1 & 0x000000FF; - } - - // used by count() - private static int popcount_fbsd2(int[] data, int x, int n) - { - int cnt = 0; - for (; x < n; x++) { - cnt += Integer.bitCount(data[x]); - } - return cnt; - } - - /** - * Population count, skipping words at even positions - * - * @param buffer array of int - * - * @return population count - */ - public static int count_2(int[] buffer) - { - return count_2(buffer, buffer.length); - } - - /** - * Population count, skipping words at even positions - *
- * It counts 24 words at a time, then 3 at a time, then 1 at a time - * - * @param buffer array of int - * @param n number of elements of buffer to count - * - * @return population count - */ - public static int count_2(int[] buffer, int n) - { - final int n1 = n - n % 48; - final int n2 = n - n % 6; - - int cnt = 0; - int i; - for (i = 0; i < n1; i += 48) { - cnt += merging3_2(buffer, i); - } - for (; i < n2; i += 6) { - cnt += merging2_2(buffer, i); - } - cnt += popcount_fbsd2_2(buffer, i, n); - return cnt; - } - - // used by count_2() - private static int merging3_2(int[] buffer, int x) - { - int cnt1; - int cnt2; - int cnt = 0; - for (int i = x; i < x + 48; i += 6) { - cnt1 = buffer[i + 1]; - cnt2 = buffer[i + 3]; - final int w = buffer[i + 5]; - cnt1 = cnt1 - ((cnt1 >>> 1) & 0x55555555) + (w & 0x55555555); - cnt2 = cnt2 - ((cnt2 >>> 1) & 0x55555555) + ((w >>> 1) & 0x55555555); - cnt1 = (cnt1 & 0x33333333) + ((cnt1 >>> 2) & 0x33333333); - cnt1 += (cnt2 & 0x33333333) + ((cnt2 >>> 2) & 0x33333333); - cnt += (cnt1 & 0x0F0F0F0F) + ((cnt1 >>> 4) & 0x0F0F0F0F); - } - cnt = (cnt & 0x00FF00FF) + ((cnt >>> 8) & 0x00FF00FF); - cnt += cnt >>> 16; - return cnt & 0x00000FFFF; - } - - // used by count_2() - private static int merging2_2(int[] buffer, int x) - { - int cnt1 = buffer[x + 1]; - int cnt2 = buffer[x + 3]; - final int w = buffer[x + 5]; - cnt1 = cnt1 - ((cnt1 >>> 1) & 0x55555555) + (w & 0x55555555); - cnt2 = cnt2 - ((cnt2 >>> 1) & 0x55555555) + ((w >>> 1) & 0x55555555); - cnt1 = (cnt1 & 0x33333333) + ((cnt1 >>> 2) & 0x33333333); - cnt2 = (cnt2 & 0x33333333) + ((cnt2 >>> 2) & 0x33333333); - cnt1 += cnt2; - cnt1 = (cnt1 & 0x0F0F0F0F) + ((cnt1 >>> 4) & 0x0F0F0F0F); - cnt1 += cnt1 >>> 8; - cnt1 += cnt1 >>> 16; - return cnt1 & 0x000000FF; - } - - // used by count_2() - private static int popcount_fbsd2_2(int[] data, int x, int n) - { - int cnt = 0; - for (x++; x < n; x += 2) { - cnt += Integer.bitCount(data[x]); - } - return cnt; - } - - /** - * Test - * - * @param args - */ - public static void main(String[] args) - { - final int trials = 10000; - final int maxLength = 10000; - - Random rnd = new Random(); - final int seed = rnd.nextInt(); - - System.out.print("Test correctness... "); - rnd = new Random(seed); - for (int i = 0; i < trials; i++) { - int[] x = new int[rnd.nextInt(maxLength)]; - for (int j = 0; j < x.length; j++) { - x[j] = rnd.nextInt(Integer.MAX_VALUE); - } - - int size1 = 0; - for (int j = 0; j < x.length; j++) { - size1 += Integer.bitCount(x[j]); - } - int size2 = count(x); - - if (size1 != size2) { - System.out.println("i = " + i); - System.out.println("ERRORE!"); - System.out.println(size1 + ", " + size2); - for (int j = 0; j < x.length; j++) { - System.out.format("x[%d] = %d --> %d\n", j, x[j], Integer.bitCount(x[j])); - } - return; - } - } - System.out.println("done!"); - - System.out.print("Test correctness II... 
"); - rnd = new Random(seed); - for (int i = 0; i < trials; i++) { - int[] x = new int[rnd.nextInt(maxLength << 1)]; - for (int j = 1; j < x.length; j += 2) { - x[j] = rnd.nextInt(Integer.MAX_VALUE); - } - - int size1 = 0; - for (int j = 1; j < x.length; j += 2) { - size1 += Integer.bitCount(x[j]); - } - int size2 = count_2(x); - - if (size1 != size2) { - System.out.println("i = " + i); - System.out.println("ERRORE!"); - System.out.println(size1 + ", " + size2); - for (int j = 1; j < x.length; j += 2) { - System.out.format("x[%d] = %d --> %d\n", j, x[j], Integer.bitCount(x[j])); - } - return; - } - } - System.out.println("done!"); - - System.out.print("Test time count(): "); - rnd = new Random(seed); - long t = System.currentTimeMillis(); - for (int i = 0; i < trials; i++) { - int[] x = new int[rnd.nextInt(maxLength)]; - for (int j = 0; j < x.length; j++) { - x[j] = rnd.nextInt(Integer.MAX_VALUE); - } - - @SuppressWarnings("unused") - int size = 0; - for (int j = 0; j < x.length; j++) { - size += Integer.bitCount(x[j]); - } - } - System.out.println(System.currentTimeMillis() - t); - - System.out.print("Test time BitCount.count(): "); - rnd = new Random(seed); - t = System.currentTimeMillis(); - for (int i = 0; i < trials; i++) { - int[] x = new int[rnd.nextInt(maxLength)]; - for (int j = 0; j < x.length; j++) { - x[j] = rnd.nextInt(Integer.MAX_VALUE); - } - count(x); - } - System.out.println(System.currentTimeMillis() - t); - - System.out.print("Test II time count(): "); - rnd = new Random(seed); - t = System.currentTimeMillis(); - for (int i = 0; i < trials; i++) { - int[] x = new int[rnd.nextInt(maxLength << 1)]; - for (int j = 1; j < x.length; j += 2) { - x[j] = rnd.nextInt(Integer.MAX_VALUE); - } - - @SuppressWarnings("unused") - int size = 0; - for (int j = 1; j < x.length; j += 2) { - size += Integer.bitCount(x[j]); - } - } - System.out.println(System.currentTimeMillis() - t); - - System.out.print("Test II time BitCount.count(): "); - rnd = new Random(seed); - t = System.currentTimeMillis(); - for (int i = 0; i < trials; i++) { - int[] x = new int[rnd.nextInt(maxLength << 1)]; - for (int j = 1; j < x.length; j += 2) { - x[j] = rnd.nextInt(Integer.MAX_VALUE); - } - count_2(x); - } - System.out.println(System.currentTimeMillis() - t); - } -} diff --git a/extendedset/src/test/java/io/druid/extendedset/intset/ImmutableConciseSetTest.java b/extendedset/src/test/java/io/druid/extendedset/intset/ImmutableConciseSetTest.java index 794e6a7cb66..2a09751f21e 100755 --- a/extendedset/src/test/java/io/druid/extendedset/intset/ImmutableConciseSetTest.java +++ b/extendedset/src/test/java/io/druid/extendedset/intset/ImmutableConciseSetTest.java @@ -17,6 +17,7 @@ package io.druid.extendedset.intset; import com.google.common.collect.Lists; +import io.druid.java.util.common.StringUtils; import junit.framework.Assert; import org.junit.Test; @@ -1338,7 +1339,7 @@ public class ImmutableConciseSetTest for (int i = 0; i < length; i++) { final int n = intIterator.next(); if (i != n) { - Assert.assertEquals(String.format("Failure at bit [%d] on length [%d]", i, length), i, n); + Assert.assertEquals(StringUtils.format("Failure at bit [%d] on length [%d]", i, length), i, n); } } NoSuchElementException ex = null; diff --git a/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/AmbariMetricsEmitter.java b/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/AmbariMetricsEmitter.java index 17b08ced868..9356c27beec 100644 --- 
a/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/AmbariMetricsEmitter.java +++ b/extensions-contrib/ambari-metrics-emitter/src/main/java/io/druid/emitter/ambari/metrics/AmbariMetricsEmitter.java @@ -26,6 +26,7 @@ import com.metamx.emitter.core.Emitter; import com.metamx.emitter.core.Event; import com.metamx.emitter.service.AlertEvent; import com.metamx.emitter.service.ServiceMetricEvent; +import io.druid.java.util.common.StringUtils; import org.apache.hadoop.metrics2.sink.timeline.AbstractTimelineMetricsSink; import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric; import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics; @@ -70,7 +71,7 @@ public class AmbariMetricsEmitter extends AbstractTimelineMetricsSink implements this.emitterList = emitterList; this.timelineMetricConverter = config.getDruidToTimelineEventConverter(); this.eventsQueue = new LinkedBlockingQueue<>(config.getMaxQueueSize()); - this.collectorURI = String.format( + this.collectorURI = StringUtils.format( "%s://%s:%s%s", config.getProtocol(), config.getHostname(), diff --git a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPusher.java b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPusher.java index 0b9f7bea9af..0716c2d157e 100644 --- a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPusher.java +++ b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureDataSegmentPusher.java @@ -26,6 +26,7 @@ import com.google.inject.Inject; import com.microsoft.azure.storage.StorageException; import io.druid.java.util.common.CompressionUtils; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.segment.SegmentUtils; import io.druid.segment.loading.DataSegmentPusher; @@ -88,8 +89,8 @@ public class AzureDataSegmentPusher implements DataSegmentPusher final String storageDir = this.getStorageDir(segment); return ImmutableMap.of( - "index", String.format("%s/%s", storageDir, AzureStorageDruidModule.INDEX_ZIP_FILE_NAME), - "descriptor", String.format("%s/%s", storageDir, AzureStorageDruidModule.DESCRIPTOR_FILE_NAME) + "index", StringUtils.format("%s/%s", storageDir, AzureStorageDruidModule.INDEX_ZIP_FILE_NAME), + "descriptor", StringUtils.format("%s/%s", storageDir, AzureStorageDruidModule.DESCRIPTOR_FILE_NAME) ); } diff --git a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureStorageDruidModule.java b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureStorageDruidModule.java index a455347930e..0ed18ae1d5c 100644 --- a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureStorageDruidModule.java +++ b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureStorageDruidModule.java @@ -33,6 +33,7 @@ import io.druid.guice.Binders; import io.druid.guice.JsonConfigProvider; import io.druid.guice.LazySingleton; import io.druid.initialization.DruidModule; +import io.druid.java.util.common.StringUtils; import java.net.URISyntaxException; import java.security.InvalidKeyException; @@ -103,7 +104,7 @@ public class AzureStorageDruidModule implements DruidModule throws URISyntaxException, InvalidKeyException { CloudStorageAccount account = CloudStorageAccount.parse( - String.format( + StringUtils.format( STORAGE_CONNECTION_STRING, config.getProtocol(), config.getAccount(), diff --git 
a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureTaskLogs.java b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureTaskLogs.java index 1ec0c2c7b67..790ec477710 100644 --- a/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureTaskLogs.java +++ b/extensions-contrib/azure-extensions/src/main/java/io/druid/storage/azure/AzureTaskLogs.java @@ -24,7 +24,8 @@ import com.google.common.base.Throwables; import com.google.common.io.ByteSource; import com.google.inject.Inject; import com.microsoft.azure.storage.StorageException; - +import io.druid.java.util.common.IOE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.tasklogs.TaskLogs; @@ -106,13 +107,13 @@ public class AzureTaskLogs implements TaskLogs { } ); } catch (StorageException | URISyntaxException e) { - throw new IOException(String.format("Failed to stream logs from: %s", taskKey), e); + throw new IOE(e, "Failed to stream logs from: %s", taskKey); } } private String getTaskLogKey(String taskid) { - return String.format("%s/%s/log", config.getPrefix(), taskid); + return StringUtils.format("%s/%s/log", config.getPrefix(), taskid); } @Override diff --git a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPusherTest.java b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPusherTest.java index e5913c50f5e..0f54dedf08a 100644 --- a/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPusherTest.java +++ b/extensions-contrib/azure-extensions/src/test/java/io/druid/storage/azure/AzureDataSegmentPusherTest.java @@ -27,6 +27,7 @@ import com.google.common.io.Files; import com.microsoft.azure.storage.StorageException; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.MapUtils; +import io.druid.java.util.common.StringUtils; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMockSupport; @@ -116,9 +117,9 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport final String storageDir = pusher.getStorageDir(dataSegment); Map paths = pusher.getAzurePaths(dataSegment); - assertEquals(String.format("%s/%s", storageDir, AzureStorageDruidModule.INDEX_ZIP_FILE_NAME), paths.get("index")); + assertEquals(StringUtils.format("%s/%s", storageDir, AzureStorageDruidModule.INDEX_ZIP_FILE_NAME), paths.get("index")); assertEquals( - String.format("%s/%s", storageDir, AzureStorageDruidModule.DESCRIPTOR_FILE_NAME), + StringUtils.format("%s/%s", storageDir, AzureStorageDruidModule.DESCRIPTOR_FILE_NAME), paths.get("descriptor") ); } diff --git a/extensions-contrib/druid-rocketmq/src/main/java/io/druid/firehose/rocketmq/RocketMQFirehoseFactory.java b/extensions-contrib/druid-rocketmq/src/main/java/io/druid/firehose/rocketmq/RocketMQFirehoseFactory.java index 7ef256339d8..8ee403a9484 100644 --- a/extensions-contrib/druid-rocketmq/src/main/java/io/druid/firehose/rocketmq/RocketMQFirehoseFactory.java +++ b/extensions-contrib/druid-rocketmq/src/main/java/io/druid/firehose/rocketmq/RocketMQFirehoseFactory.java @@ -38,6 +38,7 @@ import io.druid.data.input.ByteBufferInputRowParser; import io.druid.data.input.Firehose; import io.druid.data.input.FirehoseFactory; import io.druid.data.input.InputRow; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import 
io.druid.java.util.common.parsers.ParseException; @@ -556,7 +557,7 @@ public class RocketMQFirehoseFactory implements FirehoseFactory 2) { - LOGGER.debug(String.format( + LOGGER.debug(StringUtils.format( "%s@%s is consuming the following message queues: %s", defaultMQPullConsumer.getClientIP(), defaultMQPullConsumer.getInstanceName(), diff --git a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentPusher.java b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentPusher.java index bce847d8822..7f182ae9085 100644 --- a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentPusher.java +++ b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleDataSegmentPusher.java @@ -27,6 +27,7 @@ import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; import com.google.inject.Inject; import io.druid.java.util.common.CompressionUtils; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.segment.SegmentUtils; import io.druid.segment.loading.DataSegmentPusher; @@ -71,7 +72,7 @@ public class GoogleDataSegmentPusher implements DataSegmentPusher @Override public String getPathForHadoop() { - return String.format("gs://%s/%s", config.getBucket(), config.getPrefix()); + return StringUtils.format("gs://%s/%s", config.getBucket(), config.getPrefix()); } public File createDescriptorFile(final ObjectMapper jsonMapper, final DataSegment segment) diff --git a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleTaskLogs.java b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleTaskLogs.java index be579253807..b3273e78aa8 100644 --- a/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleTaskLogs.java +++ b/extensions-contrib/google-extensions/src/main/java/io/druid/storage/google/GoogleTaskLogs.java @@ -23,13 +23,14 @@ import com.google.api.client.http.InputStreamContent; import com.google.common.base.Optional; import com.google.common.io.ByteSource; import com.google.inject.Inject; +import io.druid.java.util.common.IOE; import io.druid.java.util.common.logger.Logger; import io.druid.tasklogs.TaskLogs; import java.io.File; import java.io.FileInputStream; -import java.io.InputStream; import java.io.IOException; +import java.io.InputStream; public class GoogleTaskLogs implements TaskLogs { private static final Logger LOG = new Logger(GoogleTaskLogs.class); @@ -93,7 +94,7 @@ public class GoogleTaskLogs implements TaskLogs { } ); } catch (IOException e) { - throw new IOException(String.format("Failed to stream logs from: %s", taskKey), e); + throw new IOE(e, "Failed to stream logs from: %s", taskKey); } } diff --git a/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/io/druid/firehose/kafka/KafkaEightSimpleConsumerFirehoseFactory.java b/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/io/druid/firehose/kafka/KafkaEightSimpleConsumerFirehoseFactory.java index fd2755aff2f..fff7eaeeb57 100644 --- a/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/io/druid/firehose/kafka/KafkaEightSimpleConsumerFirehoseFactory.java +++ b/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/io/druid/firehose/kafka/KafkaEightSimpleConsumerFirehoseFactory.java @@ -32,6 +32,7 @@ import io.druid.data.input.FirehoseFactoryV2; import io.druid.data.input.FirehoseV2; import 
io.druid.data.input.InputRow; import io.druid.firehose.kafka.KafkaSimpleConsumer.BytesMessageWithOffset; +import io.druid.java.util.common.StringUtils; import java.io.Closeable; import java.io.IOException; @@ -323,7 +324,7 @@ public class KafkaEightSimpleConsumerFirehoseFactory implements } }; thread.setDaemon(true); - thread.setName(String.format("kafka-%s-%s", topic, partitionId)); + thread.setName(StringUtils.format("kafka-%s-%s", topic, partitionId)); thread.start(); } diff --git a/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/io/druid/firehose/kafka/KafkaSimpleConsumer.java b/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/io/druid/firehose/kafka/KafkaSimpleConsumer.java index c320b85f80a..693e1defb7d 100644 --- a/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/io/druid/firehose/kafka/KafkaSimpleConsumer.java +++ b/extensions-contrib/kafka-eight-simpleConsumer/src/main/java/io/druid/firehose/kafka/KafkaSimpleConsumer.java @@ -24,6 +24,7 @@ import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.net.HostAndPort; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.FunctionalIterable; import io.druid.java.util.common.logger.Logger; import kafka.api.FetchRequest; @@ -96,7 +97,7 @@ public class KafkaSimpleConsumer this.allBrokers = Collections.unmodifiableList(brokerList); this.topic = topic; this.partitionId = partitionId; - this.clientId = String.format("%s_%d_%s", topic, partitionId, clientId); + this.clientId = StringUtils.format("%s_%d_%s", topic, partitionId, clientId); this.leaderLookupClientId = clientId + "leaderLookup"; this.replicaBrokers = new ArrayList<>(); this.replicaBrokers.addAll(this.allBrokers); diff --git a/extensions-contrib/orc-extensions/src/main/java/io/druid/data/input/orc/OrcHadoopInputRowParser.java b/extensions-contrib/orc-extensions/src/main/java/io/druid/data/input/orc/OrcHadoopInputRowParser.java index 93785e4ed7f..7541b7d3d8b 100644 --- a/extensions-contrib/orc-extensions/src/main/java/io/druid/data/input/orc/OrcHadoopInputRowParser.java +++ b/extensions-contrib/orc-extensions/src/main/java/io/druid/data/input/orc/OrcHadoopInputRowParser.java @@ -29,7 +29,7 @@ import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.ParseSpec; import io.druid.data.input.impl.TimestampSpec; -import org.apache.commons.lang.StringUtils; +import io.druid.java.util.common.StringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.ql.io.orc.OrcSerde; import org.apache.hadoop.hive.ql.io.orc.OrcStruct; @@ -107,13 +107,13 @@ public class OrcHadoopInputRowParser implements InputRowParser } TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeString); Preconditions.checkArgument(typeInfo instanceof StructTypeInfo, - String.format("typeString should be struct type but not [%s]", typeString)); + StringUtils.format("typeString should be struct type but not [%s]", typeString)); Properties table = getTablePropertiesFromStructTypeInfo((StructTypeInfo)typeInfo); serde.initialize(new Configuration(), table); try { oip = (StructObjectInspector) serde.getObjectInspector(); } catch (SerDeException e) { - e.printStackTrace(); + throw new RuntimeException(e); } } @@ -170,7 +170,8 @@ public class OrcHadoopInputRowParser implements InputRowParser builder.append(parseSpec.getTimestampSpec().getTimestampColumn()).append(":string"); if 
(parseSpec.getDimensionsSpec().getDimensionNames().size() > 0) { builder.append(","); - builder.append(StringUtils.join(parseSpec.getDimensionsSpec().getDimensionNames(), ":string,")).append(":string"); + builder.append(String.join(":string,", parseSpec.getDimensionsSpec().getDimensionNames())); + builder.append(":string"); } builder.append(">"); @@ -180,17 +181,19 @@ public class OrcHadoopInputRowParser implements InputRowParser public static Properties getTablePropertiesFromStructTypeInfo(StructTypeInfo structTypeInfo) { Properties table = new Properties(); - table.setProperty("columns", StringUtils.join(structTypeInfo.getAllStructFieldNames(), ",")); - table.setProperty("columns.types", StringUtils.join( - Lists.transform(structTypeInfo.getAllStructFieldTypeInfos(), + table.setProperty("columns", String.join(",", structTypeInfo.getAllStructFieldNames())); + table.setProperty("columns.types", String.join( + ",", + Lists.transform( + structTypeInfo.getAllStructFieldTypeInfos(), new Function() { @Nullable @Override public String apply(@Nullable TypeInfo typeInfo) { return typeInfo.getTypeName(); } - }), - "," + } + ) )); return table; diff --git a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java index a83b8c9e0bc..ffb058eac76 100644 --- a/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java +++ b/extensions-contrib/orc-extensions/src/test/java/io/druid/data/input/orc/OrcIndexGeneratorJobTest.java @@ -252,7 +252,7 @@ public class OrcIndexGeneratorJobTest for (DateTime currTime = interval.getStart(); currTime.isBefore(interval.getEnd()); currTime = currTime.plusDays(1)) { Integer[][] shardInfo = shardInfoForEachSegment[segmentNum++]; File segmentOutputFolder = new File( - String.format( + StringUtils.format( "%s/%s/%s_%s/%s", config.getSchema().getIOConfig().getSegmentOutputPath(), config.getSchema().getDataSchema().getDataSource(), diff --git a/extensions-contrib/rabbitmq/src/test/java/io/druid/examples/rabbitmq/RabbitMQProducerMain.java b/extensions-contrib/rabbitmq/src/test/java/io/druid/examples/rabbitmq/RabbitMQProducerMain.java index a6c68831906..ee8884d043a 100644 --- a/extensions-contrib/rabbitmq/src/test/java/io/druid/examples/rabbitmq/RabbitMQProducerMain.java +++ b/extensions-contrib/rabbitmq/src/test/java/io/druid/examples/rabbitmq/RabbitMQProducerMain.java @@ -36,6 +36,7 @@ import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.List; +import java.util.Locale; import java.util.Random; /** @@ -174,11 +175,11 @@ public class RabbitMQProducerMain int interval = Integer.parseInt(cmd.getOptionValue("interval", "10")); int delay = Integer.parseInt(cmd.getOptionValue("delay", "100")); - SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss"); + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss", Locale.ENGLISH); Date stop = sdf.parse(cmd.getOptionValue("stop", sdf.format(new Date()))); Random r = new Random(); - Calendar timer = Calendar.getInstance(); + Calendar timer = Calendar.getInstance(Locale.ENGLISH); timer.setTime(sdf.parse(cmd.getOptionValue("start", "2010-01-01T00:00:00"))); String msg_template = "{\"utcdt\": \"%s\", \"wp\": %d, \"gender\": \"%s\", \"age\": %d}"; @@ -188,12 +189,12 @@ public class RabbitMQProducerMain channel.exchangeDeclare(exchange, type, durable, autoDelete, null); - do{ + do { int wp = 
(10 + r.nextInt(90)) * 100; String gender = r.nextBoolean() ? "male" : "female"; int age = 20 + r.nextInt(70); - String line = String.format(msg_template, sdf.format(timer.getTime()), wp, gender, age); + String line = StringUtils.format(msg_template, sdf.format(timer.getTime()), wp, gender, age); channel.basicPublish(exchange, routingKey, null, StringUtils.toUtf8(line)); @@ -202,7 +203,7 @@ public class RabbitMQProducerMain timer.add(Calendar.SECOND, interval); Thread.sleep(delay); - }while((!single && stop.after(timer.getTime()))); + } while((!single && stop.after(timer.getTime()))); connection.close(); } diff --git a/extensions-contrib/sqlserver-metadata-storage/src/main/java/io/druid/metadata/storage/sqlserver/SQLServerConnector.java b/extensions-contrib/sqlserver-metadata-storage/src/main/java/io/druid/metadata/storage/sqlserver/SQLServerConnector.java index 52e0e814e13..97b36160025 100644 --- a/extensions-contrib/sqlserver-metadata-storage/src/main/java/io/druid/metadata/storage/sqlserver/SQLServerConnector.java +++ b/extensions-contrib/sqlserver-metadata-storage/src/main/java/io/druid/metadata/storage/sqlserver/SQLServerConnector.java @@ -18,12 +18,13 @@ */ package io.druid.metadata.storage.sqlserver; -import java.sql.SQLException; -import java.util.Arrays; -import java.util.HashSet; -import java.util.Set; -import java.util.regex.Pattern; - +import com.google.common.base.Supplier; +import com.google.inject.Inject; +import com.metamx.common.logger.Logger; +import io.druid.java.util.common.StringUtils; +import io.druid.metadata.MetadataStorageConnectorConfig; +import io.druid.metadata.MetadataStorageTablesConfig; +import io.druid.metadata.SQLMetadataConnector; import org.apache.commons.dbcp2.BasicDataSource; import org.skife.jdbi.v2.Binding; import org.skife.jdbi.v2.ColonPrefixNamedParamStatementRewriter; @@ -35,13 +36,11 @@ import org.skife.jdbi.v2.tweak.RewrittenStatement; import org.skife.jdbi.v2.tweak.StatementRewriter; import org.skife.jdbi.v2.util.StringMapper; -import com.google.common.base.Supplier; -import com.google.inject.Inject; -import com.metamx.common.logger.Logger; - -import io.druid.metadata.MetadataStorageConnectorConfig; -import io.druid.metadata.MetadataStorageTablesConfig; -import io.druid.metadata.SQLMetadataConnector; +import java.sql.SQLException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; +import java.util.regex.Pattern; @SuppressWarnings("nls") public class SQLServerConnector extends SQLMetadataConnector @@ -236,7 +235,7 @@ public class SQLServerConnector extends SQLMetadataConnector @Override public Void withHandle(Handle handle) throws Exception { - handle.createStatement(String.format( + handle.createStatement(StringUtils.format( "MERGE INTO %1$s WITH (UPDLOCK, HOLDLOCK) as target" + " USING " + " (:key, :value) as source (%2$s, %3$s)" diff --git a/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroHadoopInputRowParserTest.java b/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroHadoopInputRowParserTest.java index 6d4092e12bc..7683e31d0b1 100644 --- a/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroHadoopInputRowParserTest.java +++ b/extensions-core/avro-extensions/src/test/java/io/druid/data/input/AvroHadoopInputRowParserTest.java @@ -21,6 +21,7 @@ package io.druid.data.input; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.io.Closeables; import com.google.common.io.Files; +import io.druid.java.util.common.StringUtils; import 
org.apache.avro.file.DataFileReader; import org.apache.avro.file.DataFileWriter; import org.apache.avro.file.FileReader; @@ -105,7 +106,7 @@ public class AvroHadoopInputRowParserTest // 1. read avro files into Pig pigServer = new PigServer(ExecType.LOCAL); pigServer.registerQuery( - String.format( + StringUtils.format( "A = LOAD '%s' USING %s;", someAvroDatumFile, inputStorage diff --git a/extensions-core/caffeine-cache/src/main/java/io/druid/client/cache/CacheExecutorFactory.java b/extensions-core/caffeine-cache/src/main/java/io/druid/client/cache/CacheExecutorFactory.java index f4ae9465d03..5979d0a662e 100644 --- a/extensions-core/caffeine-cache/src/main/java/io/druid/client/cache/CacheExecutorFactory.java +++ b/extensions-core/caffeine-cache/src/main/java/io/druid/client/cache/CacheExecutorFactory.java @@ -21,6 +21,8 @@ package io.druid.client.cache; import com.fasterxml.jackson.annotation.JsonCreator; import io.druid.concurrent.Execs; +import io.druid.java.util.common.StringUtils; + import java.util.concurrent.Executor; import java.util.concurrent.ForkJoinPool; @@ -53,6 +55,6 @@ public enum CacheExecutorFactory @JsonCreator public static CacheExecutorFactory from(String str) { - return Enum.valueOf(CacheExecutorFactory.class, str.toUpperCase()); + return Enum.valueOf(CacheExecutorFactory.class, StringUtils.toUpperCase(str)); } } diff --git a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosUtil.java b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosUtil.java index fd598f05a25..27393b3e3e3 100644 --- a/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosUtil.java +++ b/extensions-core/druid-kerberos/src/main/java/io/druid/security/kerberos/DruidKerberosUtil.java @@ -21,7 +21,6 @@ package io.druid.security.kerberos; import com.google.common.base.Strings; import io.druid.java.util.common.ISE; -import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import org.apache.commons.codec.binary.Base64; import org.apache.hadoop.conf.Configuration; @@ -40,6 +39,7 @@ import java.io.IOException; import java.net.CookieStore; import java.net.HttpCookie; import java.net.URI; +import java.nio.charset.StandardCharsets; import java.util.List; import java.util.concurrent.locks.ReentrantLock; @@ -81,7 +81,7 @@ public class DruidKerberosUtil byte[] outToken = gssContext.initSecContext(inToken, 0, inToken.length); gssContext.dispose(); // Base64 encoded and stringified token for server - return StringUtils.fromUtf8(base64codec.encode(outToken)); + return new String(base64codec.encode(outToken), StandardCharsets.US_ASCII); } catch (GSSException | IllegalAccessException | NoSuchFieldException | ClassNotFoundException e) { throw new AuthenticationException(e); diff --git a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentFinder.java b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentFinder.java index 9d7dc5b8fb1..6fba009cf86 100644 --- a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentFinder.java +++ b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentFinder.java @@ -22,12 +22,11 @@ package io.druid.storage.hdfs; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Sets; import com.google.inject.Inject; - import io.druid.java.util.common.logger.Logger; import io.druid.segment.loading.DataSegmentFinder; import 
io.druid.segment.loading.SegmentLoadingException; import io.druid.timeline.DataSegment; -import org.apache.commons.lang.StringUtils; +import io.druid.java.util.common.StringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocatedFileStatus; @@ -85,8 +84,8 @@ public class HdfsDataSegmentFinder implements DataSegmentFinder final String descriptorParts[] = path.getName().split("_"); if (descriptorParts.length == 2 && descriptorParts[1].equals("descriptor.json") - && StringUtils.isNumeric(descriptorParts[0])) { - indexZip = new Path(path.getParent(), String.format("%s_index.zip", descriptorParts[0])); + && org.apache.commons.lang.StringUtils.isNumeric(descriptorParts[0])) { + indexZip = new Path(path.getParent(), StringUtils.format("%s_index.zip", descriptorParts[0])); } else { indexZip = new Path(path.getParent(), "index.zip"); } diff --git a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentKiller.java b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentKiller.java index c7a547dc34a..d910ae51b0a 100644 --- a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentKiller.java +++ b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentKiller.java @@ -82,7 +82,10 @@ public class HdfsDataSegmentKiller implements DataSegmentKiller segmentPath.toString() ); } - Path descriptorPath = new Path(segmentPath.getParent(), String.format("%s_descriptor.json", zipParts[0])); + Path descriptorPath = new Path( + segmentPath.getParent(), + io.druid.java.util.common.StringUtils.format("%s_descriptor.json", zipParts[0]) + ); //delete partitionNumber_descriptor.json if (!fs.delete(descriptorPath, false)) { throw new SegmentLoadingException( diff --git a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPuller.java b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPuller.java index b265e9b228f..76806fb6e41 100644 --- a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPuller.java +++ b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPuller.java @@ -27,6 +27,7 @@ import io.druid.java.util.common.CompressionUtils; import io.druid.java.util.common.FileUtils; import io.druid.java.util.common.IAE; import io.druid.java.util.common.RetryUtils; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.UOE; import io.druid.java.util.common.logger.Logger; import io.druid.segment.loading.DataSegmentPuller; @@ -318,7 +319,7 @@ public class HdfsDataSegmentPuller implements DataSegmentPuller, URIDataPuller public String getVersion(URI uri) throws IOException { try { - return String.format("%d", buildFileObject(uri, config).getLastModified()); + return StringUtils.format("%d", buildFileObject(uri, config).getLastModified()); } catch (HdfsIOException ex) { throw ex.getIOException(); diff --git a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPusher.java b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPusher.java index 25dcf4fb4f9..59a837089db 100644 --- a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPusher.java +++ b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/HdfsDataSegmentPusher.java @@ -26,6 +26,8 @@ import com.google.common.io.ByteSource; import com.google.inject.Inject; import 
io.druid.common.utils.UUIDUtils; import io.druid.java.util.common.CompressionUtils; +import io.druid.java.util.common.IOE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.segment.SegmentUtils; import io.druid.segment.loading.DataSegmentPusher; @@ -98,7 +100,7 @@ public class HdfsDataSegmentPusher implements DataSegmentPusher storageDir ); - Path tmpIndexFile = new Path(String.format( + Path tmpIndexFile = new Path(StringUtils.format( "%s/%s/%s/%s_index.zip", fullyQualifiedStorageDirectory, segment.getDataSource(), @@ -114,13 +116,13 @@ public class HdfsDataSegmentPusher implements DataSegmentPusher final DataSegment dataSegment; try (FSDataOutputStream out = fs.create(tmpIndexFile)) { size = CompressionUtils.zip(inDir, out); - final Path outIndexFile = new Path(String.format( + final Path outIndexFile = new Path(StringUtils.format( "%s/%s/%d_index.zip", fullyQualifiedStorageDirectory, storageDir, segment.getShardSpec().getPartitionNum() )); - final Path outDescriptorFile = new Path(String.format( + final Path outDescriptorFile = new Path(StringUtils.format( "%s/%s/%d_descriptor.json", fullyQualifiedStorageDirectory, storageDir, @@ -133,7 +135,7 @@ public class HdfsDataSegmentPusher implements DataSegmentPusher final Path tmpDescriptorFile = new Path( tmpIndexFile.getParent(), - String.format("%s_descriptor.json", dataSegment.getShardSpec().getPartitionNum()) + StringUtils.format("%s_descriptor.json", dataSegment.getShardSpec().getPartitionNum()) ); log.info("Creating descriptor file at[%s]", tmpDescriptorFile); @@ -171,11 +173,7 @@ public class HdfsDataSegmentPusher implements DataSegmentPusher to ); } else { - throw new IOException(String.format( - "Failed to rename temp Index file[%s] and final segment path[%s] is not present.", - from, - to - )); + throw new IOE("Failed to rename temp Index file[%s] and final segment path[%s] is not present.", from, to); } } } @@ -214,7 +212,7 @@ public class HdfsDataSegmentPusher implements DataSegmentPusher { return JOINER.join( segment.getDataSource(), - String.format( + StringUtils.format( "%s_%s", segment.getInterval().getStart().toString(ISODateTimeFormat.basicDateTime()), segment.getInterval().getEnd().toString(ISODateTimeFormat.basicDateTime()) @@ -226,7 +224,7 @@ public class HdfsDataSegmentPusher implements DataSegmentPusher @Override public String makeIndexPathName(DataSegment dataSegment, String indexName) { - return String.format( + return StringUtils.format( "./%s/%d_%s", this.getStorageDir(dataSegment), dataSegment.getShardSpec().getPartitionNum(), diff --git a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/tasklog/HdfsTaskLogs.java b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/tasklog/HdfsTaskLogs.java index e539aaba994..5851c2fa48a 100644 --- a/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/tasklog/HdfsTaskLogs.java +++ b/extensions-core/hdfs-storage/src/main/java/io/druid/storage/hdfs/tasklog/HdfsTaskLogs.java @@ -22,6 +22,7 @@ import com.google.common.base.Optional; import com.google.common.io.ByteSource; import com.google.common.io.ByteStreams; import com.google.inject.Inject; +import io.druid.java.util.common.IOE; import io.druid.java.util.common.logger.Logger; import io.druid.tasklogs.TaskLogs; import org.apache.hadoop.conf.Configuration; @@ -135,7 +136,7 @@ public class HdfsTaskLogs implements TaskLogs if (fs.exists(taskLogDir)) { if (!fs.isDirectory(taskLogDir)) { - throw new 
IOException(String.format("taskLogDir [%s] must be a directory.", taskLogDir)); + throw new IOE("taskLogDir [%s] must be a directory.", taskLogDir); } RemoteIterator iter = fs.listLocatedStatus(taskLogDir); diff --git a/extensions-core/hdfs-storage/src/test/java/io/druid/indexing/common/tasklogs/HdfsTaskLogsTest.java b/extensions-core/hdfs-storage/src/test/java/io/druid/indexing/common/tasklogs/HdfsTaskLogsTest.java index 0ff2f34f461..9b645a2abd7 100644 --- a/extensions-core/hdfs-storage/src/test/java/io/druid/indexing/common/tasklogs/HdfsTaskLogsTest.java +++ b/extensions-core/hdfs-storage/src/test/java/io/druid/indexing/common/tasklogs/HdfsTaskLogsTest.java @@ -57,7 +57,7 @@ public class HdfsTaskLogsTest final Map expected = ImmutableMap.of(0L, "blah", 1L, "lah", -2L, "ah", -5L, "blah"); for (Map.Entry entry : expected.entrySet()) { final String string = readLog(taskLogs, "foo", entry.getKey()); - Assert.assertEquals(String.format("Read with offset %,d", entry.getKey()), string, entry.getValue()); + Assert.assertEquals(StringUtils.format("Read with offset %,d", entry.getKey()), string, entry.getValue()); } } diff --git a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java index bc90dda9c34..8a835caa5b0 100644 --- a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java +++ b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentFinderTest.java @@ -24,6 +24,7 @@ import com.fasterxml.jackson.databind.jsontype.NamedType; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.IOE; import io.druid.storage.hdfs.HdfsDataSegmentFinder; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NumberedShardSpec; @@ -157,7 +158,7 @@ public class HdfsDataSegmentFinderTest hdfsTmpDir = File.createTempFile("hdfsDataSource", "dir"); if (!hdfsTmpDir.delete()) { - throw new IOException(String.format("Unable to delete hdfsTmpDir [%s]", hdfsTmpDir.getAbsolutePath())); + throw new IOE("Unable to delete hdfsTmpDir [%s]", hdfsTmpDir.getAbsolutePath()); } conf = new Configuration(true); conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsTmpDir.getAbsolutePath()); diff --git a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentPullerTest.java b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentPullerTest.java index 04d9201959d..343454cc35b 100644 --- a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentPullerTest.java +++ b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsDataSegmentPullerTest.java @@ -21,6 +21,7 @@ package io.druid.segment.loading; import com.google.common.io.ByteStreams; import io.druid.java.util.common.CompressionUtils; +import io.druid.java.util.common.IOE; import io.druid.java.util.common.StringUtils; import io.druid.storage.hdfs.HdfsDataSegmentPuller; import org.apache.commons.io.FileUtils; @@ -63,7 +64,7 @@ public class HdfsDataSegmentPullerTest { hdfsTmpDir = File.createTempFile("hdfsHandlerTest", "dir"); if (!hdfsTmpDir.delete()) { - throw new IOException(String.format("Unable to delete hdfsTmpDir [%s]", hdfsTmpDir.getAbsolutePath())); + throw new IOE("Unable to delete hdfsTmpDir [%s]", hdfsTmpDir.getAbsolutePath()); } 
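// A recurring pattern in the hunks above: throw new IOException(String.format(...))
// becomes throw new IOE(...). A minimal sketch of such a format-accepting
// exception, assuming the locale-pinned StringUtils.format() helper used
// throughout this patch (illustrative shape, not the project's actual source):

import io.druid.java.util.common.StringUtils;

import java.io.IOException;

public class IOE extends IOException
{
  public IOE(String formatText, Object... arguments)
  {
    // Formatting happens once, here, with a fixed locale inside
    // StringUtils.format(), so call sites stay flat.
    super(StringUtils.format(formatText, arguments));
  }

  public IOE(Throwable cause, String formatText, Object... arguments)
  {
    super(StringUtils.format(formatText, arguments), cause);
  }
}

// ISE and UOE, used in the Log4jShutdown and VersionedIntervalTimeline hunks
// above, follow the same shape over IllegalStateException and
// UnsupportedOperationException, which is why those call sites shrink from a
// nested String.format() to a plain argument list.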
conf = new Configuration(true); conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsTmpDir.getAbsolutePath()); diff --git a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsFileTimestampVersionFinderTest.java b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsFileTimestampVersionFinderTest.java index f5b404a170f..2e5b13dcf57 100644 --- a/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsFileTimestampVersionFinderTest.java +++ b/extensions-core/hdfs-storage/src/test/java/io/druid/segment/loading/HdfsFileTimestampVersionFinderTest.java @@ -20,6 +20,7 @@ package io.druid.segment.loading; import com.google.common.io.ByteStreams; +import io.druid.java.util.common.IOE; import io.druid.java.util.common.StringUtils; import io.druid.storage.hdfs.HdfsFileTimestampVersionFinder; import org.apache.commons.io.FileUtils; @@ -59,7 +60,7 @@ public class HdfsFileTimestampVersionFinderTest { hdfsTmpDir = File.createTempFile("hdfsHandlerTest", "dir"); if (!hdfsTmpDir.delete()) { - throw new IOException(String.format("Unable to delete hdfsTmpDir [%s]", hdfsTmpDir.getAbsolutePath())); + throw new IOE("Unable to delete hdfsTmpDir [%s]", hdfsTmpDir.getAbsolutePath()); } conf = new Configuration(true); conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsTmpDir.getAbsolutePath()); diff --git a/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentKillerTest.java b/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentKillerTest.java index 7f270b87836..d9118bf2292 100644 --- a/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentKillerTest.java +++ b/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentKillerTest.java @@ -21,6 +21,7 @@ package io.druid.storage.hdfs; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.StringUtils; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.apache.hadoop.conf.Configuration; @@ -173,8 +174,8 @@ public class HdfsDataSegmentKillerTest Assert.assertTrue(fs.mkdirs(path)); try (FSDataOutputStream os = fs.create(new Path( path, - String.format("%s_index.zip", partitionNumber) - )); FSDataOutputStream oos = fs.create(new Path(path, String.format("%s_descriptor.json", partitionNumber)))) { + StringUtils.format("%s_index.zip", partitionNumber) + )); FSDataOutputStream oos = fs.create(new Path(path, StringUtils.format("%s_descriptor.json", partitionNumber)))) { } } diff --git a/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java b/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java index 8191e60df2f..7caa3214d3c 100644 --- a/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java +++ b/extensions-core/hdfs-storage/src/test/java/io/druid/storage/hdfs/HdfsDataSegmentPusherTest.java @@ -42,6 +42,7 @@ import io.druid.indexer.HadoopIngestionSpec; import io.druid.indexer.JobHelper; import io.druid.jackson.DefaultObjectMapper; import io.druid.jackson.GranularityModule; +import io.druid.java.util.common.StringUtils; import io.druid.segment.loading.LocalDataSegmentPusher; import io.druid.segment.loading.LocalDataSegmentPusherConfig; import io.druid.timeline.DataSegment; @@ -139,7 +140,7 @@ public class HdfsDataSegmentPusherTest config.setStorageDirectory( scheme != null - ? 
String.format("%s://%s", scheme, storageDirectory.getAbsolutePath()) + ? StringUtils.format("%s://%s", scheme, storageDirectory.getAbsolutePath()) : storageDirectory.getAbsolutePath() ); HdfsDataSegmentPusher pusher = new HdfsDataSegmentPusher(config, conf, new DefaultObjectMapper()); @@ -159,7 +160,7 @@ public class HdfsDataSegmentPusherTest DataSegment segment = pusher.push(segmentDir, segmentToPush); - String indexUri = String.format( + String indexUri = StringUtils.format( "%s/%s/%d_index.zip", FileSystem.newInstance(conf).makeQualified(new Path(config.getStorageDirectory())).toUri().toString(), pusher.getStorageDir(segmentToPush), @@ -177,14 +178,14 @@ public class HdfsDataSegmentPusherTest // rename directory after push final String segmentPath = pusher.getStorageDir(segment); - File indexFile = new File(String.format( + File indexFile = new File(StringUtils.format( "%s/%s/%d_index.zip", storageDirectory, segmentPath, segment.getShardSpec().getPartitionNum() )); Assert.assertTrue(indexFile.exists()); - File descriptorFile = new File(String.format( + File descriptorFile = new File(StringUtils.format( "%s/%s/%d_descriptor.json", storageDirectory, segmentPath, @@ -193,7 +194,7 @@ public class HdfsDataSegmentPusherTest Assert.assertTrue(descriptorFile.exists()); // push twice will fail and temp dir cleaned - File outDir = new File(String.format("%s/%s", config.getStorageDirectory(), segmentPath)); + File outDir = new File(StringUtils.format("%s/%s", config.getStorageDirectory(), segmentPath)); outDir.setReadOnly(); try { pusher.push(segmentDir, segmentToPush); @@ -221,7 +222,7 @@ public class HdfsDataSegmentPusherTest config.setStorageDirectory( scheme != null - ? String.format("%s://%s", scheme, storageDirectory.getAbsolutePath()) + ? StringUtils.format("%s://%s", scheme, storageDirectory.getAbsolutePath()) : storageDirectory.getAbsolutePath() ); HdfsDataSegmentPusher pusher = new HdfsDataSegmentPusher(config, conf, new DefaultObjectMapper()); @@ -243,7 +244,7 @@ public class HdfsDataSegmentPusherTest for (int i = 0; i < numberOfSegments; i++) { final DataSegment pushedSegment = pusher.push(segmentDir, segments[i]); - String indexUri = String.format( + String indexUri = StringUtils.format( "%s/%s/%d_index.zip", FileSystem.newInstance(conf).makeQualified(new Path(config.getStorageDirectory())).toUri().toString(), pusher.getStorageDir(segments[i]), @@ -261,14 +262,14 @@ public class HdfsDataSegmentPusherTest // rename directory after push String segmentPath = pusher.getStorageDir(pushedSegment); - File indexFile = new File(String.format( + File indexFile = new File(StringUtils.format( "%s/%s/%d_index.zip", storageDirectory, segmentPath, pushedSegment.getShardSpec().getPartitionNum() )); Assert.assertTrue(indexFile.exists()); - File descriptorFile = new File(String.format( + File descriptorFile = new File(StringUtils.format( "%s/%s/%d_descriptor.json", storageDirectory, segmentPath, @@ -290,7 +291,7 @@ public class HdfsDataSegmentPusherTest // rename directory after push segmentPath = pusher.getStorageDir(fromDescriptorFileDataSegment); - indexFile = new File(String.format( + indexFile = new File(StringUtils.format( "%s/%s/%d_index.zip", storageDirectory, segmentPath, @@ -300,7 +301,7 @@ public class HdfsDataSegmentPusherTest // push twice will fail and temp dir cleaned - File outDir = new File(String.format("%s/%s", config.getStorageDirectory(), segmentPath)); + File outDir = new File(StringUtils.format("%s/%s", config.getStorageDirectory(), segmentPath)); outDir.setReadOnly(); try { 
pusher.push(segmentDir, segments[i]); diff --git a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregator.java b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregator.java index 25d3061550e..94a704e4393 100644 --- a/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregator.java +++ b/extensions-core/histogram/src/main/java/io/druid/query/aggregation/histogram/sql/QuantileSqlAggregator.java @@ -21,6 +21,7 @@ package io.druid.query.aggregation.histogram.sql; import com.google.common.base.Predicate; import com.google.common.collect.ImmutableList; +import io.druid.java.util.common.StringUtils; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.histogram.ApproximateHistogram; import io.druid.query.aggregation.histogram.ApproximateHistogramAggregatorFactory; @@ -87,7 +88,7 @@ public class QuantileSqlAggregator implements SqlAggregator } final AggregatorFactory aggregatorFactory; - final String histogramName = String.format("%s:agg", name); + final String histogramName = StringUtils.format("%s:agg", name); final RexNode probabilityArg = Expressions.fromFieldAccess( rowSignature, project, diff --git a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramErrorBenchmark.java b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramErrorBenchmark.java index 3810e53ec8e..867bd13fbf7 100644 --- a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramErrorBenchmark.java +++ b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramErrorBenchmark.java @@ -23,6 +23,7 @@ import com.google.common.primitives.Floats; import io.druid.query.aggregation.Histogram; import java.util.Arrays; +import java.util.Locale; import java.util.Random; public class ApproximateHistogramErrorBenchmark @@ -104,10 +105,21 @@ public class ApproximateHistogramErrorBenchmark errs2[i] = tmp[1]; } - System.out - .format("Number of histograms for folding : %s \n", Arrays.toString(numHistsArray)); - System.out.format("Errors for approximate histogram : %s \n", Arrays.toString(errs1)); - System.out.format("Errors for approximate histogram, ruleFold : %s \n", Arrays.toString(errs2)); + System.out.printf( + Locale.ENGLISH, + "Number of histograms for folding : %s %n", + Arrays.toString(numHistsArray) + ); + System.out.printf( + Locale.ENGLISH, + "Errors for approximate histogram : %s %n", + Arrays.toString(errs1) + ); + System.out.printf( + Locale.ENGLISH, + "Errors for approximate histogram, ruleFold : %s %n", + Arrays.toString(errs2) + ); } private float[] getErrors() @@ -181,9 +193,9 @@ public class ApproximateHistogramErrorBenchmark System.out.println(ah1.toHistogram(breaks)); System.out.println("Approximate Histogram Rule Fold:"); System.out.println(ah2.toHistogram(breaks)); - System.out.format("Error for approximate histogram: %s \n", err1); - System.out.format("Error for approximate histogram, ruleFold: %s \n", err2); - System.out.format("Error ratio for AHRF: %s \n", err2 / err1); + System.out.printf(Locale.ENGLISH, "Error for approximate histogram: %f %n", err1); + System.out.printf(Locale.ENGLISH, "Error for approximate histogram, ruleFold: %f %n", err2); + System.out.printf(Locale.ENGLISH, "Error ratio for AHRF: %f %n", err2 / err1); } return new float[]{err1, err2, err2 / err1}; } diff 
--git a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramGroupByQueryTest.java b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramGroupByQueryTest.java index 55e1c51a45c..042a8c0f502 100644 --- a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramGroupByQueryTest.java +++ b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramGroupByQueryTest.java @@ -22,6 +22,7 @@ package io.druid.query.aggregation.histogram; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import io.druid.data.input.Row; +import io.druid.java.util.common.StringUtils; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerTestHelper; import io.druid.query.dimension.DefaultDimensionSpec; @@ -118,7 +119,7 @@ public class ApproximateHistogramGroupByQueryTest for (GroupByQueryConfig config : configs) { final GroupByQueryRunnerFactory factory = GroupByQueryRunnerTest.makeQueryRunnerFactory(config); for (QueryRunner runner : QueryRunnerTestHelper.makeQueryRunners(factory)) { - final String testName = String.format( + final String testName = StringUtils.format( "config=%s, runner=%s", config.toString(), runner.toString() diff --git a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTest.java b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTest.java index 9d6d2347e26..e4e2a0af525 100644 --- a/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTest.java +++ b/extensions-core/histogram/src/test/java/io/druid/query/aggregation/histogram/ApproximateHistogramTest.java @@ -21,6 +21,7 @@ package io.druid.query.aggregation.histogram; import com.google.common.collect.Iterators; import com.google.common.collect.Lists; +import io.druid.java.util.common.StringUtils; import org.junit.Assert; import org.junit.Test; @@ -254,7 +255,7 @@ public class ApproximateHistogramTest tFold += System.nanoTime() - t0; } - System.out.println(String.format("Average folds per second : %f", (double) count / (double) tFold * 1e9)); + System.out.println(StringUtils.format("Average folds per second : %f", (double) count / (double) tFold * 1e9)); } @Test diff --git a/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/TestKafkaExtractionCluster.java b/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/TestKafkaExtractionCluster.java index 6b723ff5053..96c1970f773 100644 --- a/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/TestKafkaExtractionCluster.java +++ b/extensions-core/kafka-extraction-namespace/src/test/java/io/druid/query/lookup/TestKafkaExtractionCluster.java @@ -299,7 +299,7 @@ public class TestKafkaExtractionCluster kafkaProducerProperties.putAll(kafkaProperties); kafkaProducerProperties.put( "metadata.broker.list", - String.format("127.0.0.1:%d", kafkaServer.socketServer().port()) + StringUtils.format("127.0.0.1:%d", kafkaServer.socketServer().port()) ); kafkaProperties.put("request.required.acks", "1"); return kafkaProducerProperties; diff --git a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTask.java b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTask.java index 7e44c17d537..432ec25cbed 100644 --- 
a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTask.java +++ b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTask.java @@ -52,6 +52,7 @@ import io.druid.indexing.common.actions.TaskActionClient; import io.druid.indexing.common.task.AbstractTask; import io.druid.indexing.common.task.TaskResource; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.parsers.ParseException; import io.druid.query.DruidMetrics; @@ -197,7 +198,7 @@ public class KafkaIndexTask extends AbstractTask implements ChatHandler { super( id == null ? makeTaskId(dataSchema.getDataSource(), RANDOM.nextInt()) : id, - String.format("%s_%s", TYPE, dataSchema.getDataSource()), + StringUtils.format("%s_%s", TYPE, dataSchema.getDataSource()), taskResource, dataSchema.getDataSource(), context @@ -332,7 +333,7 @@ public class KafkaIndexTask extends AbstractTask implements ChatHandler // Set up sequenceNames. final Map sequenceNames = Maps.newHashMap(); for (Integer partitionNum : nextOffsets.keySet()) { - sequenceNames.put(partitionNum, String.format("%s_%s", ioConfig.getBaseSequenceName(), partitionNum)); + sequenceNames.put(partitionNum, StringUtils.format("%s_%s", ioConfig.getBaseSequenceName(), partitionNum)); } // Set up committer. @@ -712,7 +713,7 @@ public class KafkaIndexTask extends AbstractTask implements ChatHandler } else if (!endOffsets.keySet().containsAll(offsets.keySet())) { return Response.status(Response.Status.BAD_REQUEST) .entity( - String.format( + StringUtils.format( "Request contains partitions not being handled by this task, my partitions: %s", endOffsets.keySet() ) @@ -732,7 +733,7 @@ public class KafkaIndexTask extends AbstractTask implements ChatHandler if (entry.getValue().compareTo(nextOffsets.get(entry.getKey())) < 0) { return Response.status(Response.Status.BAD_REQUEST) .entity( - String.format( + StringUtils.format( "End offset must be >= current offset for partition [%s] (current: %s)", entry.getKey(), nextOffsets.get(entry.getKey()) @@ -773,7 +774,7 @@ public class KafkaIndexTask extends AbstractTask implements ChatHandler { if (!(status == Status.PAUSED || status == Status.READING)) { return Response.status(Response.Status.BAD_REQUEST) - .entity(String.format("Can't pause, task is not in a pausable state (state: [%s])", status)) + .entity(StringUtils.format("Can't pause, task is not in a pausable state (state: [%s])", status)) .build(); } diff --git a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTaskClient.java b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTaskClient.java index b2294bba094..db1618bf1f0 100644 --- a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTaskClient.java +++ b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaIndexTaskClient.java @@ -42,7 +42,9 @@ import io.druid.indexing.common.TaskInfoProvider; import io.druid.indexing.common.TaskLocation; import io.druid.indexing.common.TaskStatus; import io.druid.java.util.common.IAE; +import io.druid.java.util.common.IOE; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.segment.realtime.firehose.ChatHandlerResource; import org.jboss.netty.channel.ChannelException; import org.jboss.netty.handler.codec.http.HttpMethod; @@ -111,7 +113,7 @@ 
public class KafkaIndexTaskClient this.executorService = MoreExecutors.listeningDecorator( Execs.multiThreaded( numThreads, - String.format( + StringUtils.format( "KafkaIndexTaskClient-%s-%%d", dataSource ) @@ -174,7 +176,7 @@ public class KafkaIndexTaskClient id, HttpMethod.POST, "pause", - timeout > 0 ? String.format("timeout=%d", timeout) : null, + timeout > 0 ? StringUtils.format("timeout=%d", timeout) : null, true ); @@ -471,17 +473,17 @@ public class KafkaIndexTaskClient FullResponseHolder response = null; Request request = null; TaskLocation location = TaskLocation.unknown(); - String path = String.format("%s/%s/%s", BASE_PATH, id, pathSuffix); + String path = StringUtils.format("%s/%s/%s", BASE_PATH, id, pathSuffix); Optional status = taskInfoProvider.getTaskStatus(id); if (!status.isPresent() || !status.get().isRunnable()) { - throw new TaskNotRunnableException(String.format("Aborting request because task [%s] is not runnable", id)); + throw new TaskNotRunnableException(StringUtils.format("Aborting request because task [%s] is not runnable", id)); } try { location = taskInfoProvider.getTaskLocation(id); if (location.equals(TaskLocation.unknown())) { - throw new NoTaskLocationException(String.format("No TaskLocation available for task [%s]", id)); + throw new NoTaskLocationException(StringUtils.format("No TaskLocation available for task [%s]", id)); } // Netty throws some annoying exceptions if a connection can't be opened, which happens relatively frequently @@ -514,7 +516,7 @@ public class KafkaIndexTaskClient } else if (responseCode == 400) { // don't bother retrying if it's a bad request throw new IAE("Received 400 Bad Request with body: %s", response.getContent()); } else { - throw new IOException(String.format("Received status [%d]", responseCode)); + throw new IOE("Received status [%d]", responseCode); } } catch (IOException | ChannelException e) { @@ -544,7 +546,7 @@ public class KafkaIndexTaskClient String urlForLog = (request != null ? request.getUrl().toString() - : String.format("http://%s:%d%s", location.getHost(), location.getPort(), path)); + : StringUtils.format("http://%s:%d%s", location.getHost(), location.getPort(), path)); if (!retry) { // if retry=false, we probably aren't too concerned if the operation doesn't succeed (i.e. 
the request was // for informational purposes only) so don't log a scary stack trace diff --git a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaPartitions.java b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaPartitions.java index f0d7370bfc8..4d9bf3d9870 100644 --- a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaPartitions.java +++ b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/KafkaPartitions.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.StringUtils; import java.util.Map; import java.util.Objects; @@ -45,7 +46,7 @@ public class KafkaPartitions for (Map.Entry entry : partitionOffsetMap.entrySet()) { Preconditions.checkArgument( entry.getValue() >= 0, - String.format( + StringUtils.format( "partition[%d] offset[%d] invalid", entry.getKey(), entry.getValue() diff --git a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java index 349afac438c..155ce5ceff8 100644 --- a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java +++ b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisor.java @@ -67,6 +67,7 @@ import io.druid.indexing.overlord.supervisor.Supervisor; import io.druid.indexing.overlord.supervisor.SupervisorReport; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.metadata.EntryExistsException; import io.druid.server.metrics.DruidMonitorSchedulerConfig; import org.apache.commons.codec.digest.DigestUtils; @@ -238,7 +239,7 @@ public class KafkaSupervisor implements Supervisor this.ioConfig = spec.getIoConfig(); this.tuningConfig = spec.getTuningConfig(); this.taskTuningConfig = KafkaTuningConfig.copyOf(this.tuningConfig); - this.supervisorId = String.format("KafkaSupervisor-%s", dataSource); + this.supervisorId = StringUtils.format("KafkaSupervisor-%s", dataSource); this.exec = Execs.singleThreaded(supervisorId); this.scheduledExec = Execs.scheduledSingleThreaded(supervisorId + "-Scheduler-%d"); this.reportingExec = Execs.scheduledSingleThreaded(supervisorId + "-Reporting-%d"); @@ -697,7 +698,7 @@ public class KafkaSupervisor implements Supervisor Map startPartitions = taskGroups.get(groupId).partitionOffsets; for (Map.Entry entry : startPartitions.entrySet()) { - sb.append(String.format("+%d(%d)", entry.getKey(), entry.getValue())); + sb.append(StringUtils.format("+%d(%d)", entry.getKey(), entry.getValue())); } String partitionOffsetStr = sb.toString().substring(1); @@ -733,7 +734,7 @@ public class KafkaSupervisor implements Supervisor final Properties props = new Properties(); props.setProperty("metadata.max.age.ms", "10000"); - props.setProperty("group.id", String.format("kafka-supervisor-%s", getRandomId())); + props.setProperty("group.id", StringUtils.format("kafka-supervisor-%s", getRandomId())); props.putAll(ioConfig.getConsumerProperties()); diff --git a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorIOConfig.java 
b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorIOConfig.java index 395531b59c3..8f6d5c0cab4 100644 --- a/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorIOConfig.java +++ b/extensions-core/kafka-indexing-service/src/main/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorIOConfig.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Optional; import com.google.common.base.Preconditions; +import io.druid.java.util.common.StringUtils; import org.joda.time.Duration; import org.joda.time.Period; @@ -63,7 +64,7 @@ public class KafkaSupervisorIOConfig this.consumerProperties = Preconditions.checkNotNull(consumerProperties, "consumerProperties"); Preconditions.checkNotNull( consumerProperties.get(BOOTSTRAP_SERVERS_KEY), - String.format("consumerProperties must contain entry for [%s]", BOOTSTRAP_SERVERS_KEY) + StringUtils.format("consumerProperties must contain entry for [%s]", BOOTSTRAP_SERVERS_KEY) ); this.replicas = replicas != null ? replicas : 1; diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java index 9f7be709567..01179cd8f6c 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskClientTest.java @@ -378,7 +378,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport Capture captured = Capture.newInstance(); expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK); - expect(responseHolder.getContent()).andReturn(String.format("\"%s\"", status.toString())).anyTimes(); + expect(responseHolder.getContent()).andReturn(StringUtils.format("\"%s\"", status.toString())).anyTimes(); expect(httpClient.go(capture(captured), anyObject(FullResponseHandler.class), eq(TEST_HTTP_TIMEOUT))).andReturn( Futures.immediateFuture(responseHolder) ); @@ -634,7 +634,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport List expectedUrls = Lists.newArrayList(); List> futures = Lists.newArrayList(); for (int i = 0; i < numRequests; i++) { - expectedUrls.add(new URL(String.format(URL_FORMATTER, TEST_HOST, TEST_PORT, TEST_IDS.get(i), "stop"))); + expectedUrls.add(new URL(StringUtils.format(URL_FORMATTER, TEST_HOST, TEST_PORT, TEST_IDS.get(i), "stop"))); futures.add(client.stopAsync(TEST_IDS.get(i), false)); } @@ -666,7 +666,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport List expectedUrls = Lists.newArrayList(); List> futures = Lists.newArrayList(); for (int i = 0; i < numRequests; i++) { - expectedUrls.add(new URL(String.format(URL_FORMATTER, TEST_HOST, TEST_PORT, TEST_IDS.get(i), "resume"))); + expectedUrls.add(new URL(StringUtils.format(URL_FORMATTER, TEST_HOST, TEST_PORT, TEST_IDS.get(i), "resume"))); futures.add(client.resumeAsync(TEST_IDS.get(i))); } @@ -699,7 +699,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport List expectedUrls = Lists.newArrayList(); List>> futures = Lists.newArrayList(); for (int i = 0; i < numRequests; i++) { - expectedUrls.add(new URL(String.format(URL_FORMATTER, TEST_HOST, TEST_PORT, TEST_IDS.get(i), "pause"))); + expectedUrls.add(new 
URL(StringUtils.format(URL_FORMATTER, TEST_HOST, TEST_PORT, TEST_IDS.get(i), "pause"))); futures.add(client.pauseAsync(TEST_IDS.get(i))); } @@ -732,7 +732,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport List expectedUrls = Lists.newArrayList(); List>> futures = Lists.newArrayList(); for (int i = 0; i < numRequests; i++) { - expectedUrls.add(new URL(String.format(URL_FORMATTER, TEST_HOST, TEST_PORT, TEST_IDS.get(i), "pause?timeout=9"))); + expectedUrls.add(new URL(StringUtils.format(URL_FORMATTER, TEST_HOST, TEST_PORT, TEST_IDS.get(i), "pause?timeout=9"))); futures.add(client.pauseAsync(TEST_IDS.get(i), 9)); } @@ -765,7 +765,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport List expectedUrls = Lists.newArrayList(); List> futures = Lists.newArrayList(); for (int i = 0; i < numRequests; i++) { - expectedUrls.add(new URL(String.format(URL_FORMATTER, TEST_HOST, TEST_PORT, TEST_IDS.get(i), "status"))); + expectedUrls.add(new URL(StringUtils.format(URL_FORMATTER, TEST_HOST, TEST_PORT, TEST_IDS.get(i), "status"))); futures.add(client.getStatusAsync(TEST_IDS.get(i))); } @@ -799,7 +799,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport List expectedUrls = Lists.newArrayList(); List> futures = Lists.newArrayList(); for (int i = 0; i < numRequests; i++) { - expectedUrls.add(new URL(String.format(URL_FORMATTER, TEST_HOST, TEST_PORT, TEST_IDS.get(i), "time/start"))); + expectedUrls.add(new URL(StringUtils.format(URL_FORMATTER, TEST_HOST, TEST_PORT, TEST_IDS.get(i), "time/start"))); futures.add(client.getStartTimeAsync(TEST_IDS.get(i))); } @@ -832,7 +832,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport List expectedUrls = Lists.newArrayList(); List>> futures = Lists.newArrayList(); for (int i = 0; i < numRequests; i++) { - expectedUrls.add(new URL(String.format(URL_FORMATTER, TEST_HOST, TEST_PORT, TEST_IDS.get(i), "offsets/current"))); + expectedUrls.add(new URL(StringUtils.format(URL_FORMATTER, TEST_HOST, TEST_PORT, TEST_IDS.get(i), "offsets/current"))); futures.add(client.getCurrentOffsetsAsync(TEST_IDS.get(i), false)); } @@ -865,7 +865,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport List expectedUrls = Lists.newArrayList(); List>> futures = Lists.newArrayList(); for (int i = 0; i < numRequests; i++) { - expectedUrls.add(new URL(String.format(URL_FORMATTER, TEST_HOST, TEST_PORT, TEST_IDS.get(i), "offsets/end"))); + expectedUrls.add(new URL(StringUtils.format(URL_FORMATTER, TEST_HOST, TEST_PORT, TEST_IDS.get(i), "offsets/end"))); futures.add(client.getEndOffsetsAsync(TEST_IDS.get(i))); } @@ -898,7 +898,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport List expectedUrls = Lists.newArrayList(); List> futures = Lists.newArrayList(); for (int i = 0; i < numRequests; i++) { - expectedUrls.add(new URL(String.format(URL_FORMATTER, TEST_HOST, TEST_PORT, TEST_IDS.get(i), "offsets/end"))); + expectedUrls.add(new URL(StringUtils.format(URL_FORMATTER, TEST_HOST, TEST_PORT, TEST_IDS.get(i), "offsets/end"))); futures.add(client.setEndOffsetsAsync(TEST_IDS.get(i), endOffsets)); } @@ -933,7 +933,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport for (int i = 0; i < numRequests; i++) { expectedUrls.add( new URL( - String.format( + StringUtils.format( URL_FORMATTER, TEST_HOST, TEST_PORT, diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java 
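
Note on the StringUtils.format calls that these test hunks, like most of this patch, switch to: java.lang.String.format uses the JVM's default Locale, so %d, %,d and %f can render with locale-specific digits and separators, silently breaking URLs, paths, SQL and assertion messages on some machines. A minimal sketch of the helper, assuming it only pins the Locale (the real io.druid.java.util.common.StringUtils carries more than this):

    import java.util.Locale;

    public final class StringUtils
    {
      private StringUtils() {}

      // Same contract as String.format, but deterministic across machines:
      // under a default Locale that uses non-ASCII digits (e.g. th_TH_TH
      // with Thai numerals), plain String.format("%d", 80) would not yield
      // the ASCII "80" these URL and path strings need.
      public static String format(String formatText, Object... arguments)
      {
        return String.format(Locale.ENGLISH, formatText, arguments);
      }
    }

With this in place, a call like StringUtils.format(URL_FORMATTER, TEST_HOST, TEST_PORT, id, "status") yields the same URL on every test machine. The same reasoning drives the System.out.printf(Locale.ENGLISH, ...) changes in the benchmark files.
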
index fd3761debff..a59462a98af 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/KafkaIndexTaskTest.java @@ -1593,7 +1593,7 @@ public class KafkaIndexTaskTest private List readSegmentDim1(final SegmentDescriptor descriptor) throws IOException { File indexZip = new File( - String.format( + StringUtils.format( "%s/%s/%s_%s/%s/%d/index.zip", getSegmentDirectory(), DATA_SCHEMA.getDataSource(), @@ -1605,7 +1605,7 @@ public class KafkaIndexTaskTest ); File outputLocation = new File( directory, - String.format( + StringUtils.format( "%s_%s_%s_%s", descriptor.getInterval().getStart(), descriptor.getInterval().getEnd(), diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java index 058203e71b0..c67dce5928b 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java @@ -97,7 +97,6 @@ import java.util.List; import java.util.Map; import java.util.concurrent.Executor; -import static java.lang.String.format; import static org.easymock.EasyMock.anyObject; import static org.easymock.EasyMock.anyString; import static org.easymock.EasyMock.capture; @@ -166,7 +165,7 @@ public class KafkaSupervisorTest extends EasyMockSupport ImmutableMap.of("num.partitions", String.valueOf(NUM_PARTITIONS)) ); kafkaServer.start(); - kafkaHost = String.format("localhost:%d", kafkaServer.getPort()); + kafkaHost = StringUtils.format("localhost:%d", kafkaServer.getPort()); dataSchema = getDataSchema(DATASOURCE); } @@ -1433,7 +1432,7 @@ public class KafkaSupervisorTest extends EasyMockSupport { expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); taskClient.close(); - taskRunner.unregisterListener(String.format("KafkaSupervisor-%s", DATASOURCE)); + taskRunner.unregisterListener(StringUtils.format("KafkaSupervisor-%s", DATASOURCE)); replayAll(); supervisor = getSupervisor(1, 1, true, "PT1H", null, false); @@ -1719,7 +1718,7 @@ public class KafkaSupervisorTest extends EasyMockSupport topic, i, null, - StringUtils.toUtf8(format("event-%d", j)) + StringUtils.toUtf8(StringUtils.format("event-%d", j)) ) ).get(); } @@ -1888,7 +1887,7 @@ public class KafkaSupervisorTest extends EasyMockSupport @Override protected String generateSequenceName(int groupId) { - return String.format("sequenceName-%d", groupId); + return StringUtils.format("sequenceName-%d", groupId); } } } diff --git a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/test/TestBroker.java b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/test/TestBroker.java index e90fa4c4269..9e7f69140f8 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/test/TestBroker.java +++ b/extensions-core/kafka-indexing-service/src/test/java/io/druid/indexing/kafka/test/TestBroker.java @@ -22,6 +22,7 @@ package io.druid.indexing.kafka.test; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.google.common.io.Files; +import io.druid.java.util.common.StringUtils; import kafka.server.KafkaConfig; import kafka.server.KafkaServer; 
import org.apache.commons.io.FileUtils; @@ -80,7 +81,7 @@ public class TestBroker implements Closeable final KafkaConfig config = new KafkaConfig(props); - server = new KafkaServer(config, SystemTime.SYSTEM, Some.apply(String.format("TestingBroker[%d]-", id)), List$.MODULE$.empty()); + server = new KafkaServer(config, SystemTime.SYSTEM, Some.apply(StringUtils.format("TestingBroker[%d]-", id)), List$.MODULE$.empty()); server.startup(); } @@ -102,7 +103,7 @@ public class TestBroker implements Closeable public Map producerProperties() { final Map props = Maps.newHashMap(); - props.put("bootstrap.servers", String.format("localhost:%d", getPort())); + props.put("bootstrap.servers", StringUtils.format("localhost:%d", getPort())); props.put("key.serializer", ByteArraySerializer.class.getName()); props.put("value.serializer", ByteArraySerializer.class.getName()); props.put("acks", "all"); @@ -112,7 +113,7 @@ public class TestBroker implements Closeable public Map consumerProperties() { final Map props = Maps.newHashMap(); - props.put("bootstrap.servers", String.format("localhost:%d", getPort())); + props.put("bootstrap.servers", StringUtils.format("localhost:%d", getPort())); props.put("key.deserializer", ByteArrayDeserializer.class.getName()); props.put("value.deserializer", ByteArrayDeserializer.class.getName()); props.put("group.id", String.valueOf(RANDOM.nextInt())); diff --git a/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/NamespaceLookupExtractorFactory.java b/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/NamespaceLookupExtractorFactory.java index 14b1dab8fcb..b507a47641a 100644 --- a/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/NamespaceLookupExtractorFactory.java +++ b/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/NamespaceLookupExtractorFactory.java @@ -80,7 +80,7 @@ public class NamespaceLookupExtractorFactory implements LookupExtractorFactory Preconditions.checkArgument(this.firstCacheTimeout >= 0); this.injective = injective; this.cacheScheduler = cacheScheduler; - this.extractorID = String.format("namespace-factory-%s-%s", extractionNamespace, UUID.randomUUID().toString()); + this.extractorID = StringUtils.format("namespace-factory-%s-%s", extractionNamespace, UUID.randomUUID().toString()); this.lookupIntrospectHandler = new NamespaceLookupIntrospectHandler(this); } diff --git a/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/namespace/JdbcExtractionNamespace.java b/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/namespace/JdbcExtractionNamespace.java index f7f24d65cbf..78478b61c52 100644 --- a/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/namespace/JdbcExtractionNamespace.java +++ b/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/namespace/JdbcExtractionNamespace.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonTypeName; import com.google.common.base.Preconditions; +import io.druid.java.util.common.StringUtils; import io.druid.metadata.MetadataStorageConnectorConfig; import org.joda.time.Period; @@ -108,7 +109,7 @@ public class JdbcExtractionNamespace implements ExtractionNamespace @Override public String toString() { - return String.format( + return StringUtils.format( "JdbcExtractionNamespace = { connectorConfig = { %s }, table = %s, keyColumn 
= %s, valueColumn = %s, tsColumn = %s, pollPeriod = %s}", connectorConfig.toString(), table, diff --git a/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/namespace/UriExtractionNamespace.java b/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/namespace/UriExtractionNamespace.java index e36275909c4..8505cf3868e 100644 --- a/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/namespace/UriExtractionNamespace.java +++ b/extensions-core/lookups-cached-global/src/main/java/io/druid/query/lookup/namespace/UriExtractionNamespace.java @@ -37,6 +37,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.druid.guice.annotations.Json; import io.druid.java.util.common.IAE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.UOE; import io.druid.java.util.common.parsers.CSVParser; import io.druid.java.util.common.parsers.DelimitedParser; @@ -360,7 +361,7 @@ public class UriExtractionNamespace implements ExtractionNamespace @Override public String toString() { - return String.format( + return StringUtils.format( "CSVFlatDataParser = { columns = %s, keyColumn = %s, valueColumn = %s }", Arrays.toString(columns.toArray()), keyColumn, @@ -502,7 +503,7 @@ public class UriExtractionNamespace implements ExtractionNamespace @Override public String toString() { - return String.format( + return StringUtils.format( "TSVFlatDataParser = { columns = %s, delimiter = '%s', listDelimiter = '%s',keyColumn = %s, valueColumn = %s }", Arrays.toString(columns.toArray()), delimiter, @@ -580,7 +581,7 @@ public class UriExtractionNamespace implements ExtractionNamespace @Override public String toString() { - return String.format( + return StringUtils.format( "JSONFlatDataParser = { keyFieldName = %s, valueFieldName = %s }", keyFieldName, valueFieldName diff --git a/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/JdbcCacheGenerator.java b/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/JdbcCacheGenerator.java index 1a4df7f1db8..88924deedab 100644 --- a/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/JdbcCacheGenerator.java +++ b/extensions-core/lookups-cached-global/src/main/java/io/druid/server/lookup/namespace/JdbcCacheGenerator.java @@ -21,6 +21,7 @@ package io.druid.server.lookup.namespace; import io.druid.common.utils.JodaUtils; import io.druid.java.util.common.Pair; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.query.lookup.namespace.CacheGenerator; import io.druid.query.lookup.namespace.JdbcExtractionNamespace; @@ -78,7 +79,7 @@ public final class JdbcCacheGenerator implements CacheGenerator> withHandle(Handle handle) throws Exception { final String query; - query = String.format( + query = StringUtils.format( "SELECT %s, %s FROM %s", keyColumn, valueColumn, @@ -109,7 +110,7 @@ public final class JdbcCacheGenerator implements CacheGenerator implements PollingCache public OffHeapPollingCache(final Iterable> entries) { synchronized (started) { - this.cacheName = String.format("cache-%s", UUID.randomUUID()); - this.reverseCacheName = String.format("reverseCache-%s", UUID.randomUUID()); + this.cacheName = StringUtils.format("cache-%s", UUID.randomUUID()); + this.reverseCacheName = StringUtils.format("reverseCache-%s", UUID.randomUUID()); mapCache = DB.createHashMap(cacheName).make(); reverseCache = 
DB.createHashMap(reverseCacheName).make(); ImmutableSet.Builder setOfValuesBuilder = ImmutableSet.builder(); diff --git a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/jdbc/JdbcDataFetcher.java b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/jdbc/JdbcDataFetcher.java index 28f5a7a362a..bb133b7afac 100644 --- a/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/jdbc/JdbcDataFetcher.java +++ b/extensions-core/lookups-cached-single/src/main/java/io/druid/server/lookup/jdbc/JdbcDataFetcher.java @@ -24,6 +24,7 @@ import com.google.common.base.Preconditions; import com.google.common.base.Strings; import com.google.common.collect.Lists; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.metadata.MetadataStorageConnectorConfig; import io.druid.server.lookup.DataFetcher; @@ -75,19 +76,19 @@ public class JdbcDataFetcher implements DataFetcher this.keyColumn = Preconditions.checkNotNull(keyColumn, "keyColumn"); this.valueColumn = Preconditions.checkNotNull(valueColumn, "valueColumn"); - this.fetchAllQuery = String.format( + this.fetchAllQuery = StringUtils.format( "SELECT %s, %s FROM %s", this.keyColumn, this.valueColumn, this.table ); - this.fetchQuery = String.format( + this.fetchQuery = StringUtils.format( "SELECT %s FROM %s WHERE %s = :val", this.valueColumn, this.table, this.keyColumn ); - this.reverseFetchQuery = String.format( + this.reverseFetchQuery = StringUtils.format( "SELECT %s FROM %s WHERE %s = :val", this.keyColumn, this.table, diff --git a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/jdbc/JdbcDataFetcherTest.java b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/jdbc/JdbcDataFetcherTest.java index 2f137d64df4..27ebf511ab8 100644 --- a/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/jdbc/JdbcDataFetcherTest.java +++ b/extensions-core/lookups-cached-single/src/test/java/io/druid/server/lookup/jdbc/JdbcDataFetcherTest.java @@ -22,6 +22,7 @@ package io.druid.server.lookup.jdbc; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Sets; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.StringUtils; import io.druid.metadata.MetadataStorageConnectorConfig; import io.druid.metadata.TestDerbyConnector; import io.druid.server.lookup.DataFetcher; @@ -66,7 +67,7 @@ public class JdbcDataFetcherTest Assert.assertEquals( 0, handle.createStatement( - String.format( + StringUtils.format( "CREATE TABLE %s (%s VARCHAR(64), %s VARCHAR(64))", tableName, keyColumn, @@ -74,7 +75,7 @@ public class JdbcDataFetcherTest ) ).setQueryTimeout(1).execute() ); - handle.createStatement(String.format("TRUNCATE TABLE %s", tableName)).setQueryTimeout(1).execute(); + handle.createStatement(StringUtils.format("TRUNCATE TABLE %s", tableName)).setQueryTimeout(1).execute(); for (Map.Entry entry : lookupMap.entrySet()) { insertValues(entry.getKey(), entry.getValue(), handle); @@ -167,9 +168,9 @@ public class JdbcDataFetcherTest { final String query; handle.createStatement( - String.format("DELETE FROM %s WHERE %s='%s'", tableName, keyColumn, key) + StringUtils.format("DELETE FROM %s WHERE %s='%s'", tableName, keyColumn, key) ).setQueryTimeout(1).execute(); - query = String.format( + query = StringUtils.format( "INSERT INTO %s (%s, %s) VALUES ('%s', '%s')", tableName, keyColumn, valueColumn, diff --git 
a/extensions-core/mysql-metadata-storage/src/main/java/io/druid/metadata/storage/mysql/MySQLConnector.java b/extensions-core/mysql-metadata-storage/src/main/java/io/druid/metadata/storage/mysql/MySQLConnector.java index 0b519e35c7a..e8b4f4443d5 100644 --- a/extensions-core/mysql-metadata-storage/src/main/java/io/druid/metadata/storage/mysql/MySQLConnector.java +++ b/extensions-core/mysql-metadata-storage/src/main/java/io/druid/metadata/storage/mysql/MySQLConnector.java @@ -25,6 +25,7 @@ import com.google.inject.Inject; import com.mysql.jdbc.exceptions.MySQLTransientException; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.metadata.MetadataStorageConnectorConfig; import io.druid.metadata.MetadataStorageTablesConfig; @@ -135,7 +136,7 @@ public class MySQLConnector extends SQLMetadataConnector public Void withHandle(Handle handle) throws Exception { handle.createStatement( - String.format( + StringUtils.format( "INSERT INTO %1$s (%2$s, %3$s) VALUES (:key, :value) ON DUPLICATE KEY UPDATE %3$s = :value", tableName, keyColumn, diff --git a/extensions-core/postgresql-metadata-storage/src/main/java/io/druid/metadata/storage/postgresql/PostgreSQLConnector.java b/extensions-core/postgresql-metadata-storage/src/main/java/io/druid/metadata/storage/postgresql/PostgreSQLConnector.java index 9f240837a68..4c72130d26e 100644 --- a/extensions-core/postgresql-metadata-storage/src/main/java/io/druid/metadata/storage/postgresql/PostgreSQLConnector.java +++ b/extensions-core/postgresql-metadata-storage/src/main/java/io/druid/metadata/storage/postgresql/PostgreSQLConnector.java @@ -22,6 +22,7 @@ package io.druid.metadata.storage.postgresql; import com.google.common.base.Supplier; import com.google.inject.Inject; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.metadata.MetadataStorageConnectorConfig; import io.druid.metadata.MetadataStorageTablesConfig; @@ -126,7 +127,7 @@ public class PostgreSQLConnector extends SQLMetadataConnector { if (canUpsert(handle)) { handle.createStatement( - String.format( + StringUtils.format( "INSERT INTO %1$s (%2$s, %3$s) VALUES (:key, :value) ON CONFLICT (%2$s) DO UPDATE SET %3$s = EXCLUDED.%3$s", tableName, keyColumn, @@ -138,7 +139,7 @@ public class PostgreSQLConnector extends SQLMetadataConnector .execute(); } else { handle.createStatement( - String.format( + StringUtils.format( "BEGIN;\n" + "LOCK TABLE %1$s IN SHARE ROW EXCLUSIVE MODE;\n" + "WITH upsert AS (UPDATE %1$s SET %3$s=:value WHERE %2$s=:key RETURNING *)\n" + diff --git a/extensions-core/protobuf-extensions/src/main/java/io/druid/data/input/protobuf/ProtobufInputRowParser.java b/extensions-core/protobuf-extensions/src/main/java/io/druid/data/input/protobuf/ProtobufInputRowParser.java index 0423e28d8ad..b039539df49 100644 --- a/extensions-core/protobuf-extensions/src/main/java/io/druid/data/input/protobuf/ProtobufInputRowParser.java +++ b/extensions-core/protobuf-extensions/src/main/java/io/druid/data/input/protobuf/ProtobufInputRowParser.java @@ -32,6 +32,7 @@ import io.druid.data.input.ByteBufferInputRowParser; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.impl.ParseSpec; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.parsers.ParseException; import io.druid.java.util.common.parsers.Parser; @@ -139,7 +140,7 @@ public class ProtobufInputRowParser implements 
ByteBufferInputRowParser Descriptor desc = dynamicSchema.getMessageDescriptor(messageType); if (desc == null) { throw new ParseException( - String.format( + StringUtils.format( "Protobuf message type %s not found in the specified descriptor. Available messages types are %s", protoMessageType, messageTypes diff --git a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentMover.java b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentMover.java index 23b331f6325..cffe2be30f6 100644 --- a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentMover.java +++ b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentMover.java @@ -136,7 +136,7 @@ public class S3DataSegmentMover implements DataSegmentMover s3Object.getStorageClass().equals(S3Object.STORAGE_CLASS_GLACIER)) { log.warn("Cannot move file[s3://%s/%s] of storage class glacier, skipping.", s3Bucket, s3Path); } else { - final String copyMsg = StringUtils.safeFormat( + final String copyMsg = StringUtils.format( "[s3://%s/%s] to [s3://%s/%s]", s3Bucket, s3Path, targetS3Bucket, diff --git a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPuller.java b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPuller.java index cd558a29eac..4722f102c3a 100644 --- a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPuller.java +++ b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPuller.java @@ -28,6 +28,7 @@ import com.google.inject.Inject; import io.druid.java.util.common.CompressionUtils; import io.druid.java.util.common.FileUtils; import io.druid.java.util.common.IAE; +import io.druid.java.util.common.IOE; import io.druid.java.util.common.MapUtils; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.UOE; @@ -100,7 +101,7 @@ public class S3DataSegmentPuller implements DataSegmentPuller, URIDataPuller } } catch (ServiceException e) { - throw new IOException(StringUtils.safeFormat("Could not load S3 URI [%s]", uri), e); + throw new IOE(e, "Could not load S3 URI [%s]", uri); } } @@ -178,7 +179,7 @@ public class S3DataSegmentPuller implements DataSegmentPuller, URIDataPuller try { org.apache.commons.io.FileUtils.forceMkdir(outDir); - final URI uri = URI.create(String.format("s3://%s/%s", s3Coords.bucket, s3Coords.path)); + final URI uri = URI.create(StringUtils.format("s3://%s/%s", s3Coords.bucket, s3Coords.path)); final ByteSource byteSource = new ByteSource() { @Override @@ -250,7 +251,7 @@ public class S3DataSegmentPuller implements DataSegmentPuller, URIDataPuller return buildFileObject(uri, s3Client).openInputStream(); } catch (ServiceException e) { - throw new IOException(String.format("Could not load URI [%s]", uri.toString()), e); + throw new IOE(e, "Could not load URI [%s]", uri); } } @@ -292,15 +293,12 @@ public class S3DataSegmentPuller implements DataSegmentPuller, URIDataPuller { try { final FileObject object = buildFileObject(uri, s3Client); - return String.format("%d", object.getLastModified()); + return StringUtils.format("%d", object.getLastModified()); } catch (ServiceException e) { if (S3Utils.isServiceExceptionRecoverable(e)) { // The recoverable logic is always true for IOException, so we want to only pass IOException if it is recoverable - throw new IOException( - String.format("Could not fetch last modified timestamp from URI [%s]", uri.toString()), - e - ); + throw new IOE(e, "Could not fetch last 
modified timestamp from URI [%s]", uri); } else { throw Throwables.propagate(e); } @@ -366,7 +364,7 @@ public class S3DataSegmentPuller implements DataSegmentPuller, URIDataPuller @Override public String toString() { - return String.format("s3://%s/%s", bucket, path); + return StringUtils.format("s3://%s/%s", bucket, path); } } } diff --git a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusher.java b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusher.java index f5020ed297c..ac79fa7cbec 100644 --- a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusher.java +++ b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3DataSegmentPusher.java @@ -25,6 +25,7 @@ import com.google.common.collect.ImmutableMap; import com.google.inject.Inject; import com.metamx.emitter.EmittingLogger; import io.druid.java.util.common.CompressionUtils; +import io.druid.java.util.common.StringUtils; import io.druid.segment.SegmentUtils; import io.druid.segment.loading.DataSegmentPusher; import io.druid.timeline.DataSegment; @@ -66,9 +67,9 @@ public class S3DataSegmentPusher implements DataSegmentPusher public String getPathForHadoop() { if (config.isUseS3aSchema()) { - return String.format("s3a://%s/%s", config.getBucket(), config.getBaseKey()); + return StringUtils.format("s3a://%s/%s", config.getBucket(), config.getBaseKey()); } - return String.format("s3n://%s/%s", config.getBucket(), config.getBaseKey()); + return StringUtils.format("s3n://%s/%s", config.getBucket(), config.getBaseKey()); } @Deprecated diff --git a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3TaskLogs.java b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3TaskLogs.java index 83e931e6bd5..ce2643cb1ef 100644 --- a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3TaskLogs.java +++ b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3TaskLogs.java @@ -24,6 +24,8 @@ import com.google.common.base.Throwables; import com.google.common.io.ByteSource; import com.google.inject.Inject; +import io.druid.java.util.common.IOE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.tasklogs.TaskLogs; import org.jets3t.service.ServiceException; @@ -103,7 +105,7 @@ public class S3TaskLogs implements TaskLogs || "NoSuchBucket".equals(e.getErrorCode())) { return Optional.absent(); } else { - throw new IOException(String.format("Failed to stream logs from: %s", taskKey), e); + throw new IOE(e, "Failed to stream logs from: %s", taskKey); } } } @@ -137,7 +139,7 @@ public class S3TaskLogs implements TaskLogs private String getTaskLogKey(String taskid) { - return String.format("%s/%s/log", config.getS3Prefix(), taskid); + return StringUtils.format("%s/%s/log", config.getS3Prefix(), taskid); } @Override diff --git a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3TimestampVersionedDataFinder.java b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3TimestampVersionedDataFinder.java index db6e96ea70c..fcfa445d691 100644 --- a/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3TimestampVersionedDataFinder.java +++ b/extensions-core/s3-extensions/src/main/java/io/druid/storage/s3/S3TimestampVersionedDataFinder.java @@ -25,6 +25,7 @@ import com.google.inject.Inject; import io.druid.data.SearchableVersionedDataFinder; import io.druid.java.util.common.RetryUtils; +import 
io.druid.java.util.common.StringUtils; import org.jets3t.service.impl.rest.httpclient.RestS3Service; import org.jets3t.service.model.S3Object; @@ -87,7 +88,7 @@ public class S3TimestampVersionedDataFinder extends S3DataSegmentPuller implemen final long latestModified = storageObject.getLastModifiedDate().getTime(); if (latestModified >= mostRecent) { mostRecent = latestModified; - latest = new URI(String.format("s3://%s/%s", storageObject.getBucketName(), storageObject.getKey())); + latest = new URI(StringUtils.format("s3://%s/%s", storageObject.getBucketName(), storageObject.getKey())); } } return latest; diff --git a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPullerTest.java b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPullerTest.java index 72358e55d04..303bf657d6e 100644 --- a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPullerTest.java +++ b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3DataSegmentPullerTest.java @@ -20,6 +20,7 @@ package io.druid.storage.s3; import io.druid.java.util.common.FileUtils; +import io.druid.java.util.common.StringUtils; import io.druid.segment.loading.SegmentLoadingException; import java.io.File; import java.io.FileInputStream; @@ -68,11 +69,11 @@ public class S3DataSegmentPullerTest EasyMock.replay(s3Client); - String version = puller.getVersion(URI.create(String.format("s3://%s/%s", bucket, object0.getKey()))); + String version = puller.getVersion(URI.create(StringUtils.format("s3://%s/%s", bucket, object0.getKey()))); EasyMock.verify(s3Client); - Assert.assertEquals(String.format("%d", new Date(0).getTime()), version); + Assert.assertEquals(StringUtils.format("%d", new Date(0).getTime()), version); } @Test diff --git a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3TimestampVersionedDataFinderTest.java b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3TimestampVersionedDataFinderTest.java index 1f3430dd803..c2bdc347334 100644 --- a/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3TimestampVersionedDataFinderTest.java +++ b/extensions-core/s3-extensions/src/test/java/io/druid/storage/s3/S3TimestampVersionedDataFinderTest.java @@ -19,6 +19,7 @@ package io.druid.storage.s3; +import io.druid.java.util.common.StringUtils; import org.easymock.EasyMock; import org.jets3t.service.S3ServiceException; import org.jets3t.service.impl.rest.httpclient.RestS3Service; @@ -60,11 +61,11 @@ public class S3TimestampVersionedDataFinderTest EasyMock.replay(s3Client); - URI latest = finder.getLatestVersion(URI.create(String.format("s3://%s/%s", bucket, keyPrefix)), pattern); + URI latest = finder.getLatestVersion(URI.create(StringUtils.format("s3://%s/%s", bucket, keyPrefix)), pattern); EasyMock.verify(s3Client); - URI expected = URI.create(String.format("s3://%s/%s", bucket, object1.getKey())); + URI expected = URI.create(StringUtils.format("s3://%s/%s", bucket, object1.getKey())); Assert.assertEquals(expected, latest); } @@ -96,7 +97,7 @@ public class S3TimestampVersionedDataFinderTest EasyMock.replay(s3Client); - URI latest = finder.getLatestVersion(URI.create(String.format("s3://%s/%s", bucket, keyPrefix)), pattern); + URI latest = finder.getLatestVersion(URI.create(StringUtils.format("s3://%s/%s", bucket, keyPrefix)), pattern); EasyMock.verify(s3Client); @@ -126,11 +127,11 @@ public class S3TimestampVersionedDataFinderTest EasyMock.replay(s3Client); - URI latest = 
finder.getLatestVersion(URI.create(String.format("s3://%s/%s", bucket, keyPrefix)), pattern); + URI latest = finder.getLatestVersion(URI.create(StringUtils.format("s3://%s/%s", bucket, keyPrefix)), pattern); EasyMock.verify(s3Client); - URI expected = URI.create(String.format("s3://%s/%s", bucket, object0.getKey())); + URI expected = URI.create(StringUtils.format("s3://%s/%s", bucket, object0.getKey())); Assert.assertEquals(expected, latest); } @@ -157,11 +158,11 @@ public class S3TimestampVersionedDataFinderTest EasyMock.replay(s3Client); - URI latest = finder.getLatestVersion(URI.create(String.format("s3://%s/%s", bucket, object0.getKey())), null); + URI latest = finder.getLatestVersion(URI.create(StringUtils.format("s3://%s/%s", bucket, object0.getKey())), null); EasyMock.verify(s3Client); - URI expected = URI.create(String.format("s3://%s/%s", bucket, object0.getKey())); + URI expected = URI.create(StringUtils.format("s3://%s/%s", bucket, object0.getKey())); Assert.assertEquals(expected, latest); } diff --git a/hll/src/test/java/io/druid/hll/HyperLogLogCollectorTest.java b/hll/src/test/java/io/druid/hll/HyperLogLogCollectorTest.java index 5c7744963ed..3ef1c525e2f 100644 --- a/hll/src/test/java/io/druid/hll/HyperLogLogCollectorTest.java +++ b/hll/src/test/java/io/druid/hll/HyperLogLogCollectorTest.java @@ -36,6 +36,7 @@ import java.security.MessageDigest; import java.util.Arrays; import java.util.Collection; import java.util.List; +import java.util.Locale; import java.util.Random; /** @@ -121,7 +122,7 @@ public class HyperLogLogCollectorTest System.out.println("Rolling buffer cardinality " + rolling.estimateCardinality()); System.out.println("Simple buffer cardinality " + simple.estimateCardinality()); System.out.println( - String.format( + StringUtils.format( "Rolling cardinality estimate off by %4.1f%%", 100 * (1 - rolling.estimateCardinality() / n) ) @@ -144,14 +145,18 @@ public class HyperLogLogCollectorTest theCollector.add(fn.hashLong(count).asBytes()); rolling.fold(theCollector); } - System.out.printf("testHighCardinalityRollingFold2 took %d ms%n", System.currentTimeMillis() - start); + System.out.printf( + Locale.ENGLISH, + "testHighCardinalityRollingFold2 took %d ms%n", + System.currentTimeMillis() - start + ); int n = count; System.out.println("True cardinality " + n); System.out.println("Rolling buffer cardinality " + rolling.estimateCardinality()); System.out.println( - String.format( + StringUtils.format( "Rolling cardinality estimate off by %4.1f%%", 100 * (1 - rolling.estimateCardinality() / n) ) @@ -837,6 +842,7 @@ public class HyperLogLogCollectorTest error += errorThisTime; System.out.printf( + Locale.ENGLISH, "%,d ==? %,f in %,d millis. actual error[%,f%%], avg. 
error [%,f%%]%n", numThings, estimatedValue, diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/DetermineHashedPartitionsJob.java b/indexing-hadoop/src/main/java/io/druid/indexer/DetermineHashedPartitionsJob.java index f231f9f005f..c97b7d75c18 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/DetermineHashedPartitionsJob.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/DetermineHashedPartitionsJob.java @@ -32,6 +32,7 @@ import io.druid.data.input.InputRow; import io.druid.data.input.Rows; import io.druid.hll.HyperLogLogCollector; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.logger.Logger; import io.druid.segment.indexing.granularity.UniformGranularitySpec; @@ -86,7 +87,7 @@ public class DetermineHashedPartitionsJob implements Jobby final long startTime = System.currentTimeMillis(); final Job groupByJob = Job.getInstance( new Configuration(), - String.format("%s-determine_partitions_hashed-%s", config.getDataSource(), config.getIntervals()) + StringUtils.format("%s-determine_partitions_hashed-%s", config.getDataSource(), config.getIntervals()) ); JobHelper.injectSystemProperties(groupByJob); diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/DeterminePartitionsJob.java b/indexing-hadoop/src/main/java/io/druid/indexer/DeterminePartitionsJob.java index 0792272db1f..a1182969f33 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/DeterminePartitionsJob.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/DeterminePartitionsJob.java @@ -38,6 +38,7 @@ import io.druid.data.input.InputRow; import io.druid.data.input.Rows; import io.druid.indexer.partitions.SingleDimensionPartitionsSpec; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.nary.BinaryFn; import io.druid.java.util.common.logger.Logger; @@ -123,7 +124,7 @@ public class DeterminePartitionsJob implements Jobby if (!config.getPartitionsSpec().isAssumeGrouped()) { final Job groupByJob = Job.getInstance( new Configuration(), - String.format("%s-determine_partitions_groupby-%s", config.getDataSource(), config.getIntervals()) + StringUtils.format("%s-determine_partitions_groupby-%s", config.getDataSource(), config.getIntervals()) ); JobHelper.injectSystemProperties(groupByJob); @@ -163,7 +164,7 @@ public class DeterminePartitionsJob implements Jobby */ final Job dimSelectionJob = Job.getInstance( new Configuration(), - String.format("%s-determine_partitions_dimselection-%s", config.getDataSource(), config.getIntervals()) + StringUtils.format("%s-determine_partitions_dimselection-%s", config.getDataSource(), config.getIntervals()) ); dimSelectionJob.getConfiguration().set("io.sort.record.percent", "0.19"); diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java index ef2b77155b0..51a46bd7946 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopDruidIndexerConfig.java @@ -46,6 +46,7 @@ import io.druid.guice.annotations.Self; import io.druid.indexer.partitions.PartitionsSpec; import io.druid.indexer.path.PathSpec; import io.druid.initialization.Initialization; +import io.druid.java.util.common.StringUtils; import 
io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.FunctionalIterable; import io.druid.java.util.common.logger.Logger; @@ -505,7 +506,7 @@ public class HadoopDruidIndexerConfig public Path makeIntermediatePath() { return new Path( - String.format( + StringUtils.format( "%s/%s/%s_%s", getWorkingPath(), schema.getDataSchema().getDataSource(), @@ -518,7 +519,7 @@ public class HadoopDruidIndexerConfig public Path makeSegmentPartitionInfoPath(Interval bucketInterval) { return new Path( - String.format( + StringUtils.format( "%s/%s_%s/partitions.json", makeIntermediatePath(), ISODateTimeFormat.basicDateTime().print(bucketInterval.getStart()), @@ -530,7 +531,7 @@ public class HadoopDruidIndexerConfig public Path makeIntervalInfoPath() { return new Path( - String.format( + StringUtils.format( "%s/intervals.json", makeIntermediatePath() ) @@ -549,7 +550,7 @@ public class HadoopDruidIndexerConfig public Path makeDescriptorInfoPath(DataSegment segment) { - return new Path(makeDescriptorInfoDir(), String.format("%s.json", segment.getIdentifier().replace(":", ""))); + return new Path(makeDescriptorInfoDir(), StringUtils.format("%s.json", segment.getIdentifier().replace(":", ""))); } public void addJobProperties(Job job) diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/IndexGeneratorJob.java b/indexing-hadoop/src/main/java/io/druid/indexer/IndexGeneratorJob.java index 9330d23ef80..efbc5a43468 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/IndexGeneratorJob.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/IndexGeneratorJob.java @@ -41,6 +41,7 @@ import io.druid.data.input.Rows; import io.druid.indexer.hadoop.SegmentInputRow; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.query.aggregation.AggregatorFactory; import io.druid.segment.BaseProgressIndicator; @@ -158,7 +159,7 @@ public class IndexGeneratorJob implements Jobby try { Job job = Job.getInstance( new Configuration(), - String.format("%s-index-generator-%s", config.getDataSource(), config.getIntervals()) + StringUtils.format("%s-index-generator-%s", config.getDataSource(), config.getIntervals()) ); job.getConfiguration().set("io.sort.record.percent", "0.23"); @@ -618,14 +619,14 @@ public class IndexGeneratorJob implements Jobby ); runningTotalLineCount = lineCount; - final File file = new File(baseFlushFile, String.format("index%,05d", indexCount)); + final File file = new File(baseFlushFile, StringUtils.format("index%,05d", indexCount)); toMerge.add(file); context.progress(); final IncrementalIndex persistIndex = index; persistFutures.add( persistExecutor.submit( - new ThreadRenamingRunnable(String.format("%s-persist", file.getName())) + new ThreadRenamingRunnable(StringUtils.format("%s-persist", file.getName())) { @Override public void doRun() diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java b/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java index 34b19245138..0e9f0eb25d4 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/JobHelper.java @@ -29,8 +29,10 @@ import com.google.common.io.OutputSupplier; import io.druid.indexer.updater.HadoopDruidConverterConfig; import io.druid.java.util.common.FileUtils; import io.druid.java.util.common.IAE; +import io.druid.java.util.common.IOE; import io.druid.java.util.common.ISE; import 
io.druid.java.util.common.RetryUtils; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.segment.ProgressIndicator; import io.druid.segment.SegmentUtils; @@ -225,13 +227,7 @@ public class JobHelper log.info("Renaming jar to path[%s]", hdfsPath); fs.rename(intermediateHdfsPath, hdfsPath); if (!fs.exists(hdfsPath)) { - throw new IOException( - String.format( - "File does not exist even after moving from[%s] to [%s]", - intermediateHdfsPath, - hdfsPath - ) - ); + throw new IOE("File does not exist even after moving from[%s] to [%s]", intermediateHdfsPath, hdfsPath); } } catch (IOException e) { @@ -318,8 +314,8 @@ public class JobHelper for (String propName : System.getProperties().stringPropertyNames()) { for (String prefix : listOfAllowedPrefix) { if (propName.startsWith(prefix)) { - mapJavaOpts = String.format("%s -D%s=%s", mapJavaOpts, propName, System.getProperty(propName)); - reduceJavaOpts = String.format("%s -D%s=%s", reduceJavaOpts, propName, System.getProperty(propName)); + mapJavaOpts = StringUtils.format("%s -D%s=%s", mapJavaOpts, propName, System.getProperty(propName)); + reduceJavaOpts = StringUtils.format("%s -D%s=%s", reduceJavaOpts, propName, System.getProperty(propName)); break; } } @@ -350,7 +346,7 @@ public class JobHelper try { Job job = Job.getInstance( new Configuration(), - String.format("%s-determine_partitions-%s", config.getDataSource(), config.getIntervals()) + StringUtils.format("%s-determine_partitions-%s", config.getDataSource(), config.getIntervals()) ); job.getConfiguration().set("io.sort.record.percent", "0.19"); @@ -370,7 +366,7 @@ public class JobHelper for (Jobby job : jobs) { if (failedMessage == null) { if (!job.run()) { - failedMessage = String.format("Job[%s] failed!", job.getClass()); + failedMessage = StringUtils.format("Job[%s] failed!", job.getClass()); } } } @@ -442,12 +438,10 @@ public class JobHelper .withBinaryVersion(SegmentUtils.getVersionFromDir(mergedBase)); if (!renameIndexFiles(outputFS, tmpPath, finalIndexZipFilePath)) { - throw new IOException( - String.format( - "Unable to rename [%s] to [%s]", - tmpPath.toUri().toString(), - finalIndexZipFilePath.toUri().toString() - ) + throw new IOE( + "Unable to rename [%s] to [%s]", + tmpPath.toUri().toString(), + finalIndexZipFilePath.toUri().toString() ); } @@ -478,7 +472,7 @@ public class JobHelper progressable.progress(); if (outputFS.exists(descriptorPath)) { if (!outputFS.delete(descriptorPath, false)) { - throw new IOException(String.format("Failed to delete descriptor at [%s]", descriptorPath)); + throw new IOE("Failed to delete descriptor at [%s]", descriptorPath); } } try (final OutputStream descriptorOut = outputFS.create( @@ -586,9 +580,9 @@ public class JobHelper { return new Path( prependFSIfNullScheme(fs, basePath), - String.format("./%s.%d", - dataSegmentPusher.makeIndexPathName(segmentTemplate, JobHelper.INDEX_ZIP), - taskAttemptID.getId() + StringUtils.format("./%s.%d", + dataSegmentPusher.makeIndexPathName(segmentTemplate, JobHelper.INDEX_ZIP), + taskAttemptID.getId() ) ); } @@ -739,10 +733,10 @@ public class JobHelper final URI segmentLocURI; if ("s3_zip".equals(type)) { if ("s3a".equals(loadSpec.get("S3Schema"))) { - segmentLocURI = URI.create(String.format("s3a://%s/%s", loadSpec.get("bucket"), loadSpec.get("key"))); + segmentLocURI = URI.create(StringUtils.format("s3a://%s/%s", loadSpec.get("bucket"), loadSpec.get("key"))); } else { - segmentLocURI = URI.create(String.format("s3n://%s/%s", loadSpec.get("bucket"), 
loadSpec.get("key"))); + segmentLocURI = URI.create(StringUtils.format("s3n://%s/%s", loadSpec.get("bucket"), loadSpec.get("key"))); } } else if ("hdfs".equals(type)) { segmentLocURI = URI.create(loadSpec.get("path").toString()); @@ -755,7 +749,7 @@ public class JobHelper // getHdfsStorageDir. But that wouldn't fix this issue for people who already have segments with ":". // Because of this we just URL encode the : making everything work as it should. segmentLocURI = URI.create( - String.format("gs://%s/%s", loadSpec.get("bucket"), loadSpec.get("path").toString().replace(":", "%3A")) + StringUtils.format("gs://%s/%s", loadSpec.get("bucket"), loadSpec.get("path").toString().replace(":", "%3A")) ); } else if ("local".equals(type)) { try { @@ -809,7 +803,7 @@ public class JobHelper public void startSection(String section) { context.progress(); - context.setStatus(String.format("STARTED [%s]", section)); + context.setStatus(StringUtils.format("STARTED [%s]", section)); } @Override @@ -817,14 +811,14 @@ public class JobHelper { log.info("Progress message for section [%s] : [%s]", section, message); context.progress(); - context.setStatus(String.format("PROGRESS [%s]", section)); + context.setStatus(StringUtils.format("PROGRESS [%s]", section)); } @Override public void stopSection(String section) { context.progress(); - context.setStatus(String.format("STOPPED [%s]", section)); + context.setStatus(StringUtils.format("STOPPED [%s]", section)); } }; } diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularUnprocessedPathSpec.java b/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularUnprocessedPathSpec.java index 8bacae47377..6d9a6b91b92 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularUnprocessedPathSpec.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/path/GranularUnprocessedPathSpec.java @@ -24,6 +24,7 @@ import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.collect.Ordering; import com.google.common.collect.Sets; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularity; import io.druid.indexer.HadoopDruidIndexerConfig; import io.druid.indexer.hadoop.FSSpideringIterator; @@ -64,7 +65,7 @@ public class GranularUnprocessedPathSpec extends GranularityPathSpec // This PathSpec breaks so many abstractions that we might as break some more Preconditions.checkState( config.getGranularitySpec() instanceof UniformGranularitySpec, - String.format( + StringUtils.format( "Cannot use %s without %s", GranularUnprocessedPathSpec.class.getSimpleName(), UniformGranularitySpec.class.getSimpleName() @@ -90,7 +91,7 @@ public class GranularUnprocessedPathSpec extends GranularityPathSpec DateTime timeBucket = new DateTime(entry.getKey()); long mTime = entry.getValue(); - String bucketOutput = String.format( + String bucketOutput = StringUtils.format( "%s/%s", config.getSchema().getIOConfig().getSegmentOutputPath(), segmentGranularity.toPath(timeBucket) diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopConverterJob.java b/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopConverterJob.java index 5fd7b8d6677..26f908ce5a5 100644 --- a/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopConverterJob.java +++ b/indexing-hadoop/src/main/java/io/druid/indexer/updater/HadoopConverterJob.java @@ -34,6 +34,7 @@ import io.druid.indexer.hadoop.DatasourceInputSplit; import io.druid.indexer.hadoop.WindowedDataSegment; import 
io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.timeline.DataSegment; import org.apache.commons.io.FileUtils; @@ -85,7 +86,7 @@ public class HadoopConverterJob if (segments.size() == 1) { final DataSegment segment = segments.get(0); jobConf.setJobName( - String.format( + StringUtils.format( "druid-convert-%s-%s-%s", segment.getDataSource(), segment.getInterval(), @@ -120,7 +121,7 @@ public class HadoopConverterJob ) ); jobConf.setJobName( - String.format( + StringUtils.format( "druid-convert-%s-%s", Arrays.toString(dataSources.toArray()), Arrays.toString(versions.toArray()) diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/BatchDeltaIngestionTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/BatchDeltaIngestionTest.java index 95491047024..bfee0b37673 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/BatchDeltaIngestionTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/BatchDeltaIngestionTest.java @@ -35,6 +35,7 @@ import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.indexer.hadoop.WindowedDataSegment; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.LongSumAggregatorFactory; @@ -280,7 +281,7 @@ public class BatchDeltaIngestionTest JobHelper.runJobs(ImmutableList.of(job), config); File segmentFolder = new File( - String.format( + StringUtils.format( "%s/%s/%s_%s/%s/0", config.getSchema().getIOConfig().getSegmentOutputPath(), config.getSchema().getDataSchema().getDataSource(), diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/HdfsClasspathSetupTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/HdfsClasspathSetupTest.java index 497690da4b7..0a3845c4899 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/HdfsClasspathSetupTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/HdfsClasspathSetupTest.java @@ -24,6 +24,7 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import io.druid.common.utils.UUIDUtils; +import io.druid.java.util.common.IOE; import io.druid.java.util.common.StringUtils; import junit.framework.Assert; import org.apache.commons.io.FileUtils; @@ -73,7 +74,7 @@ public class HdfsClasspathSetupTest { hdfsTmpDir = File.createTempFile("hdfsClasspathSetupTest", "dir"); if (!hdfsTmpDir.delete()) { - throw new IOException(String.format("Unable to delete hdfsTmpDir [%s]", hdfsTmpDir.getAbsolutePath())); + throw new IOE("Unable to delete hdfsTmpDir [%s]", hdfsTmpDir.getAbsolutePath()); } conf = new Configuration(true); conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsTmpDir.getAbsolutePath()); @@ -84,8 +85,8 @@ public class HdfsClasspathSetupTest public void setUp() throws IOException { // intermediatePath and finalClasspath are relative to hdfsTmpDir directory.
- intermediatePath = new Path(String.format("/tmp/classpath/%s", UUIDUtils.generateUuid())); - finalClasspath = new Path(String.format("/tmp/intermediate/%s", UUIDUtils.generateUuid())); + intermediatePath = new Path(StringUtils.format("/tmp/classpath/%s", UUIDUtils.generateUuid())); + finalClasspath = new Path(StringUtils.format("/tmp/intermediate/%s", UUIDUtils.generateUuid())); dummyJarFile = tempFolder.newFile("dummy-test.jar"); Files.copy( new ByteArrayInputStream(StringUtils.toUtf8(dummyJarString)), diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java index 37c2891b154..bc72a60c362 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/IndexGeneratorJobTest.java @@ -31,6 +31,7 @@ import io.druid.data.input.impl.InputRowParser; import io.druid.data.input.impl.JSONParseSpec; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; +import io.druid.java.util.common.RE; import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.aggregation.AggregatorFactory; @@ -542,7 +543,7 @@ public class IndexGeneratorJobTest specs.add(new SingleDimensionShardSpec("host", shardInfo[0], shardInfo[1], partitionNum++)); } } else { - throw new RuntimeException(String.format("Invalid partition type:[%s]", partitionType)); + throw new RE("Invalid partition type:[%s]", partitionType); } return specs; @@ -583,7 +584,7 @@ public class IndexGeneratorJobTest for (DateTime currTime = interval.getStart(); currTime.isBefore(interval.getEnd()); currTime = currTime.plusDays(1)) { Object[][] shardInfo = shardInfoForEachSegment[segmentNum++]; File segmentOutputFolder = new File( - String.format( + StringUtils.format( "%s/%s/%s_%s/%s", config.getSchema().getIOConfig().getSegmentOutputPath(), config.getSchema().getDataSchema().getDataSource(), @@ -643,7 +644,7 @@ public class IndexGeneratorJobTest Assert.assertEquals(singleDimensionShardInfo[0], spec.getStart()); Assert.assertEquals(singleDimensionShardInfo[1], spec.getEnd()); } else { - throw new RuntimeException(String.format("Invalid partition type:[%s]", partitionType)); + throw new RE("Invalid partition type:[%s]", partitionType); } } } diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/path/GranularityPathSpecTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/path/GranularityPathSpecTest.java index 154e03399e5..8a3952f0fb5 100644 --- a/indexing-hadoop/src/test/java/io/druid/indexer/path/GranularityPathSpecTest.java +++ b/indexing-hadoop/src/test/java/io/druid/indexer/path/GranularityPathSpecTest.java @@ -29,6 +29,7 @@ import io.druid.indexer.HadoopIOConfig; import io.druid.indexer.HadoopIngestionSpec; import io.druid.indexer.HadoopTuningConfig; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.granularity.PeriodGranularity; @@ -180,10 +181,10 @@ public class GranularityPathSpecTest String actual = job.getConfiguration().get("mapreduce.input.multipleinputs.dir.formats"); String expected = Joiner.on(",").join(Lists.newArrayList( - String.format(formatStr, testFolder.getRoot(), "test/y=2015/m=11/d=06/H=00/file1"), - String.format(formatStr, 
testFolder.getRoot(), "test/y=2015/m=11/d=06/H=02/file2"), - String.format(formatStr, testFolder.getRoot(), "test/y=2015/m=11/d=06/H=05/file3"), - String.format(formatStr, testFolder.getRoot(), "test/y=2015/m=11/d=06/H=05/file4") + StringUtils.format(formatStr, testFolder.getRoot(), "test/y=2015/m=11/d=06/H=00/file1"), + StringUtils.format(formatStr, testFolder.getRoot(), "test/y=2015/m=11/d=06/H=02/file2"), + StringUtils.format(formatStr, testFolder.getRoot(), "test/y=2015/m=11/d=06/H=05/file3"), + StringUtils.format(formatStr, testFolder.getRoot(), "test/y=2015/m=11/d=06/H=05/file4") )); Assert.assertEquals("Did not find expected input paths", expected, actual); @@ -231,9 +232,9 @@ public class GranularityPathSpecTest String expected = Joiner.on(",").join( Lists.newArrayList( - String.format(formatStr, testFolder.getRoot(), "test/2015/01/01/18/file3"), - String.format(formatStr, testFolder.getRoot(), "test/2015/01/02/00/file1"), - String.format(formatStr, testFolder.getRoot(), "test/2015/01/02/03/file2") + StringUtils.format(formatStr, testFolder.getRoot(), "test/2015/01/01/18/file3"), + StringUtils.format(formatStr, testFolder.getRoot(), "test/2015/01/02/00/file1"), + StringUtils.format(formatStr, testFolder.getRoot(), "test/2015/01/02/03/file2") ) ); diff --git a/indexing-service/src/main/java/io/druid/indexing/common/actions/RemoteTaskActionClient.java b/indexing-service/src/main/java/io/druid/indexing/common/actions/RemoteTaskActionClient.java index 78224e76d61..ccc96d83b5d 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/actions/RemoteTaskActionClient.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/actions/RemoteTaskActionClient.java @@ -32,9 +32,9 @@ import io.druid.curator.discovery.ServerDiscoverySelector; import io.druid.indexing.common.RetryPolicy; import io.druid.indexing.common.RetryPolicyFactory; import io.druid.indexing.common.task.Task; +import io.druid.java.util.common.IOE; import io.druid.java.util.common.ISE; import io.druid.java.util.common.logger.Logger; - import org.jboss.netty.channel.ChannelException; import org.jboss.netty.handler.codec.http.HttpMethod; import org.joda.time.Duration; @@ -121,12 +121,10 @@ public class RemoteTaskActionClient implements TaskActionClient return jsonMapper.convertValue(responseDict.get("result"), taskAction.getReturnTypeReference()); } else { // Want to retry, so throw an IOException. - throw new IOException( - String.format( - "Scary HTTP status returned: %s. Check your overlord[%s] logs for exceptions.", - response.getStatus(), - server.getHost() - ) + throw new IOE( + "Scary HTTP status returned: %s. 
Check your overlord[%s] logs for exceptions.", + response.getStatus(), + server.getHost() ); } } diff --git a/indexing-service/src/main/java/io/druid/indexing/common/index/YeOldePlumberSchool.java b/indexing-service/src/main/java/io/druid/indexing/common/index/YeOldePlumberSchool.java index 08b881f32d5..808cda53b94 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/index/YeOldePlumberSchool.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/index/YeOldePlumberSchool.java @@ -32,6 +32,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Sets; import io.druid.data.input.Committer; import io.druid.data.input.InputRow; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.query.Query; import io.druid.query.QueryRunner; @@ -248,7 +249,7 @@ public class YeOldePlumberSchool implements PlumberSchool private File getSpillDir(final int n) { - return new File(persistDir, String.format("spill%d", n)); + return new File(persistDir, StringUtils.format("spill%d", n)); } }; } diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/ConvertSegmentTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/ConvertSegmentTask.java index 94b295ac382..b223bdabcf1 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/ConvertSegmentTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/ConvertSegmentTask.java @@ -33,6 +33,7 @@ import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.actions.SegmentInsertAction; import io.druid.indexing.common.actions.SegmentListUsedAction; import io.druid.indexing.common.actions.TaskActionClient; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.FunctionalIterable; import io.druid.java.util.common.logger.Logger; import io.druid.segment.IndexIO; @@ -396,7 +397,7 @@ public class ConvertSegmentTask extends AbstractFixedIntervalTask // Appending to the version makes a new version that inherits most comparability parameters of the original // version, but is "newer" than said original version. - DataSegment updatedSegment = segment.withVersion(String.format("%s_v%s", segment.getVersion(), outVersion)); + DataSegment updatedSegment = segment.withVersion(StringUtils.format("%s_v%s", segment.getVersion(), outVersion)); updatedSegment = toolbox.getSegmentPusher().push(outLocation, updatedSegment); actionClient.submit(new SegmentInsertAction(Sets.newHashSet(updatedSegment))); diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopIndexTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopIndexTask.java index 54b6c5ae308..5f4833ee539 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopIndexTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/HadoopIndexTask.java @@ -44,6 +44,7 @@ import io.druid.indexing.common.actions.LockAcquireAction; import io.druid.indexing.common.actions.LockTryAcquireAction; import io.druid.indexing.common.actions.TaskActionClient; import io.druid.indexing.hadoop.OverlordActionBasedUsedSegmentLister; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.timeline.DataSegment; import org.joda.time.DateTime; @@ -93,7 +94,7 @@ public class HadoopIndexTask extends HadoopTask ) { super( - id != null ? 
id : String.format("index_hadoop_%s_%s", getTheDataSource(spec), new DateTime()), + id != null ? id : StringUtils.format("index_hadoop_%s_%s", getTheDataSource(spec), new DateTime()), getTheDataSource(spec), hadoopDependencyCoordinates == null ? (hadoopCoordinates == null ? null : ImmutableList.of(hadoopCoordinates)) diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/IndexTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/IndexTask.java index ce02f4adf00..cf705e64915 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/IndexTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/IndexTask.java @@ -58,6 +58,7 @@ import io.druid.indexing.common.actions.SegmentTransactionalInsertAction; import io.druid.indexing.common.actions.TaskActionClient; import io.druid.indexing.firehose.IngestSegmentFirehoseFactory; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.Comparators; import io.druid.java.util.common.logger.Logger; @@ -113,7 +114,7 @@ public class IndexTask extends AbstractTask private static String makeId(String id, IndexIngestionSpec ingestionSchema) { - return id != null ? id : String.format("index_%s_%s", makeDataSource(ingestionSchema), new DateTime()); + return id != null ? id : StringUtils.format("index_%s_%s", makeDataSource(ingestionSchema), new DateTime()); } private static String makeDataSource(IndexIngestionSpec ingestionSchema) @@ -456,7 +457,7 @@ public class IndexTask extends AbstractTask final ShardSpec shardSpec = shardSpecLookups.get(interval) .getShardSpec(inputRow.getTimestampFromEpoch(), inputRow); - final String sequenceName = String.format("index_%s_%s_%d", interval, version, shardSpec.getPartitionNum()); + final String sequenceName = StringUtils.format("index_%s_%s_%d", interval, version, shardSpec.getPartitionNum()); if (!sequenceNameToShardSpecMap.containsKey(sequenceName)) { final ShardSpec shardSpecForPublishing = ingestionSchema.getTuningConfig().isForceExtendableShardSpecs() diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/MergeTaskBase.java b/indexing-service/src/main/java/io/druid/indexing/common/task/MergeTaskBase.java index de49180bf08..6d9292db624 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/MergeTaskBase.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/MergeTaskBase.java @@ -43,6 +43,7 @@ import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.actions.SegmentListUsedAction; import io.druid.indexing.common.actions.TaskActionClient; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.segment.IndexIO; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; @@ -73,7 +74,7 @@ public abstract class MergeTaskBase extends AbstractFixedIntervalTask { super( // _not_ the version, just something uniqueish - id != null ? id : String.format( + id != null ? 
id : StringUtils.format( "merge_%s_%s", computeProcessingID(dataSource, segments), new DateTime().toString() ), dataSource, @@ -265,7 +266,7 @@ public abstract class MergeTaskBase extends AbstractFixedIntervalTask @Override public String apply(DataSegment x) { - return String.format( + return StringUtils.format( "%s_%s_%s_%s", x.getInterval().getStart(), x.getInterval().getEnd(), @@ -277,7 +278,7 @@ public abstract class MergeTaskBase extends AbstractFixedIntervalTask ) ); - return String.format( + return StringUtils.format( "%s_%s", dataSource, Hashing.sha1().hashString(segmentIDs, Charsets.UTF_8).toString() diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/NoopTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/NoopTask.java index aa041f09e33..487b8e401ee 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/NoopTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/NoopTask.java @@ -28,6 +28,7 @@ import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.actions.TaskActionClient; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import org.joda.time.DateTime; @@ -73,7 +74,7 @@ public class NoopTask extends AbstractTask ) { super( - id == null ? String.format("noop_%s_%s", new DateTime(), UUID.randomUUID().toString()) : id, + id == null ? StringUtils.format("noop_%s_%s", new DateTime(), UUID.randomUUID().toString()) : id, "none", context ); @@ -82,7 +83,7 @@ public class NoopTask extends AbstractTask this.isReadyTime = (isReadyTime == 0) ? defaultIsReadyTime : isReadyTime; this.isReadyResult = (isReadyResult == null) ? defaultIsReadyResult - : IsReadyResult.valueOf(isReadyResult.toUpperCase()); + : IsReadyResult.valueOf(StringUtils.toUpperCase(isReadyResult)); this.firehoseFactory = firehoseFactory; } diff --git a/indexing-service/src/main/java/io/druid/indexing/common/task/RealtimeIndexTask.java b/indexing-service/src/main/java/io/druid/indexing/common/task/RealtimeIndexTask.java index 3e36acff121..f55b7f2471f 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/task/RealtimeIndexTask.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/task/RealtimeIndexTask.java @@ -37,6 +37,7 @@ import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.actions.LockAcquireAction; import io.druid.indexing.common.actions.LockReleaseAction; import io.druid.indexing.common.actions.TaskActionClient; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.CloseQuietly; import io.druid.query.DruidMetrics; import io.druid.query.FinalizeResultsQueryRunner; @@ -96,7 +97,7 @@ public class RealtimeIndexTask extends AbstractTask for (int i = 0; i < Ints.BYTES * 2; ++i) { suffix.append((char) ('a' + ((randomBits >>> (i * 4)) & 0x0F))); } - return String.format( + return StringUtils.format( "index_realtime_%s_%d_%s_%s", dataSource, partitionNumber, @@ -144,7 +145,7 @@ public class RealtimeIndexTask extends AbstractTask { super( id == null ? 
makeTaskId(fireDepartment) : id, - String.format("index_realtime_%s", makeDatasource(fireDepartment)), + StringUtils.format("index_realtime_%s", makeDatasource(fireDepartment)), taskResource, makeDatasource(fireDepartment), context diff --git a/indexing-service/src/main/java/io/druid/indexing/common/tasklogs/FileTaskLogs.java b/indexing-service/src/main/java/io/druid/indexing/common/tasklogs/FileTaskLogs.java index 9e36fdca818..b4b3af8f9d5 100644 --- a/indexing-service/src/main/java/io/druid/indexing/common/tasklogs/FileTaskLogs.java +++ b/indexing-service/src/main/java/io/druid/indexing/common/tasklogs/FileTaskLogs.java @@ -24,6 +24,8 @@ import com.google.common.io.ByteSource; import com.google.common.io.Files; import com.google.inject.Inject; import io.druid.indexing.common.config.FileTaskLogsConfig; +import io.druid.java.util.common.IOE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.tasklogs.TaskLogs; import org.apache.commons.io.FileUtils; @@ -55,7 +57,7 @@ public class FileTaskLogs implements TaskLogs Files.copy(file, outputFile); log.info("Wrote task log to: %s", outputFile); } else { - throw new IOException(String.format("Unable to create task log dir[%s]", config.getDirectory())); + throw new IOE("Unable to create task log dir[%s]", config.getDirectory()); } } @@ -81,7 +83,7 @@ public class FileTaskLogs implements TaskLogs private File fileForTask(final String taskid) { - return new File(config.getDirectory(), String.format("%s.log", taskid)); + return new File(config.getDirectory(), StringUtils.format("%s.log", taskid)); } @Override @@ -98,7 +100,7 @@ public class FileTaskLogs implements TaskLogs if (taskLogDir.exists()) { if (!taskLogDir.isDirectory()) { - throw new IOException(String.format("taskLogDir [%s] must be a directory.", taskLogDir)); + throw new IOE("taskLogDir [%s] must be a directory.", taskLogDir); } File[] files = taskLogDir.listFiles( diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java b/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java index b6818124290..c57959ed2de 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/ForkingTaskRunner.java @@ -53,6 +53,8 @@ import io.druid.indexing.common.tasklogs.LogUtils; import io.druid.indexing.overlord.autoscaling.ScalingStats; import io.druid.indexing.overlord.config.ForkingTaskRunnerConfig; import io.druid.indexing.worker.config.WorkerConfig; +import io.druid.java.util.common.IOE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.io.Closer; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; @@ -245,7 +247,7 @@ public class ForkingTaskRunner implements TaskRunner, TaskLogStreamer final Closer closer = Closer.create(); try { if (!attemptDir.mkdirs()) { - throw new IOException(String.format("Could not create directories: %s", attemptDir)); + throw new IOE("Could not create directories: %s", attemptDir); } final File taskFile = new File(taskDir, "task.json"); @@ -309,7 +311,7 @@ public class ForkingTaskRunner implements TaskRunner, TaskLogStreamer && !ForkingTaskRunnerConfig.JAVA_OPTS_ARRAY_PROPERTY.equals(propName) ) { command.add( - String.format( + StringUtils.format( "-D%s=%s", propName, props.getProperty(propName) @@ -323,7 +325,7 @@ public class ForkingTaskRunner implements TaskRunner, TaskLogStreamer for (String propName : 
props.stringPropertyNames()) { if (propName.startsWith(CHILD_PROPERTY_PREFIX)) { command.add( - String.format( + StringUtils.format( "-D%s=%s", propName.substring(CHILD_PROPERTY_PREFIX.length()), props.getProperty(propName) @@ -338,7 +340,7 @@ public class ForkingTaskRunner implements TaskRunner, TaskLogStreamer for (String propName : context.keySet()) { if (propName.startsWith(CHILD_PROPERTY_PREFIX)) { command.add( - String.format( + StringUtils.format( "-D%s=%s", propName.substring(CHILD_PROPERTY_PREFIX.length()), task.getContextValue(propName) @@ -350,7 +352,7 @@ public class ForkingTaskRunner implements TaskRunner, TaskLogStreamer // Add dataSource and taskId for metrics or logging command.add( - String.format( + StringUtils.format( "-D%s%s=%s", MonitorsConfig.METRIC_DIMENSION_PREFIX, DruidMetrics.DATASOURCE, @@ -358,7 +360,7 @@ public class ForkingTaskRunner implements TaskRunner, TaskLogStreamer ) ); command.add( - String.format( + StringUtils.format( "-D%s%s=%s", MonitorsConfig.METRIC_DIMENSION_PREFIX, DruidMetrics.TASK_ID, @@ -366,8 +368,8 @@ public class ForkingTaskRunner implements TaskRunner, TaskLogStreamer ) ); - command.add(String.format("-Ddruid.host=%s", childHost)); - command.add(String.format("-Ddruid.port=%d", childPort)); + command.add(StringUtils.format("-Ddruid.host=%s", childHost)); + command.add(StringUtils.format("-Ddruid.port=%d", childPort)); /** * These are not enabled by default, to allow the user to either set or not set them * Users are strongly encouraged to set them in druid.indexer.runner.javaOpts @@ -378,14 +380,14 @@ public class ForkingTaskRunner implements TaskRunner, TaskLogStreamer */ if (config.isSeparateIngestionEndpoint()) { - command.add(String.format( + command.add(StringUtils.format( "-Ddruid.indexer.task.chathandler.service=%s", "placeholder/serviceName" )); // Actual serviceName will be passed by the EventReceiverFirehose when it registers itself with ChatHandlerProvider // Thus, "placeholder/serviceName" will be ignored - command.add(String.format("-Ddruid.indexer.task.chathandler.host=%s", childHost)); - command.add(String.format( + command.add(StringUtils.format("-Ddruid.indexer.task.chathandler.host=%s", childHost)); + command.add(StringUtils.format( "-Ddruid.indexer.task.chathandler.port=%d", childChatHandlerPort )); @@ -432,7 +434,7 @@ public class ForkingTaskRunner implements TaskRunner, TaskLogStreamer // This will block for a while.
So we append the thread information with more details final String priorThreadName = Thread.currentThread().getName(); - Thread.currentThread().setName(String.format("%s-[%s]", priorThreadName, task.getId())); + Thread.currentThread().setName(StringUtils.format("%s-[%s]", priorThreadName, task.getId())); try (final OutputStream toLogfile = logSink.openStream()) { ByteStreams.copy(processHolder.process.getInputStream(), toLogfile); diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/MetadataTaskStorage.java b/indexing-service/src/main/java/io/druid/indexing/overlord/MetadataTaskStorage.java index 49c192031e9..95cff9a35fb 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/MetadataTaskStorage.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/MetadataTaskStorage.java @@ -34,6 +34,7 @@ import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.actions.TaskAction; import io.druid.indexing.common.config.TaskStorageConfig; import io.druid.indexing.common.task.Task; +import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; @@ -162,7 +163,7 @@ public class MetadataTaskStorage implements TaskStorage status ); if (!set) { - throw new IllegalStateException(String.format("Active task not found: %s", status.getId())); + throw new ISE("Active task not found: %s", status.getId()); } } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java b/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java index fb66e53e610..14543347fd2 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/RemoteTaskRunner.java @@ -63,6 +63,7 @@ import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; import io.druid.indexing.overlord.setup.WorkerSelectStrategy; import io.druid.indexing.worker.TaskAnnouncement; import io.druid.indexing.worker.Worker; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.io.Closer; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; @@ -525,7 +526,7 @@ public class RemoteTaskRunner implements WorkerTaskRunner, TaskLogStreamer } URL url = null; try { - url = makeWorkerURL(zkWorker.getWorker(), String.format("/task/%s/shutdown", taskId)); + url = makeWorkerURL(zkWorker.getWorker(), StringUtils.format("/task/%s/shutdown", taskId)); final StatusResponseHolder response = httpClient.go( new Request(HttpMethod.POST, url), RESPONSE_HANDLER, @@ -563,7 +564,7 @@ public class RemoteTaskRunner implements WorkerTaskRunner, TaskLogStreamer return Optional.absent(); } else { // Worker is still running this task - final URL url = makeWorkerURL(zkWorker.getWorker(), String.format("/task/%s/log?offset=%d", taskId, offset)); + final URL url = makeWorkerURL(zkWorker.getWorker(), StringUtils.format("/task/%s/log?offset=%d", taskId, offset)); return Optional.of( new ByteSource() { @@ -595,7 +596,7 @@ public class RemoteTaskRunner implements WorkerTaskRunner, TaskLogStreamer Preconditions.checkArgument(path.startsWith("/"), "path must start with '/': %s", path); try { - return new URL(String.format("http://%s/druid/worker/v1%s", worker.getHost(), path)); + return new URL(StringUtils.format("http://%s/druid/worker/v1%s", worker.getHost(), path)); } catch (MalformedURLException e) { throw 
Throwables.propagate(e); @@ -995,7 +996,7 @@ public class RemoteTaskRunner implements WorkerTaskRunner, TaskLogStreamer if (zkWorkers.putIfAbsent(worker.getHost(), zkWorker) == null) { retVal.set(zkWorker); } else { - final String message = String.format( + final String message = StringUtils.format( "WTF?! Tried to add already-existing worker[%s]", worker.getHost() ); diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/http/OverlordResource.java b/indexing-service/src/main/java/io/druid/indexing/overlord/http/OverlordResource.java index 9683d5dcf5d..4b25c540b63 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/http/OverlordResource.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/http/OverlordResource.java @@ -53,6 +53,7 @@ import io.druid.indexing.overlord.autoscaling.ScalingStats; import io.druid.indexing.overlord.http.security.TaskResourceFilter; import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; import io.druid.java.util.common.Pair; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.metadata.EntryExistsException; import io.druid.server.http.security.ConfigResourceFilter; @@ -163,7 +164,7 @@ public class OverlordResource } catch (EntryExistsException e) { return Response.status(Response.Status.BAD_REQUEST) - .entity(ImmutableMap.of("error", String.format("Task[%s] already exists!", task.getId()))) + .entity(ImmutableMap.of("error", StringUtils.format("Task[%s] already exists!", task.getId()))) .build(); } } @@ -506,7 +507,7 @@ public class OverlordResource if (!optionalTask.isPresent()) { throw new WebApplicationException( Response.serverError().entity( - String.format("No task information found for task with id: [%s]", taskId) + StringUtils.format("No task information found for task with id: [%s]", taskId) ).build() ); } @@ -697,7 +698,7 @@ public class OverlordResource if (!optionalTask.isPresent()) { throw new WebApplicationException( Response.serverError().entity( - String.format("No task information found for task with id: [%s]", taskId) + StringUtils.format("No task information found for task with id: [%s]", taskId) ).build() ); } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/http/security/SupervisorResourceFilter.java b/indexing-service/src/main/java/io/druid/indexing/overlord/http/security/SupervisorResourceFilter.java index fd6893455b3..6bab756ef33 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/http/security/SupervisorResourceFilter.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/http/security/SupervisorResourceFilter.java @@ -28,6 +28,7 @@ import com.google.inject.Inject; import com.sun.jersey.spi.container.ContainerRequest; import io.druid.indexing.overlord.supervisor.SupervisorManager; import io.druid.indexing.overlord.supervisor.SupervisorSpec; +import io.druid.java.util.common.StringUtils; import io.druid.server.http.security.AbstractResourceFilter; import io.druid.server.security.Access; import io.druid.server.security.AuthConfig; @@ -77,7 +78,7 @@ public class SupervisorResourceFilter extends AbstractResourceFilter if (!supervisorSpecOptional.isPresent()) { throw new WebApplicationException( Response.status(Response.Status.BAD_REQUEST) - .entity(String.format("Cannot find any supervisor with id: [%s]", supervisorId)) + .entity(StringUtils.format("Cannot find any supervisor with id: [%s]", supervisorId)) .build() ); } @@ -102,7 +103,7 @@ public class 
SupervisorResourceFilter extends AbstractResourceFilter if (!authResult.isAllowed()) { throw new WebApplicationException(Response.status(Response.Status.FORBIDDEN) .entity( - String.format("Access-Check-Result: %s", authResult.toString()) + StringUtils.format("Access-Check-Result: %s", authResult.toString()) ) .build()); } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/http/security/TaskResourceFilter.java b/indexing-service/src/main/java/io/druid/indexing/overlord/http/security/TaskResourceFilter.java index af832236893..310de74a782 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/http/security/TaskResourceFilter.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/http/security/TaskResourceFilter.java @@ -28,6 +28,7 @@ import com.google.inject.Inject; import com.sun.jersey.spi.container.ContainerRequest; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.TaskStorageQueryAdapter; +import io.druid.java.util.common.StringUtils; import io.druid.server.http.security.AbstractResourceFilter; import io.druid.server.security.Access; import io.druid.server.security.AuthConfig; @@ -83,7 +84,7 @@ public class TaskResourceFilter extends AbstractResourceFilter if (!taskOptional.isPresent()) { throw new WebApplicationException( Response.status(Response.Status.BAD_REQUEST) - .entity(String.format("Cannot find any task with id: [%s]", taskId)) + .entity(StringUtils.format("Cannot find any task with id: [%s]", taskId)) .build() ); } @@ -101,7 +102,7 @@ public class TaskResourceFilter extends AbstractResourceFilter if (!authResult.isAllowed()) { throw new WebApplicationException(Response.status(Response.Status.FORBIDDEN) .entity( - String.format("Access-Check-Result: %s", authResult.toString()) + StringUtils.format("Access-Check-Result: %s", authResult.toString()) ) .build()); } diff --git a/indexing-service/src/main/java/io/druid/indexing/overlord/supervisor/SupervisorResource.java b/indexing-service/src/main/java/io/druid/indexing/overlord/supervisor/SupervisorResource.java index 1f23a9aa4cd..365003573a6 100644 --- a/indexing-service/src/main/java/io/druid/indexing/overlord/supervisor/SupervisorResource.java +++ b/indexing-service/src/main/java/io/druid/indexing/overlord/supervisor/SupervisorResource.java @@ -31,6 +31,7 @@ import com.google.inject.Inject; import com.sun.jersey.spi.container.ResourceFilters; import io.druid.indexing.overlord.TaskMaster; import io.druid.indexing.overlord.http.security.SupervisorResourceFilter; +import io.druid.java.util.common.StringUtils; import io.druid.server.security.Access; import io.druid.server.security.Action; import io.druid.server.security.AuthConfig; @@ -176,7 +177,7 @@ public class SupervisorResource Optional spec = manager.getSupervisorSpec(id); if (!spec.isPresent()) { return Response.status(Response.Status.NOT_FOUND) - .entity(ImmutableMap.of("error", String.format("[%s] does not exist", id))) + .entity(ImmutableMap.of("error", StringUtils.format("[%s] does not exist", id))) .build(); } @@ -201,7 +202,7 @@ public class SupervisorResource Optional spec = manager.getSupervisorStatus(id); if (!spec.isPresent()) { return Response.status(Response.Status.NOT_FOUND) - .entity(ImmutableMap.of("error", String.format("[%s] does not exist", id))) + .entity(ImmutableMap.of("error", StringUtils.format("[%s] does not exist", id))) .build(); } @@ -227,7 +228,7 @@ public class SupervisorResource return Response.ok(ImmutableMap.of("id", id)).build(); } else { return 
Response.status(Response.Status.NOT_FOUND) - .entity(ImmutableMap.of("error", String.format("[%s] does not exist", id))) + .entity(ImmutableMap.of("error", StringUtils.format("[%s] does not exist", id))) .build(); } } @@ -297,7 +298,7 @@ public class SupervisorResource .entity( ImmutableMap.of( "error", - String.format( + StringUtils.format( "No history for [%s] (history available for %s)", id, history.keySet() @@ -327,7 +328,7 @@ public class SupervisorResource return Response.ok(ImmutableMap.of("id", id)).build(); } else { return Response.status(Response.Status.NOT_FOUND) - .entity(ImmutableMap.of("error", String.format("[%s] does not exist", id))) + .entity(ImmutableMap.of("error", StringUtils.format("[%s] does not exist", id))) .build(); } } diff --git a/indexing-service/src/main/java/io/druid/indexing/worker/http/WorkerResource.java b/indexing-service/src/main/java/io/druid/indexing/worker/http/WorkerResource.java index 7336fed209e..4272d778759 100644 --- a/indexing-service/src/main/java/io/druid/indexing/worker/http/WorkerResource.java +++ b/indexing-service/src/main/java/io/druid/indexing/worker/http/WorkerResource.java @@ -31,6 +31,7 @@ import io.druid.indexing.overlord.TaskRunner; import io.druid.indexing.overlord.TaskRunnerWorkItem; import io.druid.indexing.worker.Worker; import io.druid.indexing.worker.WorkerCuratorCoordinator; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.server.http.security.ConfigResourceFilter; import io.druid.server.http.security.StateResourceFilter; @@ -180,7 +181,7 @@ public class WorkerResource { if (!(taskRunner instanceof TaskLogStreamer)) { return Response.status(501) - .entity(String.format( + .entity(StringUtils.format( "Log streaming not supported by [%s]", taskRunner.getClass().getCanonicalName() )) diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopTaskTest.java index 50e05274f38..41814078b69 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopTaskTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/HadoopTaskTest.java @@ -26,6 +26,7 @@ import io.druid.indexing.common.TaskStatus; import io.druid.indexing.common.TaskToolbox; import io.druid.indexing.common.actions.TaskActionClient; import io.druid.indexing.common.config.TaskConfig; +import io.druid.java.util.common.StringUtils; import org.apache.hadoop.yarn.util.ApplicationClassLoader; import org.easymock.EasyMock; import org.junit.Assert; @@ -98,6 +99,6 @@ public class HadoopTaskTest Assert.assertTrue(classLoader instanceof URLClassLoader); final ClassLoader appLoader = HadoopDruidConverterConfig.class.getClassLoader(); - Assert.assertNotEquals(String.format("ClassLoader [%s] is not isolated!", classLoader), appLoader, classLoader); + Assert.assertNotEquals(StringUtils.format("ClassLoader [%s] is not isolated!", classLoader), appLoader, classLoader); } } diff --git a/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java b/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java index 82d09be4c44..a060f78591c 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/task/RealtimeIndexTaskTest.java @@ -781,7 +781,7 @@ public class RealtimeIndexTaskTest // Corrupt the data: final File smooshFile = new File( 
- String.format( + StringUtils.format( "%s/persistent/task/%s/work/persist/%s/%s_%s/0/00000.smoosh", directory, task1.getId(), diff --git a/indexing-service/src/test/java/io/druid/indexing/common/tasklogs/FileTaskLogsTest.java b/indexing-service/src/test/java/io/druid/indexing/common/tasklogs/FileTaskLogsTest.java index 5d3de16b6d1..1d90a58ff75 100644 --- a/indexing-service/src/test/java/io/druid/indexing/common/tasklogs/FileTaskLogsTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/common/tasklogs/FileTaskLogsTest.java @@ -61,7 +61,7 @@ public class FileTaskLogsTest for (Map.Entry entry : expected.entrySet()) { final byte[] bytes = ByteStreams.toByteArray(taskLogs.streamTaskLog("foo", entry.getKey()).get().getInput()); final String string = StringUtils.fromUtf8(bytes); - Assert.assertEquals(String.format("Read with offset %,d", entry.getKey()), string, entry.getValue()); + Assert.assertEquals(StringUtils.format("Read with offset %,d", entry.getKey()), string, entry.getValue()); } } finally { diff --git a/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java b/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java index 1341e2f0706..067ec8b4cd4 100644 --- a/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java @@ -54,6 +54,8 @@ import io.druid.indexing.common.config.TaskStorageConfig; import io.druid.indexing.overlord.HeapMemoryTaskStorage; import io.druid.indexing.overlord.TaskLockbox; import io.druid.indexing.overlord.supervisor.SupervisorManager; +import io.druid.java.util.common.IOE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.metadata.IndexerSQLMetadataStorageCoordinator; import io.druid.query.aggregation.DoubleSumAggregatorFactory; @@ -145,7 +147,7 @@ public class IngestSegmentFirehoseFactoryTest } if (!persistDir.mkdirs() && !persistDir.exists()) { - throw new IOException(String.format("Could not create directory at [%s]", persistDir.getAbsolutePath())); + throw new IOE("Could not create directory at [%s]", persistDir.getAbsolutePath()); } INDEX_MERGER_V9.persist(index, persistDir, indexSpec); @@ -333,7 +335,7 @@ public class IngestSegmentFirehoseFactoryTest ), INDEX_IO ), - String.format( + StringUtils.format( "DimNames[%s]MetricNames[%s]ParserDimNames[%s]", dim_names == null ? "null" : "dims", metric_names == null ? 
"null" : "metrics", diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTestUtils.java b/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTestUtils.java index db3f5c25950..f1e2544622a 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTestUtils.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/RemoteTaskRunnerTestUtils.java @@ -36,6 +36,7 @@ import io.druid.indexing.overlord.config.RemoteTaskRunnerConfig; import io.druid.indexing.overlord.setup.WorkerBehaviorConfig; import io.druid.indexing.worker.TaskAnnouncement; import io.druid.indexing.worker.Worker; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.concurrent.ScheduledExecutors; import io.druid.server.initialization.IndexerZkConfig; import io.druid.server.initialization.ZkPathsConfig; @@ -53,9 +54,9 @@ public class RemoteTaskRunnerTestUtils { static final Joiner joiner = Joiner.on("/"); static final String basePath = "/test/druid"; - static final String announcementsPath = String.format("%s/indexer/announcements", basePath); - static final String tasksPath = String.format("%s/indexer/tasks", basePath); - static final String statusPath = String.format("%s/indexer/status", basePath); + static final String announcementsPath = StringUtils.format("%s/indexer/announcements", basePath); + static final String tasksPath = StringUtils.format("%s/indexer/tasks", basePath); + static final String statusPath = StringUtils.format("%s/indexer/status", basePath); static final TaskLocation DUMMY_LOCATION = TaskLocation.create("dummy", 9000); private TestingCluster testingCluster; diff --git a/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java b/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java index 64d0ca48c3d..47cc0b3ca64 100644 --- a/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/overlord/TaskLifecycleTest.java @@ -70,6 +70,8 @@ import io.druid.indexing.test.TestIndexerMetadataStorageCoordinator; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; +import io.druid.java.util.common.RE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Comparators; import io.druid.metadata.SQLMetadataStorageActionHandlerFactory; @@ -402,7 +404,7 @@ public class TaskLifecycleTest } default: { - throw new RuntimeException(String.format("Unknown task storage type [%s]", taskStorageType)); + throw new RE("Unknown task storage type [%s]", taskStorageType); } } tsqa = new TaskStorageQueryAdapter(taskStorage); @@ -1168,7 +1170,7 @@ public class TaskLifecycleTest private RealtimeIndexTask newRealtimeIndexTask() { - String taskId = String.format("rt_task_%s", System.currentTimeMillis()); + String taskId = StringUtils.format("rt_task_%s", System.currentTimeMillis()); DataSchema dataSchema = new DataSchema( "test_ds", null, diff --git a/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskMonitorTest.java b/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskMonitorTest.java index 524115a5b26..f128d9e9b06 100644 --- a/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskMonitorTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/worker/WorkerTaskMonitorTest.java @@ -38,6 
+38,7 @@ import io.druid.indexing.common.config.TaskConfig; import io.druid.indexing.common.task.Task; import io.druid.indexing.overlord.TestRemoteTaskRunnerConfig; import io.druid.indexing.overlord.ThreadPoolTaskRunner; +import io.druid.java.util.common.StringUtils; import io.druid.segment.IndexIO; import io.druid.segment.IndexMergerV9; import io.druid.segment.loading.SegmentLoaderConfig; @@ -67,8 +68,8 @@ public class WorkerTaskMonitorTest { private static final Joiner joiner = Joiner.on("/"); private static final String basePath = "/test/druid"; - private static final String tasksPath = String.format("%s/indexer/tasks/worker", basePath); - private static final String statusPath = String.format("%s/indexer/status/worker", basePath); + private static final String tasksPath = StringUtils.format("%s/indexer/tasks/worker", basePath); + private static final String statusPath = StringUtils.format("%s/indexer/status/worker", basePath); private static final DruidNode DUMMY_NODE = new DruidNode("dummy", "dummy", 9000); private TestingCluster testingCluster; diff --git a/indexing-service/src/test/java/io/druid/indexing/worker/http/WorkerResourceTest.java b/indexing-service/src/test/java/io/druid/indexing/worker/http/WorkerResourceTest.java index edfce8a296a..987314aa14c 100644 --- a/indexing-service/src/test/java/io/druid/indexing/worker/http/WorkerResourceTest.java +++ b/indexing-service/src/test/java/io/druid/indexing/worker/http/WorkerResourceTest.java @@ -25,6 +25,7 @@ import io.druid.indexing.overlord.config.RemoteTaskRunnerConfig; import io.druid.indexing.worker.Worker; import io.druid.indexing.worker.WorkerCuratorCoordinator; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.StringUtils; import io.druid.server.initialization.IndexerZkConfig; import io.druid.server.initialization.ZkPathsConfig; import org.apache.curator.framework.CuratorFramework; @@ -44,7 +45,7 @@ public class WorkerResourceTest { private static final ObjectMapper jsonMapper = new DefaultObjectMapper(); private static final String basePath = "/test/druid"; - private static final String announcementsPath = String.format("%s/indexer/announcements/host", basePath); + private static final String announcementsPath = StringUtils.format("%s/indexer/announcements/host", basePath); private TestingCluster testingCluster; private CuratorFramework cf; diff --git a/indexing-service/src/test/java/io/druid/server/initialization/IndexerZkConfigTest.java b/indexing-service/src/test/java/io/druid/server/initialization/IndexerZkConfigTest.java index 9411fa2761d..db8f973ff51 100644 --- a/indexing-service/src/test/java/io/druid/server/initialization/IndexerZkConfigTest.java +++ b/indexing-service/src/test/java/io/druid/server/initialization/IndexerZkConfigTest.java @@ -31,6 +31,7 @@ import io.druid.guice.JsonConfigProvider; import io.druid.guice.JsonConfigurator; import io.druid.initialization.Initialization; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.StringUtils; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; @@ -75,12 +76,12 @@ public class IndexerZkConfigTest { for (Field field : IndexerZkConfig.class.getDeclaredFields()) { if (null != field.getAnnotation(JsonProperty.class)) { - clobberableProperties.add(String.format("%s.%s", indexerPropertyString, field.getName())); + clobberableProperties.add(StringUtils.format("%s.%s", indexerPropertyString, field.getName())); } } for (Field field : ZkPathsConfig.class.getDeclaredFields()) { if (null != 
field.getAnnotation(JsonProperty.class)) { - clobberableProperties.add(String.format("%s.%s", zkServiceConfigString, field.getName())); + clobberableProperties.add(StringUtils.format("%s.%s", zkServiceConfigString, field.getName())); } } } @@ -103,10 +104,10 @@ public class IndexerZkConfigTest { for (Field field : ZkPathsConfig.class.getDeclaredFields()) { if (null != field.getAnnotation(JsonProperty.class)) { - String property = String.format("%s.%s", zkServiceConfigString, field.getName()); - String getter = String.format( + String property = StringUtils.format("%s.%s", zkServiceConfigString, field.getName()); + String getter = StringUtils.format( "get%s%s", - field.getName().substring(0, 1).toUpperCase(), + StringUtils.toUpperCase(field.getName().substring(0, 1)), field.getName().substring(1) ); Method method = ZkPathsConfig.class.getDeclaredMethod(getter); @@ -121,10 +122,10 @@ public class IndexerZkConfigTest { for (Field field : IndexerZkConfig.class.getDeclaredFields()) { if (null != field.getAnnotation(JsonProperty.class)) { - String property = String.format("%s.%s", indexerPropertyString, field.getName()); - String getter = String.format( + String property = StringUtils.format("%s.%s", indexerPropertyString, field.getName()); + String getter = StringUtils.format( "get%s%s", - field.getName().substring(0, 1).toUpperCase(), + StringUtils.toUpperCase(field.getName().substring(0, 1)), field.getName().substring(1) ); Method method = IndexerZkConfig.class.getDeclaredMethod(getter); diff --git a/integration-tests/src/main/java/io/druid/testing/ConfigFileConfigProvider.java b/integration-tests/src/main/java/io/druid/testing/ConfigFileConfigProvider.java index 8c78ef65a8a..f3ae8658778 100644 --- a/integration-tests/src/main/java/io/druid/testing/ConfigFileConfigProvider.java +++ b/integration-tests/src/main/java/io/druid/testing/ConfigFileConfigProvider.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import java.io.File; @@ -68,27 +69,27 @@ public class ConfigFileConfigProvider implements IntegrationTestingConfigProvide if (routerUrl == null) { String routerHost = props.get("router_host"); if (null != routerHost) { - routerUrl = String.format("http://%s:%s", routerHost, props.get("router_port")); + routerUrl = StringUtils.format("http://%s:%s", routerHost, props.get("router_port")); } } brokerUrl = props.get("broker_url"); if (brokerUrl == null) { - brokerUrl = String.format("http://%s:%s", props.get("broker_host"), props.get("broker_port")); + brokerUrl = StringUtils.format("http://%s:%s", props.get("broker_host"), props.get("broker_port")); } historicalUrl = props.get("historical_url"); if (historicalUrl == null) { - historicalUrl = String.format("http://%s:%s", props.get("historical_host"), props.get("historical_port")); + historicalUrl = StringUtils.format("http://%s:%s", props.get("historical_host"), props.get("historical_port")); } coordinatorUrl = props.get("coordinator_url"); if (coordinatorUrl == null) { - coordinatorUrl = String.format("http://%s:%s", props.get("coordinator_host"), props.get("coordinator_port")); + coordinatorUrl = StringUtils.format("http://%s:%s", props.get("coordinator_host"), props.get("coordinator_port")); } indexerUrl = props.get("indexer_url"); if (indexerUrl == null) { - 
indexerUrl = String.format("http://%s:%s", props.get("indexer_host"), props.get("indexer_port")); + indexerUrl = StringUtils.format("http://%s:%s", props.get("indexer_host"), props.get("indexer_port")); } middleManagerHost = props.get("middlemanager_host"); diff --git a/integration-tests/src/main/java/io/druid/testing/clients/ClientInfoResourceTestClient.java b/integration-tests/src/main/java/io/druid/testing/clients/ClientInfoResourceTestClient.java index c5871d361a1..9c66444d211 100644 --- a/integration-tests/src/main/java/io/druid/testing/clients/ClientInfoResourceTestClient.java +++ b/integration-tests/src/main/java/io/druid/testing/clients/ClientInfoResourceTestClient.java @@ -29,6 +29,7 @@ import com.metamx.http.client.Request; import com.metamx.http.client.response.StatusResponseHandler; import com.metamx.http.client.response.StatusResponseHolder; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.testing.IntegrationTestingConfig; import io.druid.testing.guice.TestClient; import org.jboss.netty.handler.codec.http.HttpMethod; @@ -59,7 +60,7 @@ public class ClientInfoResourceTestClient private String getBrokerURL() { - return String.format( + return StringUtils.format( "%s/druid/v2/datasources", brokerUrl ); @@ -71,7 +72,7 @@ public class ClientInfoResourceTestClient StatusResponseHolder response = httpClient.go( new Request( HttpMethod.GET, - new URL(String.format("%s/%s/dimensions?interval=%s", getBrokerURL(), dataSource, interval)) + new URL(StringUtils.format("%s/%s/dimensions?interval=%s", getBrokerURL(), dataSource, interval)) ), responseHandler ).get(); diff --git a/integration-tests/src/main/java/io/druid/testing/clients/CoordinatorResourceTestClient.java b/integration-tests/src/main/java/io/druid/testing/clients/CoordinatorResourceTestClient.java index 5ccc799f007..fe0c0999909 100644 --- a/integration-tests/src/main/java/io/druid/testing/clients/CoordinatorResourceTestClient.java +++ b/integration-tests/src/main/java/io/druid/testing/clients/CoordinatorResourceTestClient.java @@ -29,6 +29,7 @@ import com.metamx.http.client.Request; import com.metamx.http.client.response.StatusResponseHandler; import com.metamx.http.client.response.StatusResponseHolder; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.testing.IntegrationTestingConfig; import io.druid.testing.guice.TestClient; import org.jboss.netty.handler.codec.http.HttpMethod; @@ -60,7 +61,7 @@ public class CoordinatorResourceTestClient private String getCoordinatorURL() { - return String.format( + return StringUtils.format( "%s/druid/coordinator/v1/", coordinator ); @@ -68,12 +69,12 @@ public class CoordinatorResourceTestClient private String getIntervalsURL(String dataSource) { - return String.format("%sdatasources/%s/intervals", getCoordinatorURL(), dataSource); + return StringUtils.format("%sdatasources/%s/intervals", getCoordinatorURL(), dataSource); } private String getLoadStatusURL() { - return String.format("%s%s", getCoordinatorURL(), "loadstatus"); + return StringUtils.format("%s%s", getCoordinatorURL(), "loadstatus"); } // return a list of the segment dates for the specified datasource @@ -124,7 +125,7 @@ public class CoordinatorResourceTestClient try { makeRequest( HttpMethod.DELETE, - String.format( + StringUtils.format( "%sdatasources/%s", getCoordinatorURL(), dataSource @@ -141,7 +142,7 @@ public class CoordinatorResourceTestClient try { makeRequest( HttpMethod.DELETE, - String.format( + StringUtils.format( 
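The URL templates in this file only use %s, which is locale-neutral; routing them through StringUtils.format() anyway keeps one code path for all conversions, including the locale-sensitive ones. A standalone demo (not Druid code) of why that matters:

    import java.util.Locale;

    public class LocaleFormatDemo
    {
      public static void main(String[] args)
      {
        // Numeric conversions follow the formatting locale: German uses a
        // decimal comma, which silently corrupts machine-readable output.
        System.out.println(String.format(Locale.GERMAN, "%.2f", 1.5));  // prints 1,50
        System.out.println(String.format(Locale.ENGLISH, "%.2f", 1.5)); // prints 1.50
      }
    }
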
"%sdatasources/%s/intervals/%s", getCoordinatorURL(), dataSource, interval.toString().replace("/", "_") diff --git a/integration-tests/src/main/java/io/druid/testing/clients/EventReceiverFirehoseTestClient.java b/integration-tests/src/main/java/io/druid/testing/clients/EventReceiverFirehoseTestClient.java index 80e45656468..eb7593c48d9 100644 --- a/integration-tests/src/main/java/io/druid/testing/clients/EventReceiverFirehoseTestClient.java +++ b/integration-tests/src/main/java/io/druid/testing/clients/EventReceiverFirehoseTestClient.java @@ -29,6 +29,7 @@ import com.metamx.http.client.Request; import com.metamx.http.client.response.StatusResponseHandler; import com.metamx.http.client.response.StatusResponseHolder; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.testing.guice.TestClient; import org.jboss.netty.handler.codec.http.HttpMethod; import org.jboss.netty.handler.codec.http.HttpResponseStatus; @@ -69,7 +70,7 @@ public class EventReceiverFirehoseTestClient private String getURL() { - return String.format( + return StringUtils.format( "http://%s/druid/worker/v1/chat/%s/push-events/", host, chatID diff --git a/integration-tests/src/main/java/io/druid/testing/clients/OverlordResourceTestClient.java b/integration-tests/src/main/java/io/druid/testing/clients/OverlordResourceTestClient.java index ff0ca4a999f..5996b88b956 100644 --- a/integration-tests/src/main/java/io/druid/testing/clients/OverlordResourceTestClient.java +++ b/integration-tests/src/main/java/io/druid/testing/clients/OverlordResourceTestClient.java @@ -70,7 +70,7 @@ public class OverlordResourceTestClient private String getIndexerURL() { - return String.format( + return StringUtils.format( "%s/druid/indexer/v1/", indexer ); @@ -134,7 +134,7 @@ public class OverlordResourceTestClient try { StatusResponseHolder response = makeRequest( HttpMethod.GET, - String.format( + StringUtils.format( "%stask/%s/status", getIndexerURL(), URLEncoder.encode(taskID, "UTF-8") @@ -176,7 +176,7 @@ public class OverlordResourceTestClient try { StatusResponseHolder response = makeRequest( HttpMethod.GET, - String.format("%s%s", getIndexerURL(), identifier) + StringUtils.format("%s%s", getIndexerURL(), identifier) ); LOG.info("Tasks %s response %s", identifier, response.getContent()); return jsonMapper.readValue( @@ -195,7 +195,7 @@ public class OverlordResourceTestClient try { StatusResponseHolder response = makeRequest( HttpMethod.POST, - String.format( + StringUtils.format( "%stask/%s/shutdown", getIndexerURL(), URLEncoder.encode(taskID, "UTF-8") ) @@ -276,7 +276,7 @@ public class OverlordResourceTestClient try { StatusResponseHolder response = httpClient.go( new Request( - HttpMethod.POST, new URL(String.format("%ssupervisor/%s/shutdown", getIndexerURL(), id)) + HttpMethod.POST, new URL(StringUtils.format("%ssupervisor/%s/shutdown", getIndexerURL(), id)) ), responseHandler ).get(); diff --git a/integration-tests/src/main/java/io/druid/testing/clients/QueryResourceTestClient.java b/integration-tests/src/main/java/io/druid/testing/clients/QueryResourceTestClient.java index ef98b69b7f6..555dbc07ba1 100644 --- a/integration-tests/src/main/java/io/druid/testing/clients/QueryResourceTestClient.java +++ b/integration-tests/src/main/java/io/druid/testing/clients/QueryResourceTestClient.java @@ -30,6 +30,7 @@ import com.metamx.http.client.Request; import com.metamx.http.client.response.StatusResponseHandler; import com.metamx.http.client.response.StatusResponseHolder; import 
io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.query.Query; import io.druid.testing.IntegrationTestingConfig; import io.druid.testing.guice.TestClient; @@ -62,7 +63,7 @@ public class QueryResourceTestClient private String getBrokerURL() { - return String.format( + return StringUtils.format( "%s/druid/v2/", routerUrl ); diff --git a/integration-tests/src/main/java/io/druid/testing/utils/ServerDiscoveryUtil.java b/integration-tests/src/main/java/io/druid/testing/utils/ServerDiscoveryUtil.java index 862b3b663ec..1cd320f5d9c 100644 --- a/integration-tests/src/main/java/io/druid/testing/utils/ServerDiscoveryUtil.java +++ b/integration-tests/src/main/java/io/druid/testing/utils/ServerDiscoveryUtil.java @@ -21,6 +21,7 @@ package io.druid.testing.utils; import io.druid.client.selector.Server; import io.druid.curator.discovery.ServerDiscoverySelector; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import java.util.concurrent.Callable; @@ -57,7 +58,7 @@ public class ServerDiscoveryUtil return isInstanceReady(serviceProvider); } }, - String.format("Instance %s to get ready", instanceType) + StringUtils.format("Instance %s to get ready", instanceType) ); } diff --git a/integration-tests/src/main/java/io/druid/testing/utils/TestQueryHelper.java b/integration-tests/src/main/java/io/druid/testing/utils/TestQueryHelper.java index bb00c7dcf70..f8b854ebb55 100644 --- a/integration-tests/src/main/java/io/druid/testing/utils/TestQueryHelper.java +++ b/integration-tests/src/main/java/io/druid/testing/utils/TestQueryHelper.java @@ -24,6 +24,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.inject.Inject; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.logger.Logger; import io.druid.query.Druids; @@ -121,7 +122,7 @@ public class TestQueryHelper private String getBrokerURL() { - return String.format("%s/druid/v2?pretty", broker); + return StringUtils.format("%s/druid/v2?pretty", broker); } @SuppressWarnings("unchecked") diff --git a/integration-tests/src/main/java/org/testng/DruidTestRunnerFactory.java b/integration-tests/src/main/java/org/testng/DruidTestRunnerFactory.java index 1a31c5b35b6..f5b30832e10 100644 --- a/integration-tests/src/main/java/org/testng/DruidTestRunnerFactory.java +++ b/integration-tests/src/main/java/org/testng/DruidTestRunnerFactory.java @@ -27,6 +27,7 @@ import com.metamx.http.client.HttpClient; import com.metamx.http.client.Request; import com.metamx.http.client.response.StatusResponseHandler; import com.metamx.http.client.response.StatusResponseHolder; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.lifecycle.Lifecycle; import io.druid.java.util.common.logger.Logger; import io.druid.testing.IntegrationTestingConfig; @@ -103,7 +104,7 @@ public class DruidTestRunnerFactory implements ITestRunnerFactory runTests(); } catch (Exception e) { - e.printStackTrace(); + LOG.error(e, ""); throw Throwables.propagate(e); } finally { @@ -131,7 +132,7 @@ public class DruidTestRunnerFactory implements ITestRunnerFactory new Request( HttpMethod.GET, new URL( - String.format( + StringUtils.format( "%s/status", host ) @@ -140,7 +141,7 @@ public class DruidTestRunnerFactory implements ITestRunnerFactory handler ).get(); - System.out.println(response.getStatus() + 
response.getContent()); + LOG.info("%s %s", response.getStatus(), response.getContent()); if (response.getStatus().equals(HttpResponseStatus.OK)) { return true; } else { @@ -148,7 +149,7 @@ public class DruidTestRunnerFactory implements ITestRunnerFactory } } catch (Throwable e) { - e.printStackTrace(); + LOG.error(e, ""); return false; } } diff --git a/integration-tests/src/test/java/io/druid/tests/indexer/ITKafkaIndexingServiceTest.java b/integration-tests/src/test/java/io/druid/tests/indexer/ITKafkaIndexingServiceTest.java index cdf7f5b946d..320c3f20e76 100644 --- a/integration-tests/src/test/java/io/druid/tests/indexer/ITKafkaIndexingServiceTest.java +++ b/integration-tests/src/test/java/io/druid/tests/indexer/ITKafkaIndexingServiceTest.java @@ -22,6 +22,7 @@ package io.druid.tests.indexer; import com.google.common.base.Throwables; import com.google.inject.Inject; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.testing.IntegrationTestingConfig; import io.druid.testing.guice.DruidTestModuleFactory; @@ -173,7 +174,7 @@ public class ITKafkaIndexingServiceTest extends AbstractIndexerTest num_events++; added += num_events; // construct the event to send - String event = String.format(event_template, event_fmt.print(dt), num_events, 0, num_events); + String event = StringUtils.format(event_template, event_fmt.print(dt), num_events, 0, num_events); LOG.info("sending event: [%s]", event); try { producer.send(new ProducerRecord(TOPIC_NAME, event)).get(); diff --git a/integration-tests/src/test/java/io/druid/tests/indexer/ITKafkaTest.java b/integration-tests/src/test/java/io/druid/tests/indexer/ITKafkaTest.java index 07501edf176..5f6e162d69e 100644 --- a/integration-tests/src/test/java/io/druid/tests/indexer/ITKafkaTest.java +++ b/integration-tests/src/test/java/io/druid/tests/indexer/ITKafkaTest.java @@ -22,6 +22,7 @@ package io.druid.tests.indexer; import com.google.common.base.Throwables; import com.google.inject.Inject; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.testing.IntegrationTestingConfig; import io.druid.testing.guice.DruidTestModuleFactory; @@ -154,7 +155,7 @@ public class ITKafkaTest extends AbstractIndexerTest for (int i = 0; i < num_events; i++) { added += i; // construct the event to send - String event = String.format( + String event = StringUtils.format( event_template, event_fmt.print(dt), i, 0, i ); diff --git a/integration-tests/src/test/java/io/druid/tests/indexer/ITRealtimeIndexTaskTest.java b/integration-tests/src/test/java/io/druid/tests/indexer/ITRealtimeIndexTaskTest.java index ba4d8ffa936..38313d15fa3 100644 --- a/integration-tests/src/test/java/io/druid/tests/indexer/ITRealtimeIndexTaskTest.java +++ b/integration-tests/src/test/java/io/druid/tests/indexer/ITRealtimeIndexTaskTest.java @@ -26,6 +26,7 @@ import com.metamx.http.client.HttpClient; import io.druid.curator.discovery.ServerDiscoveryFactory; import io.druid.curator.discovery.ServerDiscoverySelector; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.testing.IntegrationTestingConfig; import io.druid.testing.clients.EventReceiverFirehoseTestClient; @@ -258,7 +259,7 @@ public class ITRealtimeIndexTaskTest extends AbstractIndexerTest private String getRouterURL() { - return String.format( + return 
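The DruidTestRunnerFactory hunks above replace raw stdout/stderr with the project Logger; a condensed sketch of the converted call sites (response and e as in the hunks):

    private static final Logger LOG = new Logger(DruidTestRunnerFactory.class);

    // Before: bypasses log4j entirely, so the output cannot be routed or filtered.
    //   System.out.println(response.getStatus() + response.getContent());
    //   e.printStackTrace();

    // After: the Logger wrapper formats with nonStrictFormat(), so even a bad
    // format string on an error path degrades to concatenation instead of throwing.
    LOG.info("%s %s", response.getStatus(), response.getContent());
    LOG.error(e, "");
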
StringUtils.format( "%s/druid/v2?pretty", config.getRouterUrl() ); diff --git a/java-util/src/main/java/io/druid/java/util/common/CompressionUtils.java b/java-util/src/main/java/io/druid/java/util/common/CompressionUtils.java index 68c235ac644..eb4360da0f2 100644 --- a/java-util/src/main/java/io/druid/java/util/common/CompressionUtils.java +++ b/java-util/src/main/java/io/druid/java/util/common/CompressionUtils.java @@ -86,7 +86,7 @@ public class CompressionUtils public static long zip(File directory, OutputStream out) throws IOException { if (!directory.isDirectory()) { - throw new IOException(String.format("directory[%s] is not a directory", directory)); + throw new IOE("directory[%s] is not a directory", directory); } final File[] files = directory.listFiles(); @@ -96,7 +96,7 @@ public class CompressionUtils log.info("Adding file[%s] with size[%,d]. Total size so far[%,d]", file, file.length(), totalSize); if (file.length() >= Integer.MAX_VALUE) { zipOut.finish(); - throw new IOException(String.format("file[%s] too large [%,d]", file, file.length())); + throw new IOE("file[%s] too large [%,d]", file, file.length()); } zipOut.putNextEntry(new ZipEntry(file.getName())); totalSize += Files.asByteSource(file).copyTo(zipOut); diff --git a/java-util/src/main/java/io/druid/java/util/common/IAE.java b/java-util/src/main/java/io/druid/java/util/common/IAE.java index d598ea4df1d..106c75539a3 100644 --- a/java-util/src/main/java/io/druid/java/util/common/IAE.java +++ b/java-util/src/main/java/io/druid/java/util/common/IAE.java @@ -25,11 +25,11 @@ public class IAE extends IllegalArgumentException { public IAE(String formatText, Object... arguments) { - super(StringUtils.safeFormat(formatText, arguments)); + super(StringUtils.nonStrictFormat(formatText, arguments)); } public IAE(Throwable cause, String formatText, Object... arguments) { - super(StringUtils.safeFormat(formatText, arguments), cause); + super(StringUtils.nonStrictFormat(formatText, arguments), cause); } } diff --git a/java-util/src/main/java/io/druid/java/util/common/IOE.java b/java-util/src/main/java/io/druid/java/util/common/IOE.java index c92d37ad1b4..3d26abd50dc 100644 --- a/java-util/src/main/java/io/druid/java/util/common/IOE.java +++ b/java-util/src/main/java/io/druid/java/util/common/IOE.java @@ -27,11 +27,11 @@ public class IOE extends IOException { public IOE(String formatText, Object... arguments) { - super(StringUtils.safeFormat(formatText, arguments)); + super(StringUtils.nonStrictFormat(formatText, arguments)); } public IOE(Throwable cause, String formatText, Object... arguments) { - super(StringUtils.safeFormat(formatText, arguments), cause); + super(StringUtils.nonStrictFormat(formatText, arguments), cause); } } diff --git a/java-util/src/main/java/io/druid/java/util/common/ISE.java b/java-util/src/main/java/io/druid/java/util/common/ISE.java index e3d8c6ed63a..2e5670ae2e1 100644 --- a/java-util/src/main/java/io/druid/java/util/common/ISE.java +++ b/java-util/src/main/java/io/druid/java/util/common/ISE.java @@ -25,11 +25,11 @@ public class ISE extends IllegalStateException { public ISE(String formatText, Object... arguments) { - super(StringUtils.safeFormat(formatText, arguments)); + super(StringUtils.nonStrictFormat(formatText, arguments)); } public ISE(Throwable cause, String formatText, Object... 
arguments) { - super(StringUtils.safeFormat(formatText, arguments), cause); + super(StringUtils.nonStrictFormat(formatText, arguments), cause); } } diff --git a/java-util/src/main/java/io/druid/java/util/common/RE.java b/java-util/src/main/java/io/druid/java/util/common/RE.java index 2c70a1a8983..6c4f502f7ba 100644 --- a/java-util/src/main/java/io/druid/java/util/common/RE.java +++ b/java-util/src/main/java/io/druid/java/util/common/RE.java @@ -25,11 +25,11 @@ public class RE extends RuntimeException { public RE(String formatText, Object... arguments) { - super(StringUtils.safeFormat(formatText, arguments)); + super(StringUtils.nonStrictFormat(formatText, arguments)); } public RE(Throwable cause, String formatText, Object... arguments) { - super(StringUtils.safeFormat(formatText, arguments), cause); + super(StringUtils.nonStrictFormat(formatText, arguments), cause); } } diff --git a/java-util/src/main/java/io/druid/java/util/common/StringUtils.java b/java-util/src/main/java/io/druid/java/util/common/StringUtils.java index 97b2de8989e..4022cfa23e8 100644 --- a/java-util/src/main/java/io/druid/java/util/common/StringUtils.java +++ b/java-util/src/main/java/io/druid/java/util/common/StringUtils.java @@ -100,7 +100,20 @@ public class StringUtils } } - public static String safeFormat(String message, Object... formatArgs) + /** + * Equivalent of String.format(Locale.ENGLISH, message, formatArgs). + */ + public static String format(String message, Object... formatArgs) + { + return String.format(Locale.ENGLISH, message, formatArgs); + } + + /** + * Formats the string as {@link #format(String, Object...)}, but instead of failing on illegal format, returns the + * concatenated format string and format arguments. Should be used for unimportant formatting like logging, + * exception messages, typically not directly. + */ + public static String nonStrictFormat(String message, Object... formatArgs) { if(formatArgs == null || formatArgs.length == 0) { return message; @@ -116,4 +129,14 @@ public class StringUtils return bob.toString(); } } + + public static String toLowerCase(String s) + { + return s.toLowerCase(Locale.ENGLISH); + } + + public static String toUpperCase(String s) + { + return s.toUpperCase(Locale.ENGLISH); + } } diff --git a/java-util/src/main/java/io/druid/java/util/common/Timing.java b/java-util/src/main/java/io/druid/java/util/common/Timing.java deleted file mode 100644 index e0c9df8b302..00000000000 --- a/java-util/src/main/java/io/druid/java/util/common/Timing.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to Metamarkets Group Inc. (Metamarkets) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. Metamarkets licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
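The StringUtils diff above is the heart of the patch: a strict, locale-pinned format() plus a non-throwing nonStrictFormat(). A standalone sketch of the two contracts; the nonStrictFormat expectations match the testNonStrictFormat() case further down:

    import io.druid.java.util.common.StringUtils;

    public class FormatContractDemo
    {
      public static void main(String[] args)
      {
        // format() is String.format(Locale.ENGLISH, ...): output is locale-stable,
        // but a format/argument mismatch still throws IllegalFormatException.
        System.out.println(StringUtils.format("%,d", 1048576)); // 1,048,576

        // nonStrictFormat() never throws: on a mismatch it returns the raw format
        // string with the arguments appended after "; ".
        System.out.println(StringUtils.nonStrictFormat("test%d", "format"));   // test%d; format
        System.out.println(StringUtils.nonStrictFormat("test%s%s", "format")); // test%s%s; format
      }
    }
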
- */ - -package io.druid.java.util.common; - -import io.druid.java.util.common.logger.Logger; - -import java.util.concurrent.Callable; - -/** - */ -public class Timing { - public static RetType timeBenchmarkWrapException(String prefix, Callable callable, final Logger log) { - try { - return timeBenchmark(prefix, callable, log); - } catch (Exception e) { - throw new RuntimeException(e); - } - } - - - public static RetType timeBenchmark(String prefix, Callable callable, Logger log) throws Exception { - RetType retVal; - - long startTime = System.currentTimeMillis(); - retVal = callable.call(); - long endTime = System.currentTimeMillis(); - - log.info(String.format("%s completed %,d millis.", prefix, endTime - startTime)); - - return retVal; - } -} diff --git a/java-util/src/main/java/io/druid/java/util/common/UOE.java b/java-util/src/main/java/io/druid/java/util/common/UOE.java index cef10eff1a0..10dbc04f1fa 100644 --- a/java-util/src/main/java/io/druid/java/util/common/UOE.java +++ b/java-util/src/main/java/io/druid/java/util/common/UOE.java @@ -25,11 +25,11 @@ public class UOE extends UnsupportedOperationException { public UOE(String formatText, Object... arguments) { - super(StringUtils.safeFormat(formatText, arguments)); + super(StringUtils.nonStrictFormat(formatText, arguments)); } public UOE(Throwable cause, String formatText, Object... arguments) { - super(StringUtils.safeFormat(formatText, arguments), cause); + super(StringUtils.nonStrictFormat(formatText, arguments), cause); } } diff --git a/java-util/src/main/java/io/druid/java/util/common/granularity/Granularity.java b/java-util/src/main/java/io/druid/java/util/common/granularity/Granularity.java index acd21e8ed23..5060eb4861c 100644 --- a/java-util/src/main/java/io/druid/java/util/common/granularity/Granularity.java +++ b/java-util/src/main/java/io/druid/java/util/common/granularity/Granularity.java @@ -24,6 +24,7 @@ import com.google.common.collect.Lists; import com.google.common.primitives.Longs; import io.druid.java.util.common.Cacheable; import io.druid.java.util.common.IAE; +import io.druid.java.util.common.StringUtils; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.Interval; @@ -53,7 +54,7 @@ public abstract class Granularity implements Cacheable @JsonCreator public static Granularity fromString(String str) { - return GranularityType.valueOf(str.toUpperCase()).getDefaultGranularity(); + return GranularityType.valueOf(StringUtils.toUpperCase(str)).getDefaultGranularity(); } /** diff --git a/java-util/src/main/java/io/druid/java/util/common/io/smoosh/FileSmoosher.java b/java-util/src/main/java/io/druid/java/util/common/io/smoosh/FileSmoosher.java index 27f34f78370..4dd66dba882 100644 --- a/java-util/src/main/java/io/druid/java/util/common/io/smoosh/FileSmoosher.java +++ b/java-util/src/main/java/io/druid/java/util/common/io/smoosh/FileSmoosher.java @@ -28,8 +28,10 @@ import com.google.common.io.ByteStreams; import com.google.common.primitives.Ints; import io.druid.java.util.common.FileUtils; import io.druid.java.util.common.IAE; +import io.druid.java.util.common.IOE; import io.druid.java.util.common.ISE; import io.druid.java.util.common.MappedByteBufferHandler; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.io.Closer; import io.druid.java.util.common.logger.Logger; @@ -107,12 +109,12 @@ public class FileSmoosher implements Closeable static File metaFile(File baseDir) { - return new File(baseDir, String.format("meta.%s", FILE_EXTENSION)); + return new 
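Granularity.fromString() above is the clearest motivation for pinning the locale: GranularityType.valueOf() needs an exact constant name. A small demo of how the JVM default locale breaks that (the classic Turkish-I problem):

    import java.util.Locale;

    public class TurkishIDemo
    {
      public static void main(String[] args)
      {
        // In a Turkish locale, lower-case 'i' upper-cases to dotted 'İ' (U+0130),
        // so "minute" no longer matches the MINUTE enum constant.
        System.out.println("minute".toUpperCase(new Locale("tr", "TR"))); // MİNUTE
        System.out.println("minute".toUpperCase(Locale.ENGLISH));        // MINUTE
      }
    }

StringUtils.toUpperCase()/toLowerCase() pin Locale.ENGLISH, which is why the patch swaps them in wherever the result feeds an exact-match lookup.
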
File(baseDir, StringUtils.format("meta.%s", FILE_EXTENSION)); } static File makeChunkFile(File baseDir, int i) { - return new File(baseDir, String.format("%05d.%s", i, FILE_EXTENSION)); + return new File(baseDir, StringUtils.format("%05d.%s", i, FILE_EXTENSION)); } public Set getInternalFilenames() @@ -243,9 +245,7 @@ public class FileSmoosher implements Closeable throw new ISE("WTF? Perhaps there is some concurrent modification going on?"); } if (bytesWritten != size) { - throw new IOException( - String.format("Expected [%,d] bytes, only saw [%,d], potential corruption?", size, bytesWritten) - ); + throw new IOE("Expected [%,d] bytes, only saw [%,d], potential corruption?", size, bytesWritten); } // Merge temporary files on to the main smoosh file. mergeWithSmoosher(); @@ -391,7 +391,7 @@ public class FileSmoosher implements Closeable File metaFile = metaFile(baseDir); try (Writer out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(metaFile), Charsets.UTF_8))) { - out.write(String.format("v1,%d,%d", maxChunkSize, outFiles.size())); + out.write(StringUtils.format("v1,%d,%d", maxChunkSize, outFiles.size())); out.write("\n"); for (Map.Entry entry : internalFiles.entrySet()) { diff --git a/java-util/src/main/java/io/druid/java/util/common/logger/Logger.java b/java-util/src/main/java/io/druid/java/util/common/logger/Logger.java index a39d5751614..656de5f97c0 100644 --- a/java-util/src/main/java/io/druid/java/util/common/logger/Logger.java +++ b/java-util/src/main/java/io/druid/java/util/common/logger/Logger.java @@ -41,42 +41,42 @@ public class Logger public void trace(String message, Object... formatArgs) { if (log.isTraceEnabled()) { - log.trace(StringUtils.safeFormat(message, formatArgs)); + log.trace(StringUtils.nonStrictFormat(message, formatArgs)); } } public void trace(Throwable t, String message, Object... formatArgs) { if (log.isTraceEnabled()) { - log.trace(StringUtils.safeFormat(message, formatArgs), t); + log.trace(StringUtils.nonStrictFormat(message, formatArgs), t); } } public void debug(String message, Object... formatArgs) { if (log.isDebugEnabled()) { - log.debug(StringUtils.safeFormat(message, formatArgs)); + log.debug(StringUtils.nonStrictFormat(message, formatArgs)); } } public void debug(Throwable t, String message, Object... formatArgs) { if (log.isDebugEnabled()) { - log.debug(StringUtils.safeFormat(message, formatArgs), t); + log.debug(StringUtils.nonStrictFormat(message, formatArgs), t); } } public void info(String message, Object... formatArgs) { if (log.isInfoEnabled()) { - log.info(StringUtils.safeFormat(message, formatArgs)); + log.info(StringUtils.nonStrictFormat(message, formatArgs)); } } public void info(Throwable t, String message, Object... formatArgs) { if (log.isInfoEnabled()) { - log.info(StringUtils.safeFormat(message, formatArgs), t); + log.info(StringUtils.nonStrictFormat(message, formatArgs), t); } } @@ -94,17 +94,17 @@ public class Logger public void warn(String message, Object... formatArgs) { - log.warn(StringUtils.safeFormat(message, formatArgs)); + log.warn(StringUtils.nonStrictFormat(message, formatArgs)); } public void warn(Throwable t, String message, Object... formatArgs) { - log.warn(StringUtils.safeFormat(message, formatArgs), t); + log.warn(StringUtils.nonStrictFormat(message, formatArgs), t); } public void error(String message, Object... 
formatArgs) { - log.error(StringUtils.safeFormat(message, formatArgs)); + log.error(StringUtils.nonStrictFormat(message, formatArgs)); } /** @@ -121,17 +121,17 @@ public class Logger public void error(Throwable t, String message, Object... formatArgs) { - log.error(StringUtils.safeFormat(message, formatArgs), t); + log.error(StringUtils.nonStrictFormat(message, formatArgs), t); } public void wtf(String message, Object... formatArgs) { - log.error(StringUtils.safeFormat("WTF?!: " + message, formatArgs), new Exception()); + log.error(StringUtils.nonStrictFormat("WTF?!: " + message, formatArgs), new Exception()); } public void wtf(Throwable t, String message, Object... formatArgs) { - log.error(StringUtils.safeFormat("WTF?!: " + message, formatArgs), t); + log.error(StringUtils.nonStrictFormat("WTF?!: " + message, formatArgs), t); } public boolean isTraceEnabled() diff --git a/java-util/src/main/java/io/druid/java/util/common/parsers/JSONToLowerParser.java b/java-util/src/main/java/io/druid/java/util/common/parsers/JSONToLowerParser.java index 7460b6264b3..906f34be2e2 100644 --- a/java-util/src/main/java/io/druid/java/util/common/parsers/JSONToLowerParser.java +++ b/java-util/src/main/java/io/druid/java/util/common/parsers/JSONToLowerParser.java @@ -25,6 +25,7 @@ import com.google.common.base.Function; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Sets; +import io.druid.java.util.common.StringUtils; import java.util.ArrayList; import java.util.Iterator; @@ -65,7 +66,7 @@ public class JSONToLowerParser extends JSONParser @Override public String apply(String input) { - return input.toLowerCase(); + return StringUtils.toLowerCase(input); } } ) @@ -84,7 +85,7 @@ public class JSONToLowerParser extends JSONParser while (keysIter.hasNext()) { String key = keysIter.next(); - if (exclude.contains(key.toLowerCase())) { + if (exclude.contains(StringUtils.toLowerCase(key))) { continue; } @@ -98,11 +99,11 @@ public class JSONToLowerParser extends JSONParser nodeValue.add(subnodeValue); } } - map.put(key.toLowerCase(), nodeValue); // difference from JSONParser parse() + map.put(StringUtils.toLowerCase(key), nodeValue); // difference from JSONParser parse() } else { final Object nodeValue = valueFunction.apply(node); if (nodeValue != null) { - map.put(key.toLowerCase(), nodeValue); // difference from JSONParser parse() + map.put(StringUtils.toLowerCase(key), nodeValue); // difference from JSONParser parse() } } } diff --git a/java-util/src/main/java/io/druid/java/util/common/parsers/ParseException.java b/java-util/src/main/java/io/druid/java/util/common/parsers/ParseException.java index ad6ddc4ab2d..ca1f5ff67cf 100644 --- a/java-util/src/main/java/io/druid/java/util/common/parsers/ParseException.java +++ b/java-util/src/main/java/io/druid/java/util/common/parsers/ParseException.java @@ -19,17 +19,19 @@ package io.druid.java.util.common.parsers; +import io.druid.java.util.common.StringUtils; + /** */ public class ParseException extends RuntimeException { public ParseException(String formatText, Object... arguments) { - super(String.format(formatText, arguments)); + super(StringUtils.nonStrictFormat(formatText, arguments)); } public ParseException(Throwable cause, String formatText, Object... 
arguments) { - super(String.format(formatText, arguments), cause); + super(StringUtils.nonStrictFormat(formatText, arguments), cause); } } diff --git a/java-util/src/main/java/io/druid/java/util/common/parsers/ParserUtils.java b/java-util/src/main/java/io/druid/java/util/common/parsers/ParserUtils.java index 58fdd9dd4fa..7dfe429b3ba 100644 --- a/java-util/src/main/java/io/druid/java/util/common/parsers/ParserUtils.java +++ b/java-util/src/main/java/io/druid/java/util/common/parsers/ParserUtils.java @@ -21,6 +21,7 @@ package io.druid.java.util.common.parsers; import com.google.common.base.Function; import com.google.common.collect.Sets; +import io.druid.java.util.common.StringUtils; import org.joda.time.DateTime; import java.util.ArrayList; @@ -68,7 +69,7 @@ public class ParserUtils Set uniqueNames = Sets.newHashSet(); for (String fieldName : fieldNames) { - String next = fieldName.toLowerCase(); + String next = StringUtils.toLowerCase(fieldName); if (uniqueNames.contains(next)) { duplicates.add(next); } diff --git a/java-util/src/main/java/io/druid/java/util/common/parsers/ToLowerCaseParser.java b/java-util/src/main/java/io/druid/java/util/common/parsers/ToLowerCaseParser.java index fede1aa1f98..37c28359957 100644 --- a/java-util/src/main/java/io/druid/java/util/common/parsers/ToLowerCaseParser.java +++ b/java-util/src/main/java/io/druid/java/util/common/parsers/ToLowerCaseParser.java @@ -20,6 +20,7 @@ package io.druid.java.util.common.parsers; import com.google.common.collect.Maps; +import io.druid.java.util.common.StringUtils; import java.util.List; import java.util.Map; @@ -41,7 +42,7 @@ public class ToLowerCaseParser implements Parser Map line = baseParser.parse(input); Map retVal = Maps.newLinkedHashMap(); for (Map.Entry entry : line.entrySet()) { - String k = entry.getKey().toLowerCase(); + String k = StringUtils.toLowerCase(entry.getKey()); if(retVal.containsKey(k)) { // Duplicate key, case-insensitively diff --git a/java-util/src/test/java/io/druid/java/util/common/CompressionUtilsTest.java b/java-util/src/test/java/io/druid/java/util/common/CompressionUtilsTest.java index 24baeb031c3..f43c50e2f76 100644 --- a/java-util/src/test/java/io/druid/java/util/common/CompressionUtilsTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/CompressionUtilsTest.java @@ -234,7 +234,7 @@ public class CompressionUtilsTest assertGoodDataStream(inputStream); } if (!testFile.delete()) { - throw new IOException(String.format("Unable to delete file [%s]", testFile.getAbsolutePath())); + throw new IOE("Unable to delete file [%s]", testFile.getAbsolutePath()); } Assert.assertFalse(testFile.exists()); CompressionUtils.gunzip(Files.asByteSource(gzFile), testFile); @@ -256,7 +256,7 @@ public class CompressionUtilsTest assertGoodDataStream(inputStream); } if (!testFile.delete()) { - throw new IOException(String.format("Unable to delete file [%s]", testFile.getAbsolutePath())); + throw new IOE("Unable to delete file [%s]", testFile.getAbsolutePath()); } Assert.assertFalse(testFile.exists()); CompressionUtils.gunzip(new FileInputStream(gzFile), testFile); @@ -299,7 +299,7 @@ public class CompressionUtilsTest final byte[] bytes = new byte[gzBytes.length]; Assert.assertEquals(bytes.length, inputStream.read(bytes)); Assert.assertArrayEquals( - String.format("Failed on range %d", i), + StringUtils.format("Failed on range %d", i), gzBytes, bytes ); @@ -494,7 +494,7 @@ public class CompressionUtilsTest assertGoodDataStream(inputStream); } if (!testFile.delete()) { - throw new 
IOException(String.format("Unable to delete file [%s]", testFile.getAbsolutePath())); + throw new IOE("Unable to delete file [%s]", testFile.getAbsolutePath()); } Assert.assertFalse(testFile.exists()); CompressionUtils.gunzip(Files.asByteSource(gzFile), testFile); @@ -540,7 +540,7 @@ public class CompressionUtilsTest assertGoodDataStream(inputStream); } if (testFile.exists() && !testFile.delete()) { - throw new RuntimeException(String.format("Unable to delete file [%s]", testFile.getAbsolutePath())); + throw new RE("Unable to delete file [%s]", testFile.getAbsolutePath()); } Assert.assertFalse(testFile.exists()); final AtomicLong flushes = new AtomicLong(0L); diff --git a/java-util/src/test/java/io/druid/java/util/common/GranularityTest.java b/java-util/src/test/java/io/druid/java/util/common/GranularityTest.java index 292715b1b6d..8ac7ea4046c 100644 --- a/java-util/src/test/java/io/druid/java/util/common/GranularityTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/GranularityTest.java @@ -298,24 +298,41 @@ public class GranularityTest { if (pd.exception == null) { // check if path returns expected date Assert.assertEquals( - String.format("[%s,%s] Expected path %s to return date %s", granularity, formatter, pd.path, pd.date), - pd.date, - granularity.toDate(pd.path, formatter) + StringUtils.format( + "[%s,%s] Expected path %s to return date %s", + granularity, + formatter, + pd.path, + pd.date + ), + pd.date, + granularity.toDate(pd.path, formatter) ); if(formatter.equals(Granularity.Formatter.DEFAULT)) { Assert.assertEquals( - String.format("[%s] Expected toDate(%s) to return the same as toDate(%s, DEFAULT)", granularity, pd.path, pd.path), - granularity.toDate(pd.path), granularity.toDate(pd.path, formatter) + StringUtils.format( + "[%s] Expected toDate(%s) to return the same as toDate(%s, DEFAULT)", + granularity, + pd.path, + pd.path + ), + granularity.toDate(pd.path), granularity.toDate(pd.path, formatter) ); } if(pd.date != null) { // check if formatter is readable by toDate Assert.assertEquals( - String.format("[%s,%s] Expected date %s to return date %s", granularity, formatter, pd.date, pd.date), + StringUtils.format( + "[%s,%s] Expected date %s to return date %s", + granularity, + formatter, pd.date, - granularity.toDate(granularity.getFormatter(formatter).print(pd.date) + "/", formatter) + pd.date + ), + pd.date, + granularity.toDate(granularity.getFormatter(formatter).print(pd.date) + "/", formatter) ); } } else { @@ -330,9 +347,13 @@ public class GranularityTest { } Assert.assertTrue( - String.format( - "[%s,%s] Expected exception %s for path: %s", granularity, formatter, pd.exception, pd.path - ), flag + StringUtils.format( + "[%s,%s] Expected exception %s for path: %s", + granularity, + formatter, + pd.exception, + pd.path + ), flag ); } } diff --git a/java-util/src/test/java/io/druid/java/util/common/StringUtilsTest.java b/java-util/src/test/java/io/druid/java/util/common/StringUtilsTest.java index 758f1a991f5..3b2e9c353bb 100644 --- a/java-util/src/test/java/io/druid/java/util/common/StringUtilsTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/StringUtilsTest.java @@ -109,9 +109,9 @@ public class StringUtilsTest } @Test - public void testSafeFormat() + public void testNonStrictFormat() { - Assert.assertEquals("test%d; format", StringUtils.safeFormat("test%d", "format")); - Assert.assertEquals("test%s%s; format", StringUtils.safeFormat("test%s%s", "format")); + Assert.assertEquals("test%d; format", StringUtils.nonStrictFormat("test%d", 
"format")); + Assert.assertEquals("test%s%s; format", StringUtils.nonStrictFormat("test%s%s", "format")); } } diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/FilteredSequenceTest.java b/java-util/src/test/java/io/druid/java/util/common/guava/FilteredSequenceTest.java index a3609e5acde..1fff8757194 100644 --- a/java-util/src/test/java/io/druid/java/util/common/guava/FilteredSequenceTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/guava/FilteredSequenceTest.java @@ -22,6 +22,7 @@ package io.druid.java.util.common.guava; import com.google.common.base.Predicate; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import io.druid.java.util.common.StringUtils; import org.junit.Test; import java.util.List; @@ -49,7 +50,7 @@ public class FilteredSequenceTest } SequenceTestHelper.testAll( - String.format("Run %,d: ", i), + StringUtils.format("Run %,d: ", i), new FilteredSequence<>(Sequences.simple(vals), pred), Lists.newArrayList(Iterables.filter(vals, pred)) ); diff --git a/java-util/src/test/java/io/druid/java/util/common/guava/MappedSequenceTest.java b/java-util/src/test/java/io/druid/java/util/common/guava/MappedSequenceTest.java index e0c8d387954..d1d25adc016 100644 --- a/java-util/src/test/java/io/druid/java/util/common/guava/MappedSequenceTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/guava/MappedSequenceTest.java @@ -21,6 +21,7 @@ package io.druid.java.util.common.guava; import com.google.common.base.Function; import com.google.common.collect.Lists; +import io.druid.java.util.common.StringUtils; import org.junit.Test; import java.util.List; @@ -48,7 +49,7 @@ public class MappedSequenceTest } SequenceTestHelper.testAll( - String.format("Run %,d: ", i), + StringUtils.format("Run %,d: ", i), new MappedSequence<>(Sequences.simple(vals), fn), Lists.transform(vals, fn) ); diff --git a/java-util/src/test/java/io/druid/java/util/common/io/smoosh/SmooshedFileMapperTest.java b/java-util/src/test/java/io/druid/java/util/common/io/smoosh/SmooshedFileMapperTest.java index 04cf1fe85a0..249b3302f46 100644 --- a/java-util/src/test/java/io/druid/java/util/common/io/smoosh/SmooshedFileMapperTest.java +++ b/java-util/src/test/java/io/druid/java/util/common/io/smoosh/SmooshedFileMapperTest.java @@ -23,6 +23,7 @@ import com.google.common.io.Files; import com.google.common.primitives.Ints; import io.druid.java.util.common.BufferUtils; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import junit.framework.Assert; import org.junit.Rule; import org.junit.Test; @@ -48,9 +49,9 @@ public class SmooshedFileMapperTest try (FileSmoosher smoosher = new FileSmoosher(baseDir, 21)) { for (int i = 0; i < 20; ++i) { - File tmpFile = folder.newFile(String.format("smoosh-%s.bin", i)); + File tmpFile = folder.newFile(StringUtils.format("smoosh-%s.bin", i)); Files.write(Ints.toByteArray(i), tmpFile); - smoosher.add(String.format("%d", i), tmpFile); + smoosher.add(StringUtils.format("%d", i), tmpFile); } } validateOutput(baseDir); @@ -62,12 +63,12 @@ public class SmooshedFileMapperTest File baseDir = folder.newFolder("base"); try (FileSmoosher smoosher = new FileSmoosher(baseDir, 21)) { - final SmooshedWriter writer = smoosher.addWithSmooshedWriter(String.format("%d", 19), 4); + final SmooshedWriter writer = smoosher.addWithSmooshedWriter(StringUtils.format("%d", 19), 4); for (int i = 0; i < 19; ++i) { - File tmpFile = File.createTempFile(String.format("smoosh-%s", i), ".bin"); + File tmpFile = 
File.createTempFile(StringUtils.format("smoosh-%s", i), ".bin"); Files.write(Ints.toByteArray(i), tmpFile); - smoosher.add(String.format("%d", i), tmpFile); + smoosher.add(StringUtils.format("%d", i), tmpFile); if (i == 10) { writer.write(ByteBuffer.wrap(Ints.toByteArray(19))); writer.close(); @@ -85,7 +86,7 @@ public class SmooshedFileMapperTest try (FileSmoosher smoosher = new FileSmoosher(baseDir, 21)) { for (int i = 0; i < 19; ++i) { - final SmooshedWriter writer = smoosher.addWithSmooshedWriter(String.format("%d", i), 4); + final SmooshedWriter writer = smoosher.addWithSmooshedWriter(StringUtils.format("%d", i), 4); writer.write(ByteBuffer.wrap(Ints.toByteArray(i))); } } @@ -97,13 +98,13 @@ public class SmooshedFileMapperTest File baseDir = folder.newFolder("base"); try (FileSmoosher smoosher = new FileSmoosher(baseDir, 21)) { - final SmooshedWriter writer = smoosher.addWithSmooshedWriter(String.format("%d", 19), 4); + final SmooshedWriter writer = smoosher.addWithSmooshedWriter(StringUtils.format("%d", 19), 4); writer.write(ByteBuffer.wrap(Ints.toByteArray(19))); for (int i = 0; i < 19; ++i) { - File tmpFile = File.createTempFile(String.format("smoosh-%s", i), ".bin"); + File tmpFile = File.createTempFile(StringUtils.format("smoosh-%s", i), ".bin"); Files.write(Ints.toByteArray(i), tmpFile); - smoosher.add(String.format("%d", i), tmpFile); + smoosher.add(StringUtils.format("%d", i), tmpFile); tmpFile.delete(); } writer.close(); @@ -118,7 +119,7 @@ public class SmooshedFileMapperTest try (FileSmoosher smoosher = new FileSmoosher(baseDir, 21)) { for (int i = 0; i < 20; ++i) { - final SmooshedWriter writer = smoosher.addWithSmooshedWriter(String.format("%d", i), 7); + final SmooshedWriter writer = smoosher.addWithSmooshedWriter(StringUtils.format("%d", i), 7); writer.write(ByteBuffer.wrap(Ints.toByteArray(i))); try { writer.close(); @@ -142,7 +143,7 @@ public class SmooshedFileMapperTest try (SmooshedFileMapper mapper = SmooshedFileMapper.load(baseDir)) { for (int i = 0; i < 20; ++i) { - ByteBuffer buf = mapper.mapFile(String.format("%d", i)); + ByteBuffer buf = mapper.mapFile(StringUtils.format("%d", i)); Assert.assertEquals(0, buf.position()); Assert.assertEquals(4, buf.remaining()); Assert.assertEquals(4, buf.capacity()); @@ -204,7 +205,7 @@ public class SmooshedFileMapperTest try (SmooshedFileMapper mapper = SmooshedFileMapper.load(baseDir)) { for (int i = 0; i < 20; ++i) { - ByteBuffer buf = mapper.mapFile(String.format("%d", i)); + ByteBuffer buf = mapper.mapFile(StringUtils.format("%d", i)); Assert.assertEquals(0, buf.position()); Assert.assertEquals(4, buf.remaining()); Assert.assertEquals(4, buf.capacity()); diff --git a/processing/src/main/java/io/druid/query/ReflectionLoaderThingy.java b/processing/src/main/java/io/druid/query/ReflectionLoaderThingy.java index 7a8789e313d..4b5ff197de8 100644 --- a/processing/src/main/java/io/druid/query/ReflectionLoaderThingy.java +++ b/processing/src/main/java/io/druid/query/ReflectionLoaderThingy.java @@ -20,6 +20,7 @@ package io.druid.query; import com.google.common.collect.Maps; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import java.util.Map; @@ -59,7 +60,7 @@ public class ReflectionLoaderThingy AtomicReference retVal1; try { - final Class queryToolChestClass = Class.forName(String.format("%s%s", clazz.getName(), interfaceName)); + final Class queryToolChestClass = Class.forName(StringUtils.format("%s%s", clazz.getName(), interfaceName)); retVal1 = new 
AtomicReference(interfaceClass.cast(queryToolChestClass.newInstance())); } catch (Exception e) { diff --git a/processing/src/main/java/io/druid/query/ResourceLimitExceededException.java b/processing/src/main/java/io/druid/query/ResourceLimitExceededException.java index 78d393ecfe8..e6cfe767c09 100644 --- a/processing/src/main/java/io/druid/query/ResourceLimitExceededException.java +++ b/processing/src/main/java/io/druid/query/ResourceLimitExceededException.java @@ -31,6 +31,6 @@ public class ResourceLimitExceededException extends RuntimeException { public ResourceLimitExceededException(String message, Object... arguments) { - super(StringUtils.safeFormat(message, arguments)); + super(StringUtils.nonStrictFormat(message, arguments)); } } diff --git a/processing/src/main/java/io/druid/query/aggregation/AggregatorFactory.java b/processing/src/main/java/io/druid/query/aggregation/AggregatorFactory.java index ec15c68acb8..388ad911427 100644 --- a/processing/src/main/java/io/druid/query/aggregation/AggregatorFactory.java +++ b/processing/src/main/java/io/druid/query/aggregation/AggregatorFactory.java @@ -20,6 +20,7 @@ package io.druid.query.aggregation; import io.druid.java.util.common.Cacheable; +import io.druid.java.util.common.UOE; import io.druid.java.util.common.logger.Logger; import io.druid.segment.ColumnSelectorFactory; @@ -79,10 +80,7 @@ public abstract class AggregatorFactory implements Cacheable */ public AggregatorFactory getMergingFactory(AggregatorFactory other) throws AggregatorFactoryNotMergeableException { - throw new UnsupportedOperationException(String.format( - "[%s] does not implement getMergingFactory(..)", - this.getClass().getName() - )); + throw new UOE("[%s] does not implement getMergingFactory(..)", this.getClass().getName()); } /** diff --git a/processing/src/main/java/io/druid/query/aggregation/AggregatorFactoryNotMergeableException.java b/processing/src/main/java/io/druid/query/aggregation/AggregatorFactoryNotMergeableException.java index 998033ea440..4baf89cb12c 100644 --- a/processing/src/main/java/io/druid/query/aggregation/AggregatorFactoryNotMergeableException.java +++ b/processing/src/main/java/io/druid/query/aggregation/AggregatorFactoryNotMergeableException.java @@ -19,6 +19,8 @@ package io.druid.query.aggregation; +import io.druid.java.util.common.StringUtils; + /** */ public class AggregatorFactoryNotMergeableException extends Exception @@ -29,12 +31,12 @@ public class AggregatorFactoryNotMergeableException extends Exception public AggregatorFactoryNotMergeableException(String formatText, Object... arguments) { - super(String.format(formatText, arguments)); + super(StringUtils.nonStrictFormat(formatText, arguments)); } public AggregatorFactoryNotMergeableException(Throwable cause, String formatText, Object... 
arguments) { - super(String.format(formatText, arguments), cause); + super(StringUtils.nonStrictFormat(formatText, arguments), cause); } public AggregatorFactoryNotMergeableException(Throwable cause) diff --git a/processing/src/main/java/io/druid/query/extraction/BucketExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/BucketExtractionFn.java index ed957ca0276..2e8d24fa007 100644 --- a/processing/src/main/java/io/druid/query/extraction/BucketExtractionFn.java +++ b/processing/src/main/java/io/druid/query/extraction/BucketExtractionFn.java @@ -22,6 +22,7 @@ package io.druid.query.extraction; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.primitives.Doubles; +import io.druid.java.util.common.StringUtils; import javax.annotation.Nullable; import java.nio.ByteBuffer; @@ -144,6 +145,6 @@ public class BucketExtractionFn implements ExtractionFn @Override public String toString() { - return String.format("bucket(%f, %f)", size, offset); + return StringUtils.format("bucket(%f, %f)", size, offset); } } diff --git a/processing/src/main/java/io/druid/query/extraction/MatchingDimExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/MatchingDimExtractionFn.java index 5ee8b63834a..384ff3fcee2 100644 --- a/processing/src/main/java/io/druid/query/extraction/MatchingDimExtractionFn.java +++ b/processing/src/main/java/io/druid/query/extraction/MatchingDimExtractionFn.java @@ -90,7 +90,7 @@ public class MatchingDimExtractionFn extends DimExtractionFn @Override public String toString() { - return String.format("regex_matches(%s)", expr); + return StringUtils.format("regex_matches(%s)", expr); } @Override diff --git a/processing/src/main/java/io/druid/query/extraction/RegexDimExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/RegexDimExtractionFn.java index 07b5f50f00b..6919319675a 100644 --- a/processing/src/main/java/io/druid/query/extraction/RegexDimExtractionFn.java +++ b/processing/src/main/java/io/druid/query/extraction/RegexDimExtractionFn.java @@ -152,7 +152,7 @@ public class RegexDimExtractionFn extends DimExtractionFn @Override public String toString() { - return String.format("regex(/%s/, %d)", expr, index); + return StringUtils.format("regex(/%s/, %d)", expr, index); } @Override diff --git a/processing/src/main/java/io/druid/query/extraction/StringFormatExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/StringFormatExtractionFn.java index 8be4fcbeafe..bad6f446a65 100644 --- a/processing/src/main/java/io/druid/query/extraction/StringFormatExtractionFn.java +++ b/processing/src/main/java/io/druid/query/extraction/StringFormatExtractionFn.java @@ -42,13 +42,13 @@ public class StringFormatExtractionFn extends DimExtractionFn @JsonCreator public static NullHandling forValue(String value) { - return value == null ? NULLSTRING : NullHandling.valueOf(value.toUpperCase()); + return value == null ? 
NULLSTRING : NullHandling.valueOf(StringUtils.toUpperCase(value)); } @JsonValue public String toValue() { - return name().toLowerCase(); + return StringUtils.toLowerCase(name()); } } @@ -105,7 +105,7 @@ public class StringFormatExtractionFn extends DimExtractionFn value = ""; } } - return Strings.emptyToNull(String.format(format, value)); + return Strings.emptyToNull(StringUtils.format(format, value)); } @Override diff --git a/processing/src/main/java/io/druid/query/extraction/SubstringDimExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/SubstringDimExtractionFn.java index 1fabf48406f..6c4214b4c7e 100644 --- a/processing/src/main/java/io/druid/query/extraction/SubstringDimExtractionFn.java +++ b/processing/src/main/java/io/druid/query/extraction/SubstringDimExtractionFn.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.base.Preconditions; import com.google.common.base.Strings; +import io.druid.java.util.common.StringUtils; import javax.annotation.Nullable; import java.nio.ByteBuffer; @@ -134,6 +135,6 @@ public class SubstringDimExtractionFn extends DimExtractionFn @Override public String toString() { - return String.format("substring(%s, %s)", index, getLength()); + return StringUtils.format("substring(%s, %s)", index, getLength()); } } diff --git a/processing/src/main/java/io/druid/query/extraction/TimeFormatExtractionFn.java b/processing/src/main/java/io/druid/query/extraction/TimeFormatExtractionFn.java index 6e096b67e72..059d3079fc8 100644 --- a/processing/src/main/java/io/druid/query/extraction/TimeFormatExtractionFn.java +++ b/processing/src/main/java/io/druid/query/extraction/TimeFormatExtractionFn.java @@ -198,6 +198,6 @@ public class TimeFormatExtractionFn implements ExtractionFn @Override public String toString() { - return String.format("timeFormat(\"%s\", %s, %s, %s, %s)", format, tz, locale, granularity, asMillis); + return StringUtils.format("timeFormat(\"%s\", %s, %s, %s, %s)", format, tz, locale, granularity, asMillis); } } diff --git a/processing/src/main/java/io/druid/query/filter/AndDimFilter.java b/processing/src/main/java/io/druid/query/filter/AndDimFilter.java index 33da2555eee..84bfdd341e3 100644 --- a/processing/src/main/java/io/druid/query/filter/AndDimFilter.java +++ b/processing/src/main/java/io/druid/query/filter/AndDimFilter.java @@ -25,6 +25,7 @@ import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.collect.RangeSet; import com.google.common.collect.TreeRangeSet; +import io.druid.java.util.common.StringUtils; import io.druid.query.Druids; import io.druid.segment.filter.AndFilter; import io.druid.segment.filter.Filters; @@ -119,6 +120,6 @@ public class AndDimFilter implements DimFilter @Override public String toString() { - return String.format("(%s)", AND_JOINER.join(fields)); + return StringUtils.format("(%s)", AND_JOINER.join(fields)); } } diff --git a/processing/src/main/java/io/druid/query/filter/BoundDimFilter.java b/processing/src/main/java/io/druid/query/filter/BoundDimFilter.java index c25336e07b2..f5a9ded6d08 100644 --- a/processing/src/main/java/io/druid/query/filter/BoundDimFilter.java +++ b/processing/src/main/java/io/druid/query/filter/BoundDimFilter.java @@ -300,13 +300,13 @@ public class BoundDimFilter implements DimFilter } if (extractionFn != null) { - builder.append(String.format("%s(%s)", extractionFn, dimension)); + builder.append(StringUtils.format("%s(%s)", 
extractionFn, dimension)); } else { builder.append(dimension); } if (!ordering.equals(StringComparators.LEXICOGRAPHIC)) { - builder.append(String.format(" as %s", ordering.toString())); + builder.append(StringUtils.format(" as %s", ordering.toString())); } if (upper != null) { diff --git a/processing/src/main/java/io/druid/query/filter/ExtractionDimFilter.java b/processing/src/main/java/io/druid/query/filter/ExtractionDimFilter.java index f6f9c767f97..3741f40f9a8 100644 --- a/processing/src/main/java/io/druid/query/filter/ExtractionDimFilter.java +++ b/processing/src/main/java/io/druid/query/filter/ExtractionDimFilter.java @@ -113,6 +113,6 @@ public class ExtractionDimFilter implements DimFilter @Override public String toString() { - return String.format("%s(%s) = %s", extractionFn, dimension, value); + return StringUtils.format("%s(%s) = %s", extractionFn, dimension, value); } } diff --git a/processing/src/main/java/io/druid/query/filter/OrDimFilter.java b/processing/src/main/java/io/druid/query/filter/OrDimFilter.java index 9e2427a1b51..d0b9444d1e0 100644 --- a/processing/src/main/java/io/druid/query/filter/OrDimFilter.java +++ b/processing/src/main/java/io/druid/query/filter/OrDimFilter.java @@ -25,6 +25,7 @@ import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.collect.RangeSet; import com.google.common.collect.TreeRangeSet; +import io.druid.java.util.common.StringUtils; import io.druid.query.Druids; import io.druid.segment.filter.Filters; import io.druid.segment.filter.OrFilter; @@ -117,6 +118,6 @@ public class OrDimFilter implements DimFilter @Override public String toString() { - return String.format("(%s)", OR_JOINER.join(fields)); + return StringUtils.format("(%s)", OR_JOINER.join(fields)); } } diff --git a/processing/src/main/java/io/druid/query/filter/SelectorDimFilter.java b/processing/src/main/java/io/druid/query/filter/SelectorDimFilter.java index ffbf983d13d..9ffdb5f9854 100644 --- a/processing/src/main/java/io/druid/query/filter/SelectorDimFilter.java +++ b/processing/src/main/java/io/druid/query/filter/SelectorDimFilter.java @@ -145,9 +145,9 @@ public class SelectorDimFilter implements DimFilter public String toString() { if (extractionFn != null) { - return String.format("%s(%s) = %s", extractionFn, dimension, value); + return StringUtils.format("%s(%s) = %s", extractionFn, dimension, value); } else { - return String.format("%s = %s", dimension, value); + return StringUtils.format("%s = %s", dimension, value); } } diff --git a/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java b/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java index 2e9f33d1fed..1c7fdeea2d1 100644 --- a/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java +++ b/processing/src/main/java/io/druid/query/groupby/GroupByQueryHelper.java @@ -32,6 +32,7 @@ import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.StringDimensionSchema; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.Accumulator; @@ -242,9 +243,12 @@ public class GroupByQueryHelper for (AggregatorFactory aggregatorFactory : query.getAggregatorSpecs()) { final String typeName = aggregatorFactory.getTypeName(); - final ValueType valueType = typeName != null - ? 
Enums.getIfPresent(ValueType.class, typeName.toUpperCase()).orNull() - : null; + final ValueType valueType; + if (typeName != null) { + valueType = Enums.getIfPresent(ValueType.class, StringUtils.toUpperCase(typeName)).orNull(); + } else { + valueType = null; + } if (valueType != null) { types.put(aggregatorFactory.getName(), valueType); } diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/ByteBufferIntList.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/ByteBufferIntList.java index 2fe1706b2a2..8e597c24dfa 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/ByteBufferIntList.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/ByteBufferIntList.java @@ -21,6 +21,7 @@ package io.druid.query.groupby.epinephelinae; import com.google.common.primitives.Ints; import com.metamx.common.IAE; +import io.druid.java.util.common.StringUtils; import java.nio.ByteBuffer; @@ -51,7 +52,7 @@ public class ByteBufferIntList public void add(int val) { if (numElements == maxElements) { - throw new IndexOutOfBoundsException(String.format("List is full with %s elements.", maxElements)); + throw new IndexOutOfBoundsException(StringUtils.format("List is full with %d elements.", maxElements)); } buffer.putInt(numElements * Ints.BYTES, val); numElements++; diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByMergingQueryRunnerV2.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByMergingQueryRunnerV2.java index c7b09faee99..594b22c6ea4 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByMergingQueryRunnerV2.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByMergingQueryRunnerV2.java @@ -38,6 +38,7 @@ import io.druid.collections.Releaser; import io.druid.data.input.Row; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.Accumulator; import io.druid.java.util.common.guava.BaseSequence; import io.druid.java.util.common.guava.CloseQuietly; @@ -139,7 +140,7 @@ public class GroupByMergingQueryRunnerV2 implements QueryRunner final File temporaryStorageDirectory = new File( processingTmpDir, - String.format("druid-groupBy-%s_%s", UUID.randomUUID(), query.getId()) + StringUtils.format("druid-groupBy-%s_%s", UUID.randomUUID(), query.getId()) ); final int priority = QueryContexts.getPriority(query); diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByRowProcessor.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByRowProcessor.java index 97261aa0788..4f15ae440d7 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByRowProcessor.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/GroupByRowProcessor.java @@ -27,6 +27,7 @@ import io.druid.collections.ResourceHolder; import io.druid.common.guava.SettableSupplier; import io.druid.data.input.Row; import io.druid.java.util.common.Pair; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.Accumulator; import io.druid.java.util.common.guava.BaseSequence; import io.druid.java.util.common.guava.CloseQuietly; @@ -79,7 +80,7 @@ public class GroupByRowProcessor final File temporaryStorageDirectory = new File( processingTmpDir, - String.format("druid-groupBy-%s_%s", UUID.randomUUID(), query.getId()) + StringUtils.format("druid-groupBy-%s_%s", 
UUID.randomUUID(), query.getId()) ); final List queryIntervals = query.getIntervals(); diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/LimitedTemporaryStorage.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/LimitedTemporaryStorage.java index bcfd556232d..08870e12dad 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/LimitedTemporaryStorage.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/LimitedTemporaryStorage.java @@ -22,6 +22,7 @@ package io.druid.query.groupby.epinephelinae; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import org.apache.commons.io.FileUtils; @@ -81,7 +82,7 @@ public class LimitedTemporaryStorage implements Closeable FileUtils.forceMkdir(storageDirectory); - final File theFile = new File(storageDirectory, String.format("%08d.tmp", files.size())); + final File theFile = new File(storageDirectory, StringUtils.format("%08d.tmp", files.size())); final EnumSet openOptions = EnumSet.of( StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE diff --git a/processing/src/main/java/io/druid/query/groupby/epinephelinae/TemporaryStorageFullException.java b/processing/src/main/java/io/druid/query/groupby/epinephelinae/TemporaryStorageFullException.java index c035c35ac15..e9092885886 100644 --- a/processing/src/main/java/io/druid/query/groupby/epinephelinae/TemporaryStorageFullException.java +++ b/processing/src/main/java/io/druid/query/groupby/epinephelinae/TemporaryStorageFullException.java @@ -19,12 +19,14 @@ package io.druid.query.groupby.epinephelinae; +import io.druid.java.util.common.StringUtils; + import java.io.IOException; public class TemporaryStorageFullException extends IOException { public TemporaryStorageFullException(final long maxBytesUsed) { - super(String.format("Cannot write to disk, hit limit of %,d bytes.", maxBytesUsed)); + super(StringUtils.format("Cannot write to disk, hit limit of %,d bytes.", maxBytesUsed)); } } diff --git a/processing/src/main/java/io/druid/query/groupby/orderby/OrderByColumnSpec.java b/processing/src/main/java/io/druid/query/groupby/orderby/OrderByColumnSpec.java index c4a5d1c4665..1e09b2e78f5 100644 --- a/processing/src/main/java/io/druid/query/groupby/orderby/OrderByColumnSpec.java +++ b/processing/src/main/java/io/druid/query/groupby/orderby/OrderByColumnSpec.java @@ -65,13 +65,13 @@ public class OrderByColumnSpec @Override public String toString() { - return this.name().toLowerCase(); + return StringUtils.toLowerCase(this.name()); } @JsonCreator public static Direction fromString(String name) { - final String upperName = name.toUpperCase(); + final String upperName = StringUtils.toUpperCase(name); Direction direction = stupidEnumMap.get(upperName); if (direction == null) { diff --git a/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java b/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java index 09c33d077c2..b8ec9ab5f3c 100644 --- a/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java +++ b/processing/src/main/java/io/druid/query/metadata/SegmentAnalyzer.java @@ -125,7 +125,7 @@ public class SegmentAnalyzer break; default: log.warn("Unknown column type[%s].", type); - analysis = ColumnAnalysis.error(String.format("unknown_type_%s", type)); + analysis = ColumnAnalysis.error(StringUtils.format("unknown_type_%s", 
type)); } columns.put(columnName, analysis); @@ -321,7 +321,7 @@ public class SegmentAnalyzer if (analyzingSize() && complexColumn != null) { final ComplexMetricSerde serde = ComplexMetrics.getSerdeForType(typeName); if (serde == null) { - return ColumnAnalysis.error(String.format("unknown_complex_%s", typeName)); + return ColumnAnalysis.error(StringUtils.format("unknown_complex_%s", typeName)); } final Function inputSizeFn = serde.inputSizeFn(); diff --git a/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java b/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java index d185e58a290..31f8d111d04 100644 --- a/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java +++ b/processing/src/main/java/io/druid/query/metadata/metadata/SegmentMetadataQuery.java @@ -25,6 +25,7 @@ import com.fasterxml.jackson.annotation.JsonValue; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.StringUtils; import io.druid.query.BaseQuery; import io.druid.query.DataSource; import io.druid.query.Druids; @@ -68,13 +69,13 @@ public class SegmentMetadataQuery extends BaseQuery @Override public String toString() { - return this.name().toLowerCase(); + return StringUtils.toLowerCase(this.name()); } @JsonCreator public static AnalysisType fromString(String name) { - return valueOf(name.toUpperCase()); + return valueOf(StringUtils.toUpperCase(name)); } public byte[] getCacheKey() diff --git a/processing/src/main/java/io/druid/query/ordering/StringComparator.java b/processing/src/main/java/io/druid/query/ordering/StringComparator.java index 18f36dc85b5..fc1276496c6 100644 --- a/processing/src/main/java/io/druid/query/ordering/StringComparator.java +++ b/processing/src/main/java/io/druid/query/ordering/StringComparator.java @@ -21,6 +21,7 @@ package io.druid.query.ordering; import com.fasterxml.jackson.annotation.JsonCreator; import io.druid.java.util.common.IAE; +import io.druid.java.util.common.StringUtils; import java.util.Comparator; @@ -29,7 +30,7 @@ public abstract class StringComparator implements Comparator @JsonCreator public static StringComparator fromString(String type) { - switch (type.toLowerCase()) { + switch (StringUtils.toLowerCase(type)) { case StringComparators.LEXICOGRAPHIC_NAME: return StringComparators.LEXICOGRAPHIC; case StringComparators.ALPHANUMERIC_NAME: diff --git a/processing/src/main/java/io/druid/query/search/search/SearchSortSpec.java b/processing/src/main/java/io/druid/query/search/search/SearchSortSpec.java index a725bf10fc3..b3e0f392122 100644 --- a/processing/src/main/java/io/druid/query/search/search/SearchSortSpec.java +++ b/processing/src/main/java/io/druid/query/search/search/SearchSortSpec.java @@ -21,6 +21,7 @@ package io.druid.query.search.search; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; +import io.druid.java.util.common.StringUtils; import io.druid.query.ordering.StringComparator; import io.druid.query.ordering.StringComparators; @@ -73,7 +74,7 @@ public class SearchSortSpec @Override public String toString() { - return String.format("%sSort", ordering.toString()); + return StringUtils.format("%sSort", ordering.toString()); } @Override diff --git a/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java b/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java index 
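Every name().toLowerCase() / valueOf(name.toUpperCase()) call site converted in these enum (de)serializers shares one failure mode: default-locale case mapping is not ASCII-stable, so enum round-tripping can break on a JVM running under, for example, a Turkish locale. A short demonstration of the bug the Locale.ENGLISH-pinned helpers prevent (illustrative snippet, not from the patch):

import java.util.Locale;

public class TurkishLocaleDemo
{
  public static void main(String[] args)
  {
    Locale.setDefault(new Locale("tr", "TR"));
    // Default locale: 'i' upper-cases to dotted capital İ (U+0130), so
    // Enum.valueOf("LEXİCOGRAPHİC") would throw IllegalArgumentException.
    System.out.println("lexicographic".toUpperCase());
    // Pinned locale: plain ASCII, round-trips safely.
    System.out.println("lexicographic".toUpperCase(Locale.ENGLISH)); // LEXICOGRAPHIC
  }
}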
e60c545c5cf..54f75ad5843 100644 --- a/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java +++ b/processing/src/main/java/io/druid/query/spec/SpecificSegmentQueryRunner.java @@ -21,6 +21,7 @@ package io.druid.query.spec; import com.google.common.base.Supplier; import com.google.common.collect.Lists; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.Accumulator; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.SequenceWrapper; @@ -63,7 +64,7 @@ public class SpecificSegmentQueryRunner<T> implements QueryRunner<T> final Thread currThread = Thread.currentThread(); final String currThreadName = currThread.getName(); - final String newName = String.format("%s_%s_%s", query.getType(), query.getDataSource(), query.getIntervals()); + final String newName = StringUtils.format("%s_%s_%s", query.getType(), query.getDataSource(), query.getIntervals()); final Sequence<T> baseSequence = doNamed( currThread, currThreadName, newName, new Supplier<Sequence<T>>() diff --git a/processing/src/main/java/io/druid/segment/CompressedVSizeIndexedSupplier.java b/processing/src/main/java/io/druid/segment/CompressedVSizeIndexedSupplier.java index 53cb38b1dde..29b51924d1e 100644 --- a/processing/src/main/java/io/druid/segment/CompressedVSizeIndexedSupplier.java +++ b/processing/src/main/java/io/druid/segment/CompressedVSizeIndexedSupplier.java @@ -195,7 +195,7 @@ public class CompressedVSizeIndexedSupplier implements WritableSupplier<IndexedMultivalue<IndexedInts>> public IndexedInts get(int index) { if (index >= size) { - throw new IllegalArgumentException(String.format("Index[%s] >= size[%s]", index, size)); + throw new IAE("Index[%d] >= size[%d]", index, size); } return values.get(index + offset); } diff --git a/processing/src/main/java/io/druid/segment/FloatColumnSerializer.java b/processing/src/main/java/io/druid/segment/FloatColumnSerializer.java index f06bfe2eebc..15560f14d85 100644 --- a/processing/src/main/java/io/druid/segment/FloatColumnSerializer.java +++ b/processing/src/main/java/io/druid/segment/FloatColumnSerializer.java @@ -19,6 +19,7 @@ package io.druid.segment; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.io.smoosh.FileSmoosher; import io.druid.segment.data.CompressedObjectStrategy; import io.druid.segment.data.CompressionFactory; @@ -64,7 +65,7 @@ public class FloatColumnSerializer implements GenericColumnSerializer { writer = CompressionFactory.getFloatSerializer( ioPeon, - String.format("%s.float_column", filenameBase), + StringUtils.format("%s.float_column", filenameBase), byteOrder, compression ); diff --git a/processing/src/main/java/io/druid/segment/IndexIO.java b/processing/src/main/java/io/druid/segment/IndexIO.java index 0ea368d945a..8057f36eba3 100644 --- a/processing/src/main/java/io/druid/segment/IndexIO.java +++ b/processing/src/main/java/io/druid/segment/IndexIO.java @@ -36,7 +36,10 @@ import io.druid.collections.bitmap.ConciseBitmapFactory; import io.druid.collections.bitmap.ImmutableBitmap; import io.druid.collections.spatial.ImmutableRTree; import io.druid.common.utils.SerializerUtils; +import io.druid.java.util.common.IAE; +import io.druid.java.util.common.IOE; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.io.smoosh.Smoosh; import io.druid.java.util.common.io.smoosh.SmooshedFileMapper; import io.druid.java.util.common.logger.Logger; @@ -211,7 +214,7 @@ public class IndexIO { final long fileSize = indexFile.length(); if (fileSize > Integer.MAX_VALUE) { - throw new
IOException(String.format("File[%s] too large[%s]", indexFile, fileSize)); + throw new IOE("File[%s] too large[%d]", indexFile, fileSize); } } @@ -311,7 +314,7 @@ public class IndexIO indexIn = new FileInputStream(new File(inDir, "index.drd")); byte theVersion = (byte) indexIn.read(); if (theVersion != V8_VERSION) { - throw new IllegalArgumentException(String.format("Unknown version[%s]", theVersion)); + throw new IAE("Unknown version[%d]", theVersion); } } finally { @@ -407,7 +410,6 @@ public class IndexIO return retVal; } - } static interface IndexLoader @@ -536,7 +538,7 @@ public class IndexIO final int theVersion = Ints.fromByteArray(Files.toByteArray(new File(inDir, "version.bin"))); if (theVersion != V9_VERSION) { - throw new IllegalArgumentException(String.format("Expected version[9], got[%s]", theVersion)); + throw new IAE("Expected version[9], got[%d]", theVersion); } SmooshedFileMapper smooshedFiles = Smoosh.map(inDir); @@ -618,21 +620,21 @@ public class IndexIO public static File makeDimFile(File dir, String dimension) { - return new File(dir, String.format("dim_%s.drd", dimension)); + return new File(dir, StringUtils.format("dim_%s.drd", dimension)); } public static File makeNumericDimFile(File dir, String dimension, ByteOrder order) { - return new File(dir, String.format("numeric_dim_%s_%s.drd", dimension, order)); + return new File(dir, StringUtils.format("numeric_dim_%s_%s.drd", dimension, order)); } public static File makeTimeFile(File dir, ByteOrder order) { - return new File(dir, String.format("time_%s.drd", order)); + return new File(dir, StringUtils.format("time_%s.drd", order)); } public static File makeMetricFile(File dir, String metricName, ByteOrder order) { - return new File(dir, String.format("met_%s_%s.drd", metricName, order)); + return new File(dir, StringUtils.format("met_%s_%s.drd", metricName, order)); } } diff --git a/processing/src/main/java/io/druid/segment/LongColumnSerializer.java b/processing/src/main/java/io/druid/segment/LongColumnSerializer.java index 8583ae4ef38..b31a5f50454 100644 --- a/processing/src/main/java/io/druid/segment/LongColumnSerializer.java +++ b/processing/src/main/java/io/druid/segment/LongColumnSerializer.java @@ -19,6 +19,7 @@ package io.druid.segment; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.io.smoosh.FileSmoosher; import io.druid.segment.data.CompressedObjectStrategy; import io.druid.segment.data.CompressionFactory; @@ -71,7 +72,7 @@ public class LongColumnSerializer implements GenericColumnSerializer { writer = CompressionFactory.getLongSerializer( ioPeon, - String.format("%s.long_column", filenameBase), + StringUtils.format("%s.long_column", filenameBase), byteOrder, encoding, compression diff --git a/processing/src/main/java/io/druid/segment/SegmentMissingException.java b/processing/src/main/java/io/druid/segment/SegmentMissingException.java index 3c4437b4035..fb6be3a1ce9 100644 --- a/processing/src/main/java/io/druid/segment/SegmentMissingException.java +++ b/processing/src/main/java/io/druid/segment/SegmentMissingException.java @@ -20,11 +20,12 @@ package io.druid.segment; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; public class SegmentMissingException extends ISE { public SegmentMissingException(String formatText, Object... arguments) { - super(String.format(formatText, arguments)); + super(StringUtils.nonStrictFormat(formatText, arguments)); } public SegmentMissingException(Throwable cause, String formatText, Object... 
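The IAE/IOE replacements above (IndexIO, CompressedVSizeIndexedSupplier) fold the format call into the exception constructor, mirroring SegmentMissingException below. The pattern, sketched on the assumption that each shorthand class simply delegates to the non-strict formatter:

// Assumed shape of the convenience exceptions in io.druid.java.util.common:
public class IAE extends IllegalArgumentException
{
  public IAE(String formatText, Object... arguments)
  {
    // Non-strict: a bad format string degrades the message instead of
    // replacing the real failure with an IllegalFormatException.
    super(StringUtils.nonStrictFormat(formatText, arguments));
  }
}

Call sites then shrink from throw new IllegalArgumentException(String.format("Index[%s] >= size[%s]", index, size)) to throw new IAE("Index[%d] >= size[%d]", index, size), with %d now documenting that the arguments are integers.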
arguments){ diff --git a/processing/src/main/java/io/druid/segment/SegmentValidationException.java b/processing/src/main/java/io/druid/segment/SegmentValidationException.java index b4ea06173e9..12fa53ad700 100644 --- a/processing/src/main/java/io/druid/segment/SegmentValidationException.java +++ b/processing/src/main/java/io/druid/segment/SegmentValidationException.java @@ -19,6 +19,8 @@ package io.druid.segment; +import io.druid.java.util.common.StringUtils; + /** * */ @@ -26,11 +28,11 @@ public class SegmentValidationException extends RuntimeException { public SegmentValidationException(String formatText, Object... arguments) { - super(String.format(formatText, arguments)); + super(StringUtils.nonStrictFormat(formatText, arguments)); } public SegmentValidationException(Throwable cause, String formatText, Object... arguments) { - super(String.format(formatText, arguments), cause); + super(StringUtils.nonStrictFormat(formatText, arguments), cause); } } diff --git a/processing/src/main/java/io/druid/segment/StringDimensionMergerV9.java b/processing/src/main/java/io/druid/segment/StringDimensionMergerV9.java index 2f8e23d6895..cc91e0873d7 100644 --- a/processing/src/main/java/io/druid/segment/StringDimensionMergerV9.java +++ b/processing/src/main/java/io/druid/segment/StringDimensionMergerV9.java @@ -32,6 +32,7 @@ import io.druid.collections.spatial.RTree; import io.druid.collections.spatial.split.LinearGutmanSplitStrategy; import io.druid.java.util.common.ByteBufferUtils; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.io.Closer; import io.druid.java.util.common.logger.Logger; import io.druid.segment.column.ColumnCapabilities; @@ -156,7 +157,7 @@ public class StringDimensionMergerV9 implements DimensionMergerV9 numMergeIndex++; } - String dictFilename = String.format("%s.dim_values", dimensionName); + String dictFilename = StringUtils.format("%s.dim_values", dimensionName); dictionaryWriter = new GenericIndexedWriter<>( ioPeon, dictFilename, @@ -200,7 +201,7 @@ public class StringDimensionMergerV9 implements DimensionMergerV9 { final CompressedObjectStrategy.CompressionStrategy compressionStrategy = indexSpec.getDimensionCompression(); - String filenameBase = String.format("%s.forward_dim", dimensionName); + String filenameBase = StringUtils.format("%s.forward_dim", dimensionName); if (capabilities.hasMultipleValues()) { encodedValueWriter = (compressionStrategy != CompressedObjectStrategy.CompressionStrategy.UNCOMPRESSED) ? 
CompressedVSizeIndexedV3Writer.create( @@ -275,7 +276,7 @@ public class StringDimensionMergerV9 implements DimensionMergerV9 long dimStartTime = System.currentTimeMillis(); final BitmapSerdeFactory bitmapSerdeFactory = indexSpec.getBitmapSerdeFactory(); - String bmpFilename = String.format("%s.inverted", dimensionName); + String bmpFilename = StringUtils.format("%s.inverted", dimensionName); bitmapWriter = new GenericIndexedWriter<>( ioPeon, bmpFilename, @@ -309,7 +310,7 @@ public class StringDimensionMergerV9 implements DimensionMergerV9 if (hasSpatial) { spatialWriter = new ByteBufferWriter<>( ioPeon, - String.format("%s.spatial", dimensionName), + StringUtils.format("%s.spatial", dimensionName), new IndexedRTree.ImmutableRTreeObjectStrategy(bmpFactory) ); spatialWriter.open(); diff --git a/processing/src/main/java/io/druid/segment/column/ValueType.java b/processing/src/main/java/io/druid/segment/column/ValueType.java index 171517c2f90..e486904eb2a 100644 --- a/processing/src/main/java/io/druid/segment/column/ValueType.java +++ b/processing/src/main/java/io/druid/segment/column/ValueType.java @@ -20,6 +20,7 @@ package io.druid.segment.column; import com.fasterxml.jackson.annotation.JsonCreator; +import io.druid.java.util.common.StringUtils; /** */ @@ -36,6 +37,6 @@ public enum ValueType if (name == null) { return null; } - return valueOf(name.toUpperCase()); + return valueOf(StringUtils.toUpperCase(name)); } } diff --git a/processing/src/main/java/io/druid/segment/data/BlockLayoutIndexedFloatSupplier.java b/processing/src/main/java/io/druid/segment/data/BlockLayoutIndexedFloatSupplier.java index 08a0415a642..8ac5b84be10 100644 --- a/processing/src/main/java/io/druid/segment/data/BlockLayoutIndexedFloatSupplier.java +++ b/processing/src/main/java/io/druid/segment/data/BlockLayoutIndexedFloatSupplier.java @@ -22,6 +22,7 @@ package io.druid.segment.data; import com.google.common.base.Supplier; import com.google.common.primitives.Floats; import io.druid.collections.ResourceHolder; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.CloseQuietly; import io.druid.java.util.common.io.smoosh.SmooshedFileMapper; @@ -118,7 +119,7 @@ public class BlockLayoutIndexedFloatSupplier implements Supplier { if (totalSize - index < toFill.length) { throw new IndexOutOfBoundsException( - String.format( + StringUtils.format( "Cannot fill array of size[%,d] at index[%,d]. Max size[%,d]", toFill.length, index, totalSize ) ); diff --git a/processing/src/main/java/io/druid/segment/data/BlockLayoutIndexedLongSupplier.java b/processing/src/main/java/io/druid/segment/data/BlockLayoutIndexedLongSupplier.java index 2976758f9a1..7202b5bc02f 100644 --- a/processing/src/main/java/io/druid/segment/data/BlockLayoutIndexedLongSupplier.java +++ b/processing/src/main/java/io/druid/segment/data/BlockLayoutIndexedLongSupplier.java @@ -21,6 +21,7 @@ package io.druid.segment.data; import com.google.common.base.Supplier; import io.druid.collections.ResourceHolder; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.CloseQuietly; import io.druid.java.util.common.io.smoosh.SmooshedFileMapper; @@ -154,7 +155,7 @@ public class BlockLayoutIndexedLongSupplier implements Supplier { if (totalSize - index < toFill.length) { throw new IndexOutOfBoundsException( - String.format( + StringUtils.format( "Cannot fill array of size[%,d] at index[%,d]. 
Max size[%,d]", toFill.length, index, totalSize ) ); diff --git a/processing/src/main/java/io/druid/segment/data/ByteBufferWriter.java b/processing/src/main/java/io/druid/segment/data/ByteBufferWriter.java index 82baa089baa..75345588801 100644 --- a/processing/src/main/java/io/druid/segment/data/ByteBufferWriter.java +++ b/processing/src/main/java/io/druid/segment/data/ByteBufferWriter.java @@ -27,6 +27,7 @@ import com.google.common.io.CountingOutputStream; import com.google.common.io.InputSupplier; import com.google.common.primitives.Ints; import io.druid.common.utils.SerializerUtils; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.io.smoosh.FileSmoosher; import java.io.Closeable; @@ -76,7 +77,7 @@ public class ByteBufferWriter implements Closeable private String makeFilename(String suffix) { - return String.format("%s.%s", filenameBase, suffix); + return StringUtils.format("%s.%s", filenameBase, suffix); } @Override diff --git a/processing/src/main/java/io/druid/segment/data/CompressedIntsIndexedSupplier.java b/processing/src/main/java/io/druid/segment/data/CompressedIntsIndexedSupplier.java index 3978dcb8ce8..4426a15ac91 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressedIntsIndexedSupplier.java +++ b/processing/src/main/java/io/druid/segment/data/CompressedIntsIndexedSupplier.java @@ -25,6 +25,7 @@ import com.google.common.primitives.Ints; import io.druid.collections.ResourceHolder; import io.druid.collections.StupidResourceHolder; import io.druid.java.util.common.IAE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.CloseQuietly; import io.druid.java.util.common.io.smoosh.SmooshedFileMapper; import io.druid.query.monomorphicprocessing.RuntimeShapeInspector; @@ -318,7 +319,7 @@ public class CompressedIntsIndexedSupplier implements WritableSupplier implements ObjectStrateg @Override public String toString() { - return this.name().toLowerCase(); + return StringUtils.toLowerCase(this.name()); } @JsonCreator public static CompressionStrategy fromString(String name) { - return valueOf(name.toUpperCase()); + return valueOf(StringUtils.toUpperCase(name)); } static final Map idMap = Maps.newHashMap(); diff --git a/processing/src/main/java/io/druid/segment/data/CompressedVSizeIndexedV3Writer.java b/processing/src/main/java/io/druid/segment/data/CompressedVSizeIndexedV3Writer.java index 6972735aa8f..660127cdeb1 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressedVSizeIndexedV3Writer.java +++ b/processing/src/main/java/io/druid/segment/data/CompressedVSizeIndexedV3Writer.java @@ -22,6 +22,7 @@ */ package io.druid.segment.data; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.io.smoosh.FileSmoosher; import io.druid.segment.CompressedVSizeIndexedV3Supplier; import io.druid.segment.IndexIO; @@ -48,14 +49,14 @@ public class CompressedVSizeIndexedV3Writer extends MultiValueIndexedIntsWriter return new CompressedVSizeIndexedV3Writer( new CompressedIntsIndexedWriter( ioPeon, - String.format("%s.offsets", filenameBase), + StringUtils.format("%s.offsets", filenameBase), CompressedIntsIndexedSupplier.MAX_INTS_IN_BUFFER, IndexIO.BYTE_ORDER, compression ), new CompressedVSizeIntsIndexedWriter( ioPeon, - String.format("%s.values", filenameBase), + StringUtils.format("%s.values", filenameBase), maxValue, CompressedVSizeIntsIndexedSupplier.maxIntsInBufferForValue(maxValue), IndexIO.BYTE_ORDER, diff --git a/processing/src/main/java/io/druid/segment/data/CompressionFactory.java 
b/processing/src/main/java/io/druid/segment/data/CompressionFactory.java index 94b2335762f..a84a77bb61f 100644 --- a/processing/src/main/java/io/druid/segment/data/CompressionFactory.java +++ b/processing/src/main/java/io/druid/segment/data/CompressionFactory.java @@ -24,6 +24,7 @@ import com.fasterxml.jackson.annotation.JsonValue; import com.google.common.base.Supplier; import com.google.common.collect.Maps; import io.druid.java.util.common.IAE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.io.smoosh.SmooshedFileMapper; import java.io.IOException; @@ -137,13 +138,13 @@ public class CompressionFactory @Override public String toString() { - return this.name().toLowerCase(); + return StringUtils.toLowerCase(this.name()); } @JsonCreator public static LongEncodingStrategy fromString(String name) { - return valueOf(name.toUpperCase()); + return valueOf(StringUtils.toUpperCase(name)); } } diff --git a/processing/src/main/java/io/druid/segment/data/EntireLayoutIndexedFloatSupplier.java b/processing/src/main/java/io/druid/segment/data/EntireLayoutIndexedFloatSupplier.java index 14b806fee2a..22559e36729 100644 --- a/processing/src/main/java/io/druid/segment/data/EntireLayoutIndexedFloatSupplier.java +++ b/processing/src/main/java/io/druid/segment/data/EntireLayoutIndexedFloatSupplier.java @@ -20,6 +20,7 @@ package io.druid.segment.data; import com.google.common.base.Supplier; +import io.druid.java.util.common.StringUtils; import java.nio.ByteBuffer; import java.nio.ByteOrder; @@ -62,7 +63,7 @@ public class EntireLayoutIndexedFloatSupplier implements Supplier<IndexedFloats> { if (totalSize - index < toFill.length) { throw new IndexOutOfBoundsException( - String.format( + StringUtils.format( "Cannot fill array of size[%,d] at index[%,d]. Max size[%,d]", toFill.length, index, totalSize ) ); diff --git a/processing/src/main/java/io/druid/segment/data/EntireLayoutIndexedLongSupplier.java b/processing/src/main/java/io/druid/segment/data/EntireLayoutIndexedLongSupplier.java index 6ddd8da1025..5b6b6f589f2 100644 --- a/processing/src/main/java/io/druid/segment/data/EntireLayoutIndexedLongSupplier.java +++ b/processing/src/main/java/io/druid/segment/data/EntireLayoutIndexedLongSupplier.java @@ -20,6 +20,7 @@ package io.druid.segment.data; import com.google.common.base.Supplier; +import io.druid.java.util.common.StringUtils; public class EntireLayoutIndexedLongSupplier implements Supplier<IndexedLongs> { @@ -59,7 +60,7 @@ public class EntireLayoutIndexedLongSupplier implements Supplier<IndexedLongs> { if (totalSize - index < toFill.length) { throw new IndexOutOfBoundsException( - String.format( + StringUtils.format( "Cannot fill array of size[%,d] at index[%,d]. Max size[%,d]", toFill.length, index, totalSize ) ); diff --git a/processing/src/main/java/io/druid/segment/data/GenericIndexed.java b/processing/src/main/java/io/druid/segment/data/GenericIndexed.java index 2922786ecce..e5a4f9a9bd3 100644 --- a/processing/src/main/java/io/druid/segment/data/GenericIndexed.java +++ b/processing/src/main/java/io/druid/segment/data/GenericIndexed.java @@ -63,8 +63,8 @@ import java.util.Iterator; * bytes 11-14 => columnNameLength * bytes 15-columnNameLength => columnName *

- * Header file name is identified as: String.format("%s_header", columnName) - * value files are identified as: String.format("%s_value_%d", columnName, fileNumber) + * Header file name is identified as: StringUtils.format("%s_header", columnName) + * value files are identified as: StringUtils.format("%s_value_%d", columnName, fileNumber) * number of value files == numElements/numberOfElementsPerValueFile */ public class GenericIndexed<T> implements Indexed<T> @@ -236,7 +236,7 @@ public class GenericIndexed<T> implements Indexed<T> throw new IAE("Index[%s] < 0", index); } if (index >= size) { - throw new IAE(String.format("Index[%s] >= size[%s]", index, size)); + throw new IAE("Index[%d] >= size[%d]", index, size); } } diff --git a/processing/src/main/java/io/druid/segment/data/GenericIndexedWriter.java b/processing/src/main/java/io/druid/segment/data/GenericIndexedWriter.java index 993bea56ed1..82177a0e12a 100644 --- a/processing/src/main/java/io/druid/segment/data/GenericIndexedWriter.java +++ b/processing/src/main/java/io/druid/segment/data/GenericIndexedWriter.java @@ -97,12 +97,12 @@ public class GenericIndexedWriter<T> implements Closeable public static String generateValueFileName(String fileNameBase, int fileNum) { - return String.format("%s_value_%d", fileNameBase, fileNum); + return StringUtils.format("%s_value_%d", fileNameBase, fileNum); } public static String generateHeaderFileName(String fileNameBase) { - return String.format("%s_header", fileNameBase); + return StringUtils.format("%s_header", fileNameBase); } private static void writeBytesIntoSmooshedChannel( @@ -160,7 +160,7 @@ public class GenericIndexedWriter<T> implements Closeable private String makeFilename(String suffix) { - return String.format("%s.%s", filenameBase, suffix); + return StringUtils.format("%s.%s", filenameBase, suffix); } @Override diff --git a/processing/src/main/java/io/druid/segment/data/VSizeIndexed.java b/processing/src/main/java/io/druid/segment/data/VSizeIndexed.java index 1f7eb057b74..f2fb63bd69e 100644 --- a/processing/src/main/java/io/druid/segment/data/VSizeIndexed.java +++ b/processing/src/main/java/io/druid/segment/data/VSizeIndexed.java @@ -123,7 +123,7 @@ public class VSizeIndexed implements IndexedMultivalue<IndexedInts> public VSizeIndexedInts get(int index) { if (index >= size) { - throw new IllegalArgumentException(String.format("Index[%s] >= size[%s]", index, size)); + throw new IAE("Index[%d] >= size[%d]", index, size); } ByteBuffer myBuffer = theBuffer.asReadOnlyBuffer(); diff --git a/processing/src/main/java/io/druid/segment/data/VSizeIndexedIntsWriter.java b/processing/src/main/java/io/druid/segment/data/VSizeIndexedIntsWriter.java index 727072aa075..624aa3d4e73 100644 --- a/processing/src/main/java/io/druid/segment/data/VSizeIndexedIntsWriter.java +++ b/processing/src/main/java/io/druid/segment/data/VSizeIndexedIntsWriter.java @@ -22,6 +22,7 @@ package io.druid.segment.data; import com.google.common.io.ByteStreams; import com.google.common.io.CountingOutputStream; import com.google.common.primitives.Ints; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.io.smoosh.FileSmoosher; import java.io.IOException; @@ -51,7 +52,7 @@ public class VSizeIndexedIntsWriter extends SingleValueIndexedIntsWriter ) { this.ioPeon = ioPeon; - this.valueFileName = String.format("%s.values", filenameBase); + this.valueFileName = StringUtils.format("%s.values", filenameBase); this.numBytes = VSizeIndexedInts.getNumBytesForMax(maxValue); } diff --git
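For a hypothetical column named "foo" stored across two value files, the naming scheme documented in the GenericIndexed javadoc above works out as:

// Hypothetical example of the documented naming scheme:
GenericIndexedWriter.generateHeaderFileName("foo");   // "foo_header"
GenericIndexedWriter.generateValueFileName("foo", 0); // "foo_value_0"
GenericIndexedWriter.generateValueFileName("foo", 1); // "foo_value_1"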
a/processing/src/main/java/io/druid/segment/data/VSizeIndexedWriter.java b/processing/src/main/java/io/druid/segment/data/VSizeIndexedWriter.java index fb8670a613c..732b3722ad5 100644 --- a/processing/src/main/java/io/druid/segment/data/VSizeIndexedWriter.java +++ b/processing/src/main/java/io/druid/segment/data/VSizeIndexedWriter.java @@ -27,6 +27,7 @@ import com.google.common.io.Closeables; import com.google.common.io.CountingOutputStream; import com.google.common.io.InputSupplier; import com.google.common.primitives.Ints; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.io.smoosh.FileSmoosher; import java.io.Closeable; @@ -64,9 +65,9 @@ public class VSizeIndexedWriter extends MultiValueIndexedIntsWriter implements C ) { this.ioPeon = ioPeon; - this.metaFileName = String.format("%s.meta", filenameBase); - this.headerFileName = String.format("%s.header", filenameBase); - this.valuesFileName = String.format("%s.values", filenameBase); + this.metaFileName = StringUtils.format("%s.meta", filenameBase); + this.headerFileName = StringUtils.format("%s.header", filenameBase); + this.valuesFileName = StringUtils.format("%s.values", filenameBase); this.maxId = maxId; } diff --git a/processing/src/main/java/io/druid/segment/filter/AndFilter.java b/processing/src/main/java/io/druid/segment/filter/AndFilter.java index 0c1405b7f3c..176633c69de 100644 --- a/processing/src/main/java/io/druid/segment/filter/AndFilter.java +++ b/processing/src/main/java/io/druid/segment/filter/AndFilter.java @@ -23,6 +23,7 @@ import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import io.druid.collections.bitmap.ImmutableBitmap; +import io.druid.java.util.common.StringUtils; import io.druid.query.BitmapResultFactory; import io.druid.query.filter.BitmapIndexSelector; import io.druid.query.filter.BooleanFilter; @@ -175,7 +176,7 @@ public class AndFilter implements BooleanFilter @Override public String toString() { - return String.format("(%s)", AND_JOINER.join(filters)); + return StringUtils.format("(%s)", AND_JOINER.join(filters)); } private ValueMatcher makeMatcher(final ValueMatcher[] baseMatchers) diff --git a/processing/src/main/java/io/druid/segment/filter/DimensionPredicateFilter.java b/processing/src/main/java/io/druid/segment/filter/DimensionPredicateFilter.java index cde3d416a02..b6fdea538cf 100644 --- a/processing/src/main/java/io/druid/segment/filter/DimensionPredicateFilter.java +++ b/processing/src/main/java/io/druid/segment/filter/DimensionPredicateFilter.java @@ -21,6 +21,7 @@ package io.druid.segment.filter; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; +import io.druid.java.util.common.StringUtils; import io.druid.query.BitmapResultFactory; import io.druid.query.extraction.ExtractionFn; import io.druid.query.filter.BitmapIndexSelector; @@ -141,9 +142,9 @@ public class DimensionPredicateFilter implements Filter public String toString() { if (extractionFn != null) { - return String.format("%s(%s) = %s", extractionFn, dimension, basePredicateString); + return StringUtils.format("%s(%s) = %s", extractionFn, dimension, basePredicateString); } else { - return String.format("%s = %s", dimension, basePredicateString); + return StringUtils.format("%s = %s", dimension, basePredicateString); } } } diff --git a/processing/src/main/java/io/druid/segment/filter/OrFilter.java b/processing/src/main/java/io/druid/segment/filter/OrFilter.java index 216c9d0c666..d100f16bf7f 100644 --- 
a/processing/src/main/java/io/druid/segment/filter/OrFilter.java +++ b/processing/src/main/java/io/druid/segment/filter/OrFilter.java @@ -23,6 +23,7 @@ import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import io.druid.collections.bitmap.ImmutableBitmap; +import io.druid.java.util.common.StringUtils; import io.druid.query.BitmapResultFactory; import io.druid.query.filter.BitmapIndexSelector; import io.druid.query.filter.BooleanFilter; @@ -181,6 +182,6 @@ public class OrFilter implements BooleanFilter @Override public String toString() { - return String.format("(%s)", OR_JOINER.join(filters)); + return StringUtils.format("(%s)", OR_JOINER.join(filters)); } } diff --git a/processing/src/main/java/io/druid/segment/filter/SelectorFilter.java b/processing/src/main/java/io/druid/segment/filter/SelectorFilter.java index eef0148f061..7ecd9283665 100644 --- a/processing/src/main/java/io/druid/segment/filter/SelectorFilter.java +++ b/processing/src/main/java/io/druid/segment/filter/SelectorFilter.java @@ -19,6 +19,7 @@ package io.druid.segment.filter; +import io.druid.java.util.common.StringUtils; import io.druid.query.BitmapResultFactory; import io.druid.query.filter.BitmapIndexSelector; import io.druid.query.filter.Filter; @@ -77,6 +78,6 @@ public class SelectorFilter implements Filter @Override public String toString() { - return String.format("%s = %s", dimension, value); + return StringUtils.format("%s = %s", dimension, value); } } diff --git a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndex.java b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndex.java index 7cea6d9be1f..c35ae9859e5 100644 --- a/processing/src/main/java/io/druid/segment/incremental/IncrementalIndex.java +++ b/processing/src/main/java/io/druid/segment/incremental/IncrementalIndex.java @@ -39,6 +39,7 @@ import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.SpatialDimensionSchema; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularity; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.PostAggregator; @@ -143,7 +144,7 @@ public abstract class IncrementalIndex implements Iterable, final ObjectColumnSelector rawColumnSelector = baseSelectorFactory.makeObjectColumnSelector(column); - if ((Enums.getIfPresent(ValueType.class, typeName.toUpperCase()).isPresent() && !typeName.equalsIgnoreCase(ValueType.COMPLEX.name())) + if ((Enums.getIfPresent(ValueType.class, StringUtils.toUpperCase(typeName)).isPresent() && !typeName.equalsIgnoreCase(ValueType.COMPLEX.name())) || !deserializeComplexMetrics) { return rawColumnSelector; } else { diff --git a/processing/src/main/java/io/druid/segment/incremental/IndexSizeExceededException.java b/processing/src/main/java/io/druid/segment/incremental/IndexSizeExceededException.java index 4fc61a34b1b..6d33bd15b3d 100644 --- a/processing/src/main/java/io/druid/segment/incremental/IndexSizeExceededException.java +++ b/processing/src/main/java/io/druid/segment/incremental/IndexSizeExceededException.java @@ -19,6 +19,8 @@ package io.druid.segment.incremental; +import io.druid.java.util.common.StringUtils; + import java.io.IOException; public class IndexSizeExceededException extends IOException @@ -29,12 +31,12 @@ public class IndexSizeExceededException extends IOException public IndexSizeExceededException(String 
formatText, Object... arguments) { - super(String.format(formatText, arguments)); + super(StringUtils.nonStrictFormat(formatText, arguments)); } public IndexSizeExceededException(Throwable cause, String formatText, Object... arguments) { - super(String.format(formatText, arguments), cause); + super(StringUtils.nonStrictFormat(formatText, arguments), cause); } public IndexSizeExceededException(Throwable cause) diff --git a/processing/src/main/java/io/druid/segment/incremental/OffheapIncrementalIndex.java b/processing/src/main/java/io/druid/segment/incremental/OffheapIncrementalIndex.java index a6aa8292577..41dca35d027 100644 --- a/processing/src/main/java/io/druid/segment/incremental/OffheapIncrementalIndex.java +++ b/processing/src/main/java/io/druid/segment/incremental/OffheapIncrementalIndex.java @@ -26,6 +26,7 @@ import io.druid.collections.ResourceHolder; import io.druid.data.input.InputRow; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.java.util.common.parsers.ParseException; import io.druid.query.aggregation.AggregatorFactory; @@ -247,7 +248,7 @@ public class OffheapIncrementalIndex extends IncrementalIndex { final boolean canAdd = size() < maxRowCount; if (!canAdd) { - outOfRowsReason = String.format("Maximum number of rows [%d] reached", maxRowCount); + outOfRowsReason = StringUtils.format("Maximum number of rows [%d] reached", maxRowCount); } return canAdd; } diff --git a/processing/src/main/java/io/druid/segment/incremental/OnheapIncrementalIndex.java b/processing/src/main/java/io/druid/segment/incremental/OnheapIncrementalIndex.java index 059d954772c..8c62bd14967 100644 --- a/processing/src/main/java/io/druid/segment/incremental/OnheapIncrementalIndex.java +++ b/processing/src/main/java/io/druid/segment/incremental/OnheapIncrementalIndex.java @@ -23,6 +23,7 @@ import com.google.common.base.Supplier; import com.google.common.base.Throwables; import com.google.common.collect.Maps; import io.druid.data.input.InputRow; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.io.Closer; import io.druid.java.util.common.logger.Logger; import io.druid.java.util.common.parsers.ParseException; @@ -237,7 +238,7 @@ public class OnheapIncrementalIndex extends IncrementalIndex { final boolean canAdd = size() < maxRowCount; if (!canAdd) { - outOfRowsReason = String.format("Maximum number of rows [%d] reached", maxRowCount); + outOfRowsReason = StringUtils.format("Maximum number of rows [%d] reached", maxRowCount); } return canAdd; } diff --git a/processing/src/main/java/io/druid/segment/serde/ComplexColumnSerializer.java b/processing/src/main/java/io/druid/segment/serde/ComplexColumnSerializer.java index b31416d1b3f..9b97ceaa902 100644 --- a/processing/src/main/java/io/druid/segment/serde/ComplexColumnSerializer.java +++ b/processing/src/main/java/io/druid/segment/serde/ComplexColumnSerializer.java @@ -19,6 +19,7 @@ package io.druid.segment.serde; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.io.smoosh.FileSmoosher; import io.druid.segment.GenericColumnSerializer; import io.druid.segment.data.GenericIndexedWriter; @@ -59,7 +60,7 @@ public class ComplexColumnSerializer implements GenericColumnSerializer public void open() throws IOException { writer = new GenericIndexedWriter( - ioPeon, String.format("%s.complex_column", filenameBase), strategy + ioPeon, StringUtils.format("%s.complex_column", 
filenameBase), strategy ); writer.open(); } diff --git a/processing/src/main/java/io/druid/segment/serde/LargeColumnSupportedComplexColumnSerializer.java b/processing/src/main/java/io/druid/segment/serde/LargeColumnSupportedComplexColumnSerializer.java index 7c45b62c22b..888912da9b7 100644 --- a/processing/src/main/java/io/druid/segment/serde/LargeColumnSupportedComplexColumnSerializer.java +++ b/processing/src/main/java/io/druid/segment/serde/LargeColumnSupportedComplexColumnSerializer.java @@ -19,6 +19,7 @@ package io.druid.segment.serde; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.io.smoosh.FileSmoosher; import io.druid.segment.GenericColumnSerializer; import io.druid.segment.data.GenericIndexedWriter; @@ -81,7 +82,7 @@ public class LargeColumnSupportedComplexColumnSerializer implements GenericColum public void open() throws IOException { writer = new GenericIndexedWriter( - ioPeon, String.format("%s.complex_column", filenameBase), strategy, columnSize); + ioPeon, StringUtils.format("%s.complex_column", filenameBase), strategy, columnSize); writer.open(); } diff --git a/processing/src/test/java/io/druid/granularity/QueryGranularityTest.java b/processing/src/test/java/io/druid/granularity/QueryGranularityTest.java index 0080d06cd03..b7466914745 100644 --- a/processing/src/test/java/io/druid/granularity/QueryGranularityTest.java +++ b/processing/src/test/java/io/druid/granularity/QueryGranularityTest.java @@ -25,6 +25,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.DurationGranularity; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; @@ -725,12 +726,12 @@ public class QueryGranularityTest Assert.assertEquals( granularity, - mapper.readValue("\"" + granularityType.name().toUpperCase() + "\"", Granularity.class) + mapper.readValue("\"" + StringUtils.toUpperCase(granularityType.name()) + "\"", Granularity.class) ); Assert.assertEquals( granularity, - mapper.readValue("\"" + granularityType.name().toLowerCase() + "\"", Granularity.class) + mapper.readValue("\"" + StringUtils.toLowerCase(granularityType.name()) + "\"", Granularity.class) ); Assert.assertEquals( @@ -740,12 +741,12 @@ public class QueryGranularityTest if (granularityType == GranularityType.ALL || granularityType == GranularityType.NONE) { Assert.assertEquals( - "{\"type\":\"" + granularityType.name().toLowerCase() + "\"}", + "{\"type\":\"" + StringUtils.toLowerCase(granularityType.name()) + "\"}", mapper.writeValueAsString(granularity) ); } else { Assert.assertEquals( - "\"" + granularityType.name().toUpperCase() + "\"", + "\"" + StringUtils.toUpperCase(granularityType.name()) + "\"", mapper.writeValueAsString(granularity) ); } diff --git a/processing/src/test/java/io/druid/jackson/DefaultObjectMapperTest.java b/processing/src/test/java/io/druid/jackson/DefaultObjectMapperTest.java index f523a4c348d..d2f1edaaed6 100644 --- a/processing/src/test/java/io/druid/jackson/DefaultObjectMapperTest.java +++ b/processing/src/test/java/io/druid/jackson/DefaultObjectMapperTest.java @@ -20,6 +20,7 @@ package io.druid.jackson; import com.fasterxml.jackson.databind.ObjectMapper; +import io.druid.java.util.common.StringUtils; import org.joda.time.DateTime; import org.junit.Assert; import org.junit.Test; @@ -35,6 +36,6 
@@ public class DefaultObjectMapperTest { final DateTime time = new DateTime(); - Assert.assertEquals(String.format("\"%s\"", time), mapper.writeValueAsString(time)); + Assert.assertEquals(StringUtils.format("\"%s\"", time), mapper.writeValueAsString(time)); } } diff --git a/processing/src/test/java/io/druid/query/aggregation/JavaScriptAggregatorTest.java b/processing/src/test/java/io/druid/query/aggregation/JavaScriptAggregatorTest.java index c26349711b4..1550c130e16 100644 --- a/processing/src/test/java/io/druid/query/aggregation/JavaScriptAggregatorTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/JavaScriptAggregatorTest.java @@ -22,6 +22,7 @@ package io.druid.query.aggregation; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.collect.Maps; +import io.druid.java.util.common.StringUtils; import io.druid.js.JavaScriptConfig; import io.druid.query.dimension.DimensionSpec; import io.druid.segment.ColumnSelectorFactory; @@ -321,7 +322,7 @@ public class JavaScriptAggregatorTest a.aggregate(); a.aggregate(); a.aggregate(); - if(i % 1000 == 0) System.out.println(String.format("Query object %d", i)); + if(i % 1000 == 0) System.out.println(StringUtils.format("Query object %d", i)); } */ @@ -357,7 +358,7 @@ public class JavaScriptAggregatorTest ++i; } long t1 = System.currentTimeMillis() - t; - System.out.println(String.format("JavaScript aggregator == %,f: %d ms", aggRhino.getFloat(), t1)); + System.out.println(StringUtils.format("JavaScript aggregator == %,f: %d ms", aggRhino.getFloat(), t1)); t = System.currentTimeMillis(); i = 0; @@ -366,8 +367,8 @@ public class JavaScriptAggregatorTest ++i; } long t2 = System.currentTimeMillis() - t; - System.out.println(String.format("DoubleSum aggregator == %,f: %d ms", doubleAgg.getFloat(), t2)); + System.out.println(StringUtils.format("DoubleSum aggregator == %,f: %d ms", doubleAgg.getFloat(), t2)); - System.out.println(String.format("JavaScript is %2.1fx slower", (double) t1 / t2)); + System.out.println(StringUtils.format("JavaScript is %2.1fx slower", (double) t1 / t2)); } } diff --git a/processing/src/test/java/io/druid/query/aggregation/MetricManipulatorFnsTest.java b/processing/src/test/java/io/druid/query/aggregation/MetricManipulatorFnsTest.java index 9a2bb83e5ef..94b2b4f94f2 100644 --- a/processing/src/test/java/io/druid/query/aggregation/MetricManipulatorFnsTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/MetricManipulatorFnsTest.java @@ -20,6 +20,7 @@ package io.druid.query.aggregation; import io.druid.hll.HyperLogLogCollector; +import io.druid.java.util.common.StringUtils; import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; import io.druid.segment.TestLongColumnSelector; import org.junit.Assert; @@ -104,7 +105,7 @@ public class MetricManipulatorFnsTest for (Object[] argList : constructorArrays) { Assert.assertEquals( - String.format( + StringUtils.format( "Arglist %s is too short. 
Expected 6 found %d", Arrays.toString(argList), argList.length diff --git a/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactoryTest.java b/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactoryTest.java index c301cb7f2c6..1df8b7d7667 100644 --- a/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactoryTest.java +++ b/processing/src/test/java/io/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactoryTest.java @@ -23,6 +23,7 @@ import com.google.common.hash.HashFunction; import com.google.common.hash.Hashing; import io.druid.hll.HLLCV0; import io.druid.hll.HyperLogLogCollector; +import io.druid.java.util.common.StringUtils; import org.junit.Assert; import org.junit.Test; @@ -154,11 +155,11 @@ public class HyperUniquesAggregatorFactoryTest // then, assert hyperloglog comparator behaves consistently with estimated cardinalities Assert.assertEquals( - String.format("orderedByComparator=%d, orderedByCardinality=%d,\n" + - "Left={cardinality=%f, hll=%s},\n" + - "Right={cardinality=%f, hll=%s},\n", orderedByComparator, orderedByCardinality, - leftCollector.estimateCardinality(), leftCollector, - rightCollector.estimateCardinality(), rightCollector), + StringUtils.format("orderedByComparator=%d, orderedByCardinality=%d,\n" + + "Left={cardinality=%f, hll=%s},\n" + + "Right={cardinality=%f, hll=%s},\n", orderedByComparator, orderedByCardinality, + leftCollector.estimateCardinality(), leftCollector, + rightCollector.estimateCardinality(), rightCollector), orderedByCardinality, orderedByComparator ); diff --git a/processing/src/test/java/io/druid/query/dimension/LegacyDimensionSpecTest.java b/processing/src/test/java/io/druid/query/dimension/LegacyDimensionSpecTest.java index 6f486084473..44c74c13c6f 100644 --- a/processing/src/test/java/io/druid/query/dimension/LegacyDimensionSpecTest.java +++ b/processing/src/test/java/io/druid/query/dimension/LegacyDimensionSpecTest.java @@ -22,6 +22,7 @@ package io.druid.query.dimension; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.StringUtils; import org.junit.Assert; import org.junit.Test; @@ -37,7 +38,7 @@ public class LegacyDimensionSpecTest { final String dimension = "testDimension"; final List deserializedSpecs = mapper.readValue( - String.format("[\"%s\"]", dimension), new TypeReference>() + StringUtils.format("[\"%s\"]", dimension), new TypeReference>() { } ); diff --git a/processing/src/test/java/io/druid/query/extraction/MapLookupExtractionFnSerDeTest.java b/processing/src/test/java/io/druid/query/extraction/MapLookupExtractionFnSerDeTest.java index 65db4e6b92d..39308954007 100644 --- a/processing/src/test/java/io/druid/query/extraction/MapLookupExtractionFnSerDeTest.java +++ b/processing/src/test/java/io/druid/query/extraction/MapLookupExtractionFnSerDeTest.java @@ -26,6 +26,7 @@ import com.google.inject.Injector; import com.google.inject.Key; import io.druid.guice.GuiceInjectors; import io.druid.guice.annotations.Json; +import io.druid.java.util.common.StringUtils; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; @@ -56,7 +57,7 @@ public class MapLookupExtractionFnSerDeTest public void testDeserialization() throws IOException { final DimExtractionFn fn = mapper.reader(DimExtractionFn.class).readValue( - String.format( + StringUtils.format( 
"{\"type\":\"lookup\",\"lookup\":{\"type\":\"map\", \"map\":%s}}", mapper.writeValueAsString(renames) ) @@ -69,7 +70,7 @@ public class MapLookupExtractionFnSerDeTest Assert.assertEquals( crazyString, mapper.reader(DimExtractionFn.class).readValue( - String.format( + StringUtils.format( "{\"type\":\"lookup\",\"lookup\":{\"type\":\"map\", \"map\":%s}, \"retainMissingValue\":true}", mapper.writeValueAsString(renames) ) diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java index 85feca8e680..75239edea17 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByQueryRunnerTest.java @@ -38,6 +38,7 @@ import io.druid.collections.StupidPool; import io.druid.data.input.Row; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.DurationGranularity; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.PeriodGranularity; @@ -383,7 +384,7 @@ public class GroupByQueryRunnerTest for (GroupByQueryConfig config : testConfigs()) { final GroupByQueryRunnerFactory factory = makeQueryRunnerFactory(config); for (QueryRunner runner : QueryRunnerTestHelper.makeQueryRunners(factory)) { - final String testName = String.format( + final String testName = StringUtils.format( "config=%s, runner=%s", config.toString(), runner.toString() @@ -2575,7 +2576,7 @@ public class GroupByQueryRunnerTest Map context = Maps.newHashMap(); TestHelper.assertExpectedObjects( - Iterables.limit(expectedResults, limit), mergeRunner.run(fullQuery, context), String.format("limit: %d", limit) + Iterables.limit(expectedResults, limit), mergeRunner.run(fullQuery, context), StringUtils.format("limit: %d", limit) ); } @@ -2622,7 +2623,7 @@ public class GroupByQueryRunnerTest Map context = Maps.newHashMap(); TestHelper.assertExpectedObjects( - Iterables.limit(expectedResults, limit), mergeRunner.run(fullQuery, context), String.format("limit: %d", limit) + Iterables.limit(expectedResults, limit), mergeRunner.run(fullQuery, context), StringUtils.format("limit: %d", limit) ); } @@ -2677,7 +2678,7 @@ public class GroupByQueryRunnerTest Map context = Maps.newHashMap(); TestHelper.assertExpectedObjects( - Iterables.limit(expectedResults, limit), mergeRunner.run(fullQuery, context), String.format("limit: %d", limit) + Iterables.limit(expectedResults, limit), mergeRunner.run(fullQuery, context), StringUtils.format("limit: %d", limit) ); } diff --git a/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java index 032cf0e7a20..2807263ad58 100644 --- a/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java @@ -25,6 +25,7 @@ import com.google.common.collect.Lists; import com.google.common.util.concurrent.MoreExecutors; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -164,7 +165,7 @@ public class GroupByTimeseriesQueryRunnerTest 
extends TimeseriesQueryRunnerTest Assert.assertEquals(expectedEarliest, result.getTimestamp()); Assert.assertFalse( - String.format("Timestamp[%s] > expectedLast[%s]", result.getTimestamp(), expectedLast), + StringUtils.format("Timestamp[%s] > expectedLast[%s]", result.getTimestamp(), expectedLast), result.getTimestamp().isAfter(expectedLast) ); diff --git a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerWithCaseTest.java b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerWithCaseTest.java index 99cfc079472..cea4730e348 100644 --- a/processing/src/test/java/io/druid/query/search/SearchQueryRunnerWithCaseTest.java +++ b/processing/src/test/java/io/druid/query/search/SearchQueryRunnerWithCaseTest.java @@ -24,6 +24,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.common.io.CharSource; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.Sequences; import io.druid.query.Druids; import io.druid.query.QueryRunner; @@ -255,20 +256,20 @@ public class SearchQueryRunnerWithCaseTest String dimension = resultValue.getDimension(); String theValue = resultValue.getValue(); Assert.assertTrue( - String.format("Result had unknown dimension[%s]", dimension), + StringUtils.format("Result had unknown dimension[%s]", dimension), expectedResults.containsKey(dimension) ); Set expectedSet = expectedResults.get(dimension); Assert.assertTrue( - String.format("Couldn't remove dim[%s], value[%s]", dimension, theValue), expectedSet.remove(theValue) + StringUtils.format("Couldn't remove dim[%s], value[%s]", dimension, theValue), expectedSet.remove(theValue) ); } } for (Map.Entry> entry : expectedResults.entrySet()) { Assert.assertTrue( - String.format( + StringUtils.format( "Dimension[%s] should have had everything removed, still has[%s]", entry.getKey(), entry.getValue() ), entry.getValue().isEmpty() diff --git a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java index a1d2cda6bd6..e7d44c79ddb 100644 --- a/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java +++ b/processing/src/test/java/io/druid/query/timeseries/TimeseriesQueryRunnerTest.java @@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.granularity.PeriodGranularity; @@ -193,7 +194,7 @@ public class TimeseriesQueryRunnerTest for (Result result : results) { DateTime current = result.getTimestamp(); Assert.assertFalse( - String.format("Timestamp[%s] > expectedLast[%s]", current, expectedLast), + StringUtils.format("Timestamp[%s] > expectedLast[%s]", current, expectedLast), descending ? current.isBefore(expectedLast) : current.isAfter(expectedLast) ); @@ -253,7 +254,7 @@ public class TimeseriesQueryRunnerTest for (Result result : results) { DateTime current = result.getTimestamp(); Assert.assertFalse( - String.format("Timestamp[%s] > expectedLast[%s]", current, expectedLast), + StringUtils.format("Timestamp[%s] > expectedLast[%s]", current, expectedLast), descending ? 
current.isBefore(expectedLast) : current.isAfter(expectedLast) ); Assert.assertEquals(ImmutableMap.of(), result.getValue().getBaseObject()); @@ -290,7 +291,7 @@ public class TimeseriesQueryRunnerTest Assert.assertEquals(expectedEarliest, result.getTimestamp()); Assert.assertFalse( - String.format("Timestamp[%s] > expectedLast[%s]", result.getTimestamp(), expectedLast), + StringUtils.format("Timestamp[%s] > expectedLast[%s]", result.getTimestamp(), expectedLast), result.getTimestamp().isAfter(expectedLast) ); @@ -338,7 +339,7 @@ public class TimeseriesQueryRunnerTest for (Result result : results) { DateTime current = result.getTimestamp(); Assert.assertFalse( - String.format("Timestamp[%s] > expectedLast[%s]", current, expectedLast), + StringUtils.format("Timestamp[%s] > expectedLast[%s]", current, expectedLast), descending ? current.isBefore(expectedLast) : current.isAfter(expectedLast) ); diff --git a/processing/src/test/java/io/druid/segment/IndexBuilder.java b/processing/src/test/java/io/druid/segment/IndexBuilder.java index 3b5eed11764..7e3e65257d5 100644 --- a/processing/src/test/java/io/druid/segment/IndexBuilder.java +++ b/processing/src/test/java/io/druid/segment/IndexBuilder.java @@ -25,6 +25,7 @@ import com.google.common.base.Throwables; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import io.druid.data.input.InputRow; +import io.druid.java.util.common.StringUtils; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.CountAggregatorFactory; import io.druid.segment.incremental.IncrementalIndex; @@ -121,7 +122,7 @@ public class IndexBuilder return TestHelper.getTestIndexIO().loadIndex( indexMerger.persist( incrementalIndex, - new File(tmpDir, String.format("testIndex-%s", new Random().nextInt(Integer.MAX_VALUE))), + new File(tmpDir, StringUtils.format("testIndex-%s", new Random().nextInt(Integer.MAX_VALUE))), indexSpec ) ); @@ -147,7 +148,7 @@ public class IndexBuilder maxRows, rows.subList(i, Math.min(rows.size(), i + ROWS_PER_INDEX_FOR_MERGING)) ), - new File(tmpDir, String.format("testIndex-%s", UUID.randomUUID().toString())), + new File(tmpDir, StringUtils.format("testIndex-%s", UUID.randomUUID().toString())), indexSpec ) ) @@ -181,7 +182,7 @@ public class IndexBuilder ), AggregatorFactory.class ), - new File(tmpDir, String.format("testIndex-%s", UUID.randomUUID())), + new File(tmpDir, StringUtils.format("testIndex-%s", UUID.randomUUID())), indexSpec ) ); diff --git a/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java b/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java index cd1f98115de..6b0f3fe0912 100644 --- a/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java +++ b/processing/src/test/java/io/druid/segment/SchemalessTestFullTest.java @@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import io.druid.java.util.common.Pair; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.Sequences; @@ -1358,19 +1359,19 @@ public class SchemalessTestFullTest return Arrays.asList( new Pair<>( SchemalessIndexTest.getIncrementalIndex(index1, index2), - String.format("Failed: II[%,d, %,d]", index1, index2) + StringUtils.format("Failed: II[%,d, %,d]", index1, index2) ), new Pair<>( 
SchemalessIndexTest.getIncrementalIndex(index2, index1), - String.format("Failed: II[%,d, %,d]", index2, index1) + StringUtils.format("Failed: II[%,d, %,d]", index2, index1) ), new Pair<>( SchemalessIndexTest.getMergedIncrementalIndex(index1, index2), - String.format("Failed: MII[%,d, %,d]", index1, index2) + StringUtils.format("Failed: MII[%,d, %,d]", index1, index2) ), new Pair<>( SchemalessIndexTest.getMergedIncrementalIndex(index2, index1), - String.format("Failed: MII[%,d, %,d]", index2, index1) + StringUtils.format("Failed: MII[%,d, %,d]", index2, index1) ) ); } diff --git a/processing/src/test/java/io/druid/segment/TestHelper.java b/processing/src/test/java/io/druid/segment/TestHelper.java index 16259313b66..25661e8cc5b 100644 --- a/processing/src/test/java/io/druid/segment/TestHelper.java +++ b/processing/src/test/java/io/druid/segment/TestHelper.java @@ -25,6 +25,7 @@ import com.google.common.collect.Lists; import io.druid.data.input.MapBasedRow; import io.druid.data.input.Row; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.math.expr.ExprMacroTable; @@ -151,7 +152,7 @@ public class TestHelper // in result comparison assertTimeseriesResultValue(failMsg, (Result) expectedNext, (Result) next); assertTimeseriesResultValue( - String.format("%s: Second iterator bad, multiple calls to iterator() should be safe", failMsg), + StringUtils.format("%s: Second iterator bad, multiple calls to iterator() should be safe", failMsg), (Result) expectedNext, (Result) next2 ); @@ -159,7 +160,7 @@ public class TestHelper } else { assertResult(failMsg, (Result) expectedNext, (Result) next); assertResult( - String.format("%s: Second iterator bad, multiple calls to iterator() should be safe", failMsg), + StringUtils.format("%s: Second iterator bad, multiple calls to iterator() should be safe", failMsg), (Result) expectedNext, (Result) next2 ); @@ -168,19 +169,19 @@ public class TestHelper if (resultsIter.hasNext()) { Assert.fail( - String.format("%s: Expected resultsIter to be exhausted, next element was %s", failMsg, resultsIter.next()) + StringUtils.format("%s: Expected resultsIter to be exhausted, next element was %s", failMsg, resultsIter.next()) ); } if (resultsIter2.hasNext()) { Assert.fail( - String.format("%s: Expected resultsIter2 to be exhausted, next element was %s", failMsg, resultsIter.next()) + StringUtils.format("%s: Expected resultsIter2 to be exhausted, next element was %s", failMsg, resultsIter.next()) ); } if (expectedResultsIter.hasNext()) { Assert.fail( - String.format( + StringUtils.format( "%s: Expected expectedResultsIter to be exhausted, next element was %s", failMsg, expectedResultsIter.next() @@ -202,7 +203,7 @@ public class TestHelper final Object next2 = resultsIter2.next(); String failMsg = msg + "-" + index++; - String failMsg2 = String.format("%s: Second iterator bad, multiple calls to iterator() should be safe", failMsg); + String failMsg2 = StringUtils.format("%s: Second iterator bad, multiple calls to iterator() should be safe", failMsg); if (expectedNext instanceof Row) { // HACK! 
Special casing for groupBy @@ -216,19 +217,19 @@ public class TestHelper if (resultsIter.hasNext()) { Assert.fail( - String.format("%s: Expected resultsIter to be exhausted, next element was %s", msg, resultsIter.next()) + StringUtils.format("%s: Expected resultsIter to be exhausted, next element was %s", msg, resultsIter.next()) ); } if (resultsIter2.hasNext()) { Assert.fail( - String.format("%s: Expected resultsIter2 to be exhausted, next element was %s", msg, resultsIter.next()) + StringUtils.format("%s: Expected resultsIter2 to be exhausted, next element was %s", msg, resultsIter.next()) ); } if (expectedResultsIter.hasNext()) { Assert.fail( - String.format( + StringUtils.format( "%s: Expected expectedResultsIter to be exhausted, next element was %s", msg, expectedResultsIter.next() @@ -246,7 +247,7 @@ public class TestHelper { // Custom equals check to get fuzzy comparison of numerics, useful because different groupBy strategies don't // always generate exactly the same results (different merge ordering / float vs double) - Assert.assertEquals(String.format("%s: timestamp", msg), expected.getTimestamp(), actual.getTimestamp()); + Assert.assertEquals(StringUtils.format("%s: timestamp", msg), expected.getTimestamp(), actual.getTimestamp()); TimeseriesResultValue expectedVal = (TimeseriesResultValue) expected.getValue(); TimeseriesResultValue actualVal = (TimeseriesResultValue) actual.getValue(); @@ -254,21 +255,21 @@ public class TestHelper final Map expectedMap = (Map) expectedVal.getBaseObject(); final Map actualMap = (Map) actualVal.getBaseObject(); - Assert.assertEquals(String.format("%s: map keys", msg), expectedMap.keySet(), actualMap.keySet()); + Assert.assertEquals(StringUtils.format("%s: map keys", msg), expectedMap.keySet(), actualMap.keySet()); for (final String key : expectedMap.keySet()) { final Object expectedValue = expectedMap.get(key); final Object actualValue = actualMap.get(key); if (expectedValue instanceof Float || expectedValue instanceof Double) { Assert.assertEquals( - String.format("%s: key[%s]", msg, key), + StringUtils.format("%s: key[%s]", msg, key), ((Number) expectedValue).doubleValue(), ((Number) actualValue).doubleValue(), ((Number) expectedValue).doubleValue() * 1e-6 ); } else { Assert.assertEquals( - String.format("%s: key[%s]", msg, key), + StringUtils.format("%s: key[%s]", msg, key), expectedValue, actualValue ); @@ -281,7 +282,7 @@ public class TestHelper // Custom equals check to get fuzzy comparison of numerics, useful because different groupBy strategies don't // always generate exactly the same results (different merge ordering / float vs double) Assert.assertEquals( - String.format("%s: timestamp", msg), + StringUtils.format("%s: timestamp", msg), expected.getTimestamp().getMillis(), actual.getTimestamp().getMillis() ); @@ -289,21 +290,21 @@ public class TestHelper final Map expectedMap = ((MapBasedRow) expected).getEvent(); final Map actualMap = ((MapBasedRow) actual).getEvent(); - Assert.assertEquals(String.format("%s: map keys", msg), expectedMap.keySet(), actualMap.keySet()); + Assert.assertEquals(StringUtils.format("%s: map keys", msg), expectedMap.keySet(), actualMap.keySet()); for (final String key : expectedMap.keySet()) { final Object expectedValue = expectedMap.get(key); final Object actualValue = actualMap.get(key); if (expectedValue instanceof Float || expectedValue instanceof Double) { Assert.assertEquals( - String.format("%s: key[%s]", msg, key), + StringUtils.format("%s: key[%s]", msg, key), ((Number) expectedValue).doubleValue(), 
((Number) actualValue).doubleValue(), ((Number) expectedValue).doubleValue() * 1e-6 ); } else { Assert.assertEquals( - String.format("%s: key[%s]", msg, key), + StringUtils.format("%s: key[%s]", msg, key), expectedValue, actualValue ); diff --git a/processing/src/test/java/io/druid/segment/data/BenchmarkIndexibleWrites.java b/processing/src/test/java/io/druid/segment/data/BenchmarkIndexibleWrites.java index 94d7701302f..acdbd288acb 100644 --- a/processing/src/test/java/io/druid/segment/data/BenchmarkIndexibleWrites.java +++ b/processing/src/test/java/io/druid/segment/data/BenchmarkIndexibleWrites.java @@ -29,6 +29,7 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.ThreadFactoryBuilder; +import io.druid.java.util.common.StringUtils; import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; @@ -216,7 +217,7 @@ public class BenchmarkIndexibleWrites extends AbstractBenchmark ); } Futures.allAsList(futures).get(); - Assert.assertTrue(String.format("Index too small %d, expected %d across %d loops", index.get(), totalIndexSize, loops), index.get()>=totalIndexSize); + Assert.assertTrue(StringUtils.format("Index too small %d, expected %d across %d loops", index.get(), totalIndexSize, loops), index.get() >= totalIndexSize); for(int i = 0; i < index.get(); ++i){ Assert.assertEquals(i, concurrentIndexible.get(i).intValue()); } @@ -295,7 +296,7 @@ public class BenchmarkIndexibleWrites extends AbstractBenchmark Futures.allAsList(futures).get(); executorService.shutdown(); - Assert.assertTrue(String.format("Index too small %d, expected %d across %d loops", index.get(), totalIndexSize, loops), index.get()>=totalIndexSize); + Assert.assertTrue(StringUtils.format("Index too small %d, expected %d across %d loops", index.get(), totalIndexSize, loops), index.get() >= totalIndexSize); for(int i = 0; i < index.get(); ++i){ Assert.assertEquals(i, concurrentIndexible.get(i).intValue()); } diff --git a/processing/src/test/java/io/druid/segment/data/CompressedFloatsSerdeTest.java b/processing/src/test/java/io/druid/segment/data/CompressedFloatsSerdeTest.java index bc043a3a905..03b16186a51 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedFloatsSerdeTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedFloatsSerdeTest.java @@ -22,6 +22,7 @@ package io.druid.segment.data; import com.google.common.base.Supplier; import com.google.common.io.ByteSink; import com.google.common.primitives.Floats; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.CloseQuietly; import it.unimi.dsi.fastutil.ints.IntArrays; import org.junit.Assert; @@ -225,7 +226,7 @@ public class CompressedFloatsSerdeTest final float indexedVal = indexed.get(j); if (Floats.compare(val, indexedVal) != 0) { failureHappened.set(true); - reason.set(String.format("Thread1[%d]: %f != %f", j, val, indexedVal)); + reason.set(StringUtils.format("Thread1[%d]: %f != %f", j, val, indexedVal)); stopLatch.countDown(); return; } @@ -264,7 +265,7 @@ public class CompressedFloatsSerdeTest final float indexedVal = indexed2.get(j); if (Floats.compare(val, indexedVal) != 0) { failureHappened.set(true); - reason.set(String.format("Thread2[%d]: %f != %f", j, val, indexedVal)); + reason.set(StringUtils.format("Thread2[%d]: %f != %f", j, val, indexedVal)); stopLatch.countDown(); return; } diff --git 
a/processing/src/test/java/io/druid/segment/data/CompressedIntsIndexedSupplierTest.java b/processing/src/test/java/io/druid/segment/data/CompressedIntsIndexedSupplierTest.java index e1c2fe945cd..cd0ddc240ec 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedIntsIndexedSupplierTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedIntsIndexedSupplierTest.java @@ -21,6 +21,7 @@ package io.druid.segment.data; import com.google.common.primitives.Ints; import com.google.common.primitives.Longs; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.CloseQuietly; import io.druid.segment.CompressedPools; import it.unimi.dsi.fastutil.ints.IntArrays; @@ -241,7 +242,7 @@ public class CompressedIntsIndexedSupplierTest extends CompressionStrategyTest final long indexedVal = indexed.get(j); if (Longs.compare(val, indexedVal) != 0) { failureHappened.set(true); - reason.set(String.format("Thread1[%d]: %d != %d", j, val, indexedVal)); + reason.set(StringUtils.format("Thread1[%d]: %d != %d", j, val, indexedVal)); stopLatch.countDown(); return; } @@ -280,7 +281,7 @@ public class CompressedIntsIndexedSupplierTest extends CompressionStrategyTest final long indexedVal = indexed2.get(j); if (Longs.compare(val, indexedVal) != 0) { failureHappened.set(true); - reason.set(String.format("Thread2[%d]: %d != %d", j, val, indexedVal)); + reason.set(StringUtils.format("Thread2[%d]: %d != %d", j, val, indexedVal)); stopLatch.countDown(); return; } diff --git a/processing/src/test/java/io/druid/segment/data/CompressedIntsIndexedWriterTest.java b/processing/src/test/java/io/druid/segment/data/CompressedIntsIndexedWriterTest.java index 60ce1fa027c..303f10c8b9c 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedIntsIndexedWriterTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedIntsIndexedWriterTest.java @@ -24,6 +24,7 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Sets; import com.google.common.primitives.Ints; import com.google.common.primitives.Longs; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.CloseQuietly; import io.druid.java.util.common.io.smoosh.FileSmoosher; import io.druid.java.util.common.io.smoosh.Smoosh; @@ -180,7 +181,7 @@ public class CompressedIntsIndexedWriterTest private void checkV2SerializedSizeAndData(int chunkFactor) throws Exception { - File tmpDirectory = Files.createTempDirectory(String.format( + File tmpDirectory = Files.createTempDirectory(StringUtils.format( "CompressedIntsIndexedWriterTest_%d", chunkFactor )).toFile(); diff --git a/processing/src/test/java/io/druid/segment/data/CompressedLongsSerdeTest.java b/processing/src/test/java/io/druid/segment/data/CompressedLongsSerdeTest.java index b8897b01f7d..4a74be54392 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedLongsSerdeTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedLongsSerdeTest.java @@ -22,6 +22,7 @@ package io.druid.segment.data; import com.google.common.base.Supplier; import com.google.common.io.ByteSink; import com.google.common.primitives.Longs; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.CloseQuietly; import it.unimi.dsi.fastutil.ints.IntArrays; import org.junit.Assert; @@ -249,7 +250,7 @@ public class CompressedLongsSerdeTest final long indexedVal = indexed.get(j); if (Longs.compare(val, indexedVal) != 0) { failureHappened.set(true); - 
reason.set(String.format("Thread1[%d]: %d != %d", j, val, indexedVal)); + reason.set(StringUtils.format("Thread1[%d]: %d != %d", j, val, indexedVal)); stopLatch.countDown(); return; } @@ -288,7 +289,7 @@ public class CompressedLongsSerdeTest final long indexedVal = indexed2.get(j); if (Longs.compare(val, indexedVal) != 0) { failureHappened.set(true); - reason.set(String.format("Thread2[%d]: %d != %d", j, val, indexedVal)); + reason.set(StringUtils.format("Thread2[%d]: %d != %d", j, val, indexedVal)); stopLatch.countDown(); return; } diff --git a/processing/src/test/java/io/druid/segment/data/CompressedVSizeIndexedV3WriterTest.java b/processing/src/test/java/io/druid/segment/data/CompressedVSizeIndexedV3WriterTest.java index b3aa68d4974..bc6bed9cbcf 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedVSizeIndexedV3WriterTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedVSizeIndexedV3WriterTest.java @@ -25,6 +25,7 @@ import com.google.common.collect.Ordering; import com.google.common.collect.Sets; import com.google.common.primitives.Ints; import com.google.common.primitives.Longs; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.CloseQuietly; import io.druid.java.util.common.io.smoosh.FileSmoosher; import io.druid.java.util.common.io.smoosh.Smoosh; @@ -235,7 +236,7 @@ public class CompressedVSizeIndexedV3WriterTest private void checkV2SerializedSizeAndData(int offsetChunkFactor, int valueChunkFactor) throws Exception { - File tmpDirectory = Files.createTempDirectory(String.format( + File tmpDirectory = Files.createTempDirectory(StringUtils.format( "CompressedVSizeIndexedV3WriterTest_%d_%d", offsetChunkFactor, offsetChunkFactor diff --git a/processing/src/test/java/io/druid/segment/data/CompressedVSizeIntsIndexedSupplierTest.java b/processing/src/test/java/io/druid/segment/data/CompressedVSizeIntsIndexedSupplierTest.java index 128144ab6b0..6111defe400 100644 --- a/processing/src/test/java/io/druid/segment/data/CompressedVSizeIntsIndexedSupplierTest.java +++ b/processing/src/test/java/io/druid/segment/data/CompressedVSizeIntsIndexedSupplierTest.java @@ -24,6 +24,7 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Sets; import com.google.common.primitives.Ints; import com.google.common.primitives.Longs; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.CloseQuietly; import io.druid.segment.CompressedPools; import it.unimi.dsi.fastutil.ints.IntArrays; @@ -279,7 +280,7 @@ public class CompressedVSizeIntsIndexedSupplierTest extends CompressionStrategyT final long indexedVal = indexed.get(j); if (Longs.compare(val, indexedVal) != 0) { failureHappened.set(true); - reason.set(String.format("Thread1[%d]: %d != %d", j, val, indexedVal)); + reason.set(StringUtils.format("Thread1[%d]: %d != %d", j, val, indexedVal)); stopLatch.countDown(); return; } @@ -318,7 +319,7 @@ public class CompressedVSizeIntsIndexedSupplierTest extends CompressionStrategyT final long indexedVal = indexed2.get(j); if (Longs.compare(val, indexedVal) != 0) { failureHappened.set(true); - reason.set(String.format("Thread2[%d]: %d != %d", j, val, indexedVal)); + reason.set(StringUtils.format("Thread2[%d]: %d != %d", j, val, indexedVal)); stopLatch.countDown(); return; } diff --git a/processing/src/test/java/io/druid/segment/data/IOPeonForTesting.java b/processing/src/test/java/io/druid/segment/data/IOPeonForTesting.java index c45df43815d..e4043e776aa 100644 --- 
a/processing/src/test/java/io/druid/segment/data/IOPeonForTesting.java +++ b/processing/src/test/java/io/druid/segment/data/IOPeonForTesting.java @@ -20,6 +20,7 @@ package io.druid.segment.data; import com.google.common.collect.Maps; +import io.druid.java.util.common.StringUtils; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -55,7 +56,7 @@ class IOPeonForTesting implements IOPeon ByteArrayOutputStream outStream = outStreams.get(filename); if (outStream == null) { - throw new FileNotFoundException(String.format("unknown file[%s]", filename)); + throw new FileNotFoundException(StringUtils.format("unknown file[%s]", filename)); } return new ByteArrayInputStream(outStream.toByteArray()); diff --git a/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java b/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java index 866f34f0fc6..d90871e1c47 100644 --- a/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java +++ b/processing/src/test/java/io/druid/segment/data/IncrementalIndexTest.java @@ -34,6 +34,7 @@ import io.druid.collections.StupidPool; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.Row; import io.druid.data.input.impl.DimensionsSpec; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Accumulator; import io.druid.java.util.common.guava.Sequence; @@ -273,7 +274,7 @@ public class IncrementalIndexTest List dimensionList = new ArrayList(dimensionCount); ImmutableMap.Builder builder = ImmutableMap.builder(); for (int i = 0; i < dimensionCount; i++) { - String dimName = String.format("Dim_%d", i); + String dimName = StringUtils.format("Dim_%d", i); dimensionList.add(dimName); builder.put(dimName, dimName + rowID); } @@ -285,7 +286,7 @@ public class IncrementalIndexTest List dimensionList = new ArrayList(dimensionCount); ImmutableMap.Builder builder = ImmutableMap.builder(); for (int i = 0; i < dimensionCount; i++) { - String dimName = String.format("Dim_%d", i); + String dimName = StringUtils.format("Dim_%d", i); dimensionList.add(dimName); builder.put(dimName, (Long) 1L); } @@ -412,14 +413,14 @@ public class IncrementalIndexTest for (int i = 0; i < dimensionCount; ++i) { ingestAggregatorFactories.add( new LongSumAggregatorFactory( - String.format("sumResult%s", i), - String.format("Dim_%s", i) + StringUtils.format("sumResult%s", i), + StringUtils.format("Dim_%s", i) ) ); ingestAggregatorFactories.add( new DoubleSumAggregatorFactory( - String.format("doubleSumResult%s", i), - String.format("Dim_%s", i) + StringUtils.format("doubleSumResult%s", i), + StringUtils.format("Dim_%s", i) ) ); } @@ -451,14 +452,14 @@ public class IncrementalIndexTest for (int i = 0; i < dimensionCount; ++i) { queryAggregatorFactories.add( new LongSumAggregatorFactory( - String.format("sumResult%s", i), - String.format("sumResult%s", i) + StringUtils.format("sumResult%s", i), + StringUtils.format("sumResult%s", i) ) ); queryAggregatorFactories.add( new DoubleSumAggregatorFactory( - String.format("doubleSumResult%s", i), - String.format("doubleSumResult%s", i) + StringUtils.format("doubleSumResult%s", i), + StringUtils.format("doubleSumResult%s", i) ) ); } @@ -491,14 +492,14 @@ public class IncrementalIndexTest Assert.assertEquals(rows * (isRollup ? 
1 : 2), result.getValue().getLongMetric("rows").intValue()); for (int i = 0; i < dimensionCount; ++i) { Assert.assertEquals( - String.format("Failed long sum on dimension %d", i), + "Failed long sum on dimension " + i, 2 * rows, - result.getValue().getLongMetric(String.format("sumResult%s", i)).intValue() + result.getValue().getLongMetric("sumResult" + i).intValue() ); Assert.assertEquals( - String.format("Failed double sum on dimension %d", i), + "Failed double sum on dimension " + i, 2 * rows, - result.getValue().getDoubleMetric(String.format("doubleSumResult%s", i)).intValue() + result.getValue().getDoubleMetric("doubleSumResult" + i).intValue() ); } } @@ -512,14 +513,14 @@ public class IncrementalIndexTest for (int i = 0; i < dimensionCount; ++i) { ingestAggregatorFactories.add( new LongSumAggregatorFactory( - String.format("sumResult%s", i), - String.format("Dim_%s", i) + StringUtils.format("sumResult%s", i), + StringUtils.format("Dim_%s", i) ) ); ingestAggregatorFactories.add( new DoubleSumAggregatorFactory( - String.format("doubleSumResult%s", i), - String.format("Dim_%s", i) + StringUtils.format("doubleSumResult%s", i), + StringUtils.format("Dim_%s", i) ) ); } @@ -529,14 +530,14 @@ public class IncrementalIndexTest for (int i = 0; i < dimensionCount; ++i) { queryAggregatorFactories.add( new LongSumAggregatorFactory( - String.format("sumResult%s", i), - String.format("sumResult%s", i) + StringUtils.format("sumResult%s", i), + StringUtils.format("sumResult%s", i) ) ); queryAggregatorFactories.add( new DoubleSumAggregatorFactory( - String.format("doubleSumResult%s", i), - String.format("doubleSumResult%s", i) + StringUtils.format("doubleSumResult%s", i), + StringUtils.format("doubleSumResult%s", i) ) ); } @@ -667,7 +668,7 @@ public class IncrementalIndexTest // Eventually consistent, but should be somewhere in that range // Actual result is validated after all writes are guaranteed done. 
Assert.assertTrue( - String.format("%d >= %g >= 0 violated", maxValueExpected, result), + StringUtils.format("%d >= %g >= 0 violated", maxValueExpected, result), result >= 0 && result <= maxValueExpected ); } @@ -711,14 +712,14 @@ public class IncrementalIndexTest ); for (int i = 0; i < dimensionCount; ++i) { Assert.assertEquals( - String.format("Failed long sum on dimension %d", i), + StringUtils.format("Failed long sum on dimension %d", i), elementsPerThread * concurrentThreads, - result.getValue().getLongMetric(String.format("sumResult%s", i)).intValue() + result.getValue().getLongMetric(StringUtils.format("sumResult%s", i)).intValue() ); Assert.assertEquals( - String.format("Failed double sum on dimension %d", i), + StringUtils.format("Failed double sum on dimension %d", i), elementsPerThread * concurrentThreads, - result.getValue().getDoubleMetric(String.format("doubleSumResult%s", i)).intValue() + result.getValue().getDoubleMetric(StringUtils.format("doubleSumResult%s", i)).intValue() ); } } diff --git a/processing/src/test/java/io/druid/segment/data/UnioningOffsetTest.java b/processing/src/test/java/io/druid/segment/data/UnioningOffsetTest.java index 1529eb8ecd7..016fb5252a4 100644 --- a/processing/src/test/java/io/druid/segment/data/UnioningOffsetTest.java +++ b/processing/src/test/java/io/druid/segment/data/UnioningOffsetTest.java @@ -20,6 +20,7 @@ package io.druid.segment.data; import com.google.common.collect.Lists; +import io.druid.java.util.common.StringUtils; import org.junit.Assert; import org.junit.Test; @@ -104,8 +105,8 @@ public class UnioningOffsetTest for (int i = 0; i < expectedValues.length; ++i) { for (int j = 0; j < offsets.size(); ++j) { Offset aClone = offsets.get(j); - Assert.assertTrue(String.format("Clone[%d] out of bounds", j), aClone.withinBounds()); - Assert.assertEquals(String.format("Clone[%d] not right", j), expectedValues[i], aClone.getOffset()); + Assert.assertTrue(StringUtils.format("Clone[%d] out of bounds", j), aClone.withinBounds()); + Assert.assertEquals(StringUtils.format("Clone[%d] not right", j), expectedValues[i], aClone.getOffset()); aClone.increment(); } offsets.add(offsets.get(0).clone()); diff --git a/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java b/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java index 7a85b76d901..7411c44e388 100644 --- a/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java +++ b/processing/src/test/java/io/druid/segment/filter/BaseFilterTest.java @@ -30,6 +30,7 @@ import io.druid.common.guava.SettableSupplier; import io.druid.common.utils.JodaUtils; import io.druid.data.input.InputRow; import io.druid.java.util.common.Pair; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -252,7 +253,7 @@ public abstract class BaseFilterTest for (Map.Entry>> finisherEntry : finishers.entrySet()) { for (boolean cnf : ImmutableList.of(false, true)) { for (boolean optimize : ImmutableList.of(false, true)) { - final String testName = String.format( + final String testName = StringUtils.format( "bitmaps[%s], indexMerger[%s], finisher[%s], optimize[%s]", bitmapSerdeFactoryEntry.getKey(), indexMergerEntry.getKey(), diff --git a/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java b/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java index 34a9f189a9d..ce0ce6dedca 100644 --- 
a/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java +++ b/processing/src/test/java/io/druid/segment/filter/SpatialFilterBonusTest.java @@ -28,6 +28,7 @@ import io.druid.collections.spatial.search.RectangularBound; import io.druid.data.input.MapBasedInputRow; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.SpatialDimensionSchema; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.query.Druids; import io.druid.query.FinalizeResultsQueryRunner; @@ -217,7 +218,7 @@ public class SpatialFilterBonusTest for (int i = 6; i < NUM_POINTS; i++) { String coord = null; while (coord == null) { - coord = String.format( + coord = StringUtils.format( "%s,%s", (float) (rand.nextFloat() * 10 + 10.0), (float) (rand.nextFloat() * 10 + 10.0) @@ -412,7 +413,7 @@ public class SpatialFilterBonusTest ImmutableMap.of( "timestamp", new DateTime("2013-01-01").toString(), "dim", "boo", - "dim.geo", String.format( + "dim.geo", StringUtils.format( "%s,%s", (float) (rand.nextFloat() * 10 + 10.0), (float) (rand.nextFloat() * 10 + 10.0) diff --git a/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java b/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java index 8449c3fd03e..944b600e406 100644 --- a/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java +++ b/processing/src/test/java/io/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java @@ -33,6 +33,7 @@ import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.ThreadFactoryBuilder; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.Sequences; @@ -91,14 +92,14 @@ public class OnheapIncrementalIndexBenchmark extends AbstractBenchmark for (int i = 0; i < dimensionCount; ++i) { ingestAggregatorFactories.add( new LongSumAggregatorFactory( - String.format("sumResult%s", i), - String.format("Dim_%s", i) + StringUtils.format("sumResult%s", i), + StringUtils.format("Dim_%s", i) ) ); ingestAggregatorFactories.add( new DoubleSumAggregatorFactory( - String.format("doubleSumResult%s", i), - String.format("Dim_%s", i) + StringUtils.format("doubleSumResult%s", i), + StringUtils.format("Dim_%s", i) ) ); } @@ -265,7 +266,7 @@ public class OnheapIncrementalIndexBenchmark extends AbstractBenchmark List dimensionList = new ArrayList(dimensionCount); ImmutableMap.Builder builder = ImmutableMap.builder(); for (int i = 0; i < dimensionCount; i++) { - String dimName = String.format("Dim_%d", i); + String dimName = StringUtils.format("Dim_%d", i); dimensionList.add(dimName); builder.put(dimName, new Integer(rowID).longValue()); } @@ -304,14 +305,14 @@ public class OnheapIncrementalIndexBenchmark extends AbstractBenchmark for (int i = 0; i < dimensionCount; ++i) { queryAggregatorFactories.add( new LongSumAggregatorFactory( - String.format("sumResult%s", i), - String.format("sumResult%s", i) + StringUtils.format("sumResult%s", i), + StringUtils.format("sumResult%s", i) ) ); queryAggregatorFactories.add( new DoubleSumAggregatorFactory( - String.format("doubleSumResult%s", i), - String.format("doubleSumResult%s", i) + StringUtils.format("doubleSumResult%s", i), + 
StringUtils.format("doubleSumResult%s", i) ) ); } @@ -436,14 +437,14 @@ public class OnheapIncrementalIndexBenchmark extends AbstractBenchmark Assert.assertEquals(elementsPerThread, result.getValue().getLongMetric("rows").intValue()); for (int i = 0; i < dimensionCount; ++i) { Assert.assertEquals( - String.format("Failed long sum on dimension %d", i), + StringUtils.format("Failed long sum on dimension %d", i), expectedVal, - result.getValue().getLongMetric(String.format("sumResult%s", i)).intValue() + result.getValue().getLongMetric(StringUtils.format("sumResult%s", i)).intValue() ); Assert.assertEquals( - String.format("Failed double sum on dimension %d", i), + StringUtils.format("Failed double sum on dimension %d", i), expectedVal, - result.getValue().getDoubleMetric(String.format("doubleSumResult%s", i)).intValue() + result.getValue().getDoubleMetric(StringUtils.format("doubleSumResult%s", i)).intValue() ); } } diff --git a/server/src/main/java/io/druid/client/CachingClusteredClient.java b/server/src/main/java/io/druid/client/CachingClusteredClient.java index 3256dc370b7..e885376a777 100644 --- a/server/src/main/java/io/druid/client/CachingClusteredClient.java +++ b/server/src/main/java/io/druid/client/CachingClusteredClient.java @@ -50,6 +50,7 @@ import io.druid.concurrent.Execs; import io.druid.guice.annotations.BackgroundCaching; import io.druid.guice.annotations.Smile; import io.druid.java.util.common.Pair; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.BaseSequence; import io.druid.java.util.common.guava.LazySequence; import io.druid.java.util.common.guava.MergeSequence; @@ -323,7 +324,7 @@ public class CachingClusteredClient implements QueryRunner // otherwise, if populating cache, add segment to list of segments to cache final String segmentIdentifier = segment.lhs.getSegment().getIdentifier(); cachePopulatorMap.put( - String.format("%s_%s", segmentIdentifier, segmentQueryInterval), + StringUtils.format("%s_%s", segmentIdentifier, segmentQueryInterval), new CachePopulator(cache, objectMapper, segmentCacheKey) ); } @@ -486,7 +487,7 @@ public class CachingClusteredClient implements QueryRunner { final BySegmentResultValueClass value = input.getValue(); final CachePopulator cachePopulator = cachePopulatorMap.get( - String.format("%s_%s", value.getSegmentId(), value.getInterval()) + StringUtils.format("%s_%s", value.getSegmentId(), value.getInterval()) ); final Queue> cacheFutures = new ConcurrentLinkedQueue<>(); diff --git a/server/src/main/java/io/druid/client/DirectDruidClient.java b/server/src/main/java/io/druid/client/DirectDruidClient.java index b8f1f2600b0..35a86b0f320 100644 --- a/server/src/main/java/io/druid/client/DirectDruidClient.java +++ b/server/src/main/java/io/druid/client/DirectDruidClient.java @@ -192,8 +192,8 @@ public class DirectDruidClient implements QueryRunner } final ListenableFuture future; - final String url = String.format("http://%s/druid/v2/", host); - final String cancelUrl = String.format("http://%s/druid/v2/%s", host, query.getId()); + final String url = StringUtils.format("http://%s/druid/v2/", host); + final String cancelUrl = StringUtils.format("http://%s/druid/v2/%s", host, query.getId()); try { log.debug("Querying queryId[%s] url[%s]", query.getId(), url); @@ -379,7 +379,7 @@ public class DirectDruidClient implements QueryRunner @Override public void exceptionCaught(final ClientResponse clientResponse, final Throwable e) { - String msg = StringUtils.safeFormat( + String msg = StringUtils.format( "Query[%s] 
url[%s] failed with exception msg [%s]", query.getId(), url, @@ -412,7 +412,7 @@ public class DirectDruidClient implements QueryRunner { long timeLeft = timeoutAt - System.currentTimeMillis(); if (timeLeft <= 0) { - String msg = StringUtils.safeFormat("Query[%s] url[%s] timed out.", query.getId(), url); + String msg = StringUtils.format("Query[%s] url[%s] timed out.", query.getId(), url); setupResponseReadFailure(msg, null); throw new RE(msg); } else { @@ -423,7 +423,7 @@ public class DirectDruidClient implements QueryRunner private void checkTotalBytesLimit(long bytes) { if (maxScatterGatherBytes < Long.MAX_VALUE && totalBytesGathered.addAndGet(bytes) > maxScatterGatherBytes) { - String msg = StringUtils.safeFormat( + String msg = StringUtils.format( "Query[%s] url[%s] max scatter-gather bytes limit reached.", query.getId(), url diff --git a/server/src/main/java/io/druid/client/HttpServerInventoryView.java b/server/src/main/java/io/druid/client/HttpServerInventoryView.java index c2bb8eee61b..fb5b13f16ad 100644 --- a/server/src/main/java/io/druid/client/HttpServerInventoryView.java +++ b/server/src/main/java/io/druid/client/HttpServerInventoryView.java @@ -45,6 +45,7 @@ import io.druid.guice.annotations.Json; import io.druid.guice.annotations.Smile; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.server.coordination.DataSegmentChangeCallback; @@ -507,14 +508,14 @@ public class HttpServerInventoryView implements ServerInventoryView, FilteredSer try { final String req; if (counter != null) { - req = String.format( + req = StringUtils.format( "/druid-internal/v1/segments?counter=%s&hash=%s&timeout=%s", counter.getCounter(), counter.getHash(), config.getServerTimeout() ); } else { - req = String.format( + req = StringUtils.format( "/druid-internal/v1/segments?counter=-1&timeout=%s", config.getServerTimeout() ); diff --git a/server/src/main/java/io/druid/client/cache/BytesBoundedLinkedQueue.java b/server/src/main/java/io/druid/client/cache/BytesBoundedLinkedQueue.java index 492c006411f..eb30febfae4 100644 --- a/server/src/main/java/io/druid/client/cache/BytesBoundedLinkedQueue.java +++ b/server/src/main/java/io/druid/client/cache/BytesBoundedLinkedQueue.java @@ -19,6 +19,8 @@ package io.druid.client.cache; +import io.druid.java.util.common.IAE; + import java.util.AbstractQueue; import java.util.Collection; import java.util.Iterator; @@ -63,9 +65,7 @@ public abstract class BytesBoundedLinkedQueue extends AbstractQueue implem private void checkSize(E e) { if (getBytesSize(e) > capacity) { - throw new IllegalArgumentException( - String.format("cannot add element of size[%d] greater than capacity[%d]", getBytesSize(e), capacity) - ); + throw new IAE("cannot add element of size[%d] greater than capacity[%d]", getBytesSize(e), capacity); } } diff --git a/server/src/main/java/io/druid/client/cache/CacheMonitor.java b/server/src/main/java/io/druid/client/cache/CacheMonitor.java index 6816ac61f36..752b90614c0 100644 --- a/server/src/main/java/io/druid/client/cache/CacheMonitor.java +++ b/server/src/main/java/io/druid/client/cache/CacheMonitor.java @@ -23,6 +23,7 @@ import com.google.inject.Inject; import com.metamx.emitter.service.ServiceEmitter; import com.metamx.emitter.service.ServiceMetricEvent; import com.metamx.metrics.AbstractMonitor; +import io.druid.java.util.common.StringUtils; public 
class CacheMonitor extends AbstractMonitor { @@ -77,15 +78,15 @@ public class CacheMonitor extends AbstractMonitor ) { if (cache != null) { - emitter.emit(builder.build(String.format("%s/numEntries", metricPrefix), cacheStats.getNumEntries())); - emitter.emit(builder.build(String.format("%s/sizeBytes", metricPrefix), cacheStats.getSizeInBytes())); - emitter.emit(builder.build(String.format("%s/hits", metricPrefix), cacheStats.getNumHits())); - emitter.emit(builder.build(String.format("%s/misses", metricPrefix), cacheStats.getNumMisses())); - emitter.emit(builder.build(String.format("%s/evictions", metricPrefix), cacheStats.getNumEvictions())); - emitter.emit(builder.build(String.format("%s/hitRate", metricPrefix), cacheStats.hitRate())); - emitter.emit(builder.build(String.format("%s/averageBytes", metricPrefix), cacheStats.averageBytes())); - emitter.emit(builder.build(String.format("%s/timeouts", metricPrefix), cacheStats.getNumTimeouts())); - emitter.emit(builder.build(String.format("%s/errors", metricPrefix), cacheStats.getNumErrors())); + emitter.emit(builder.build(StringUtils.format("%s/numEntries", metricPrefix), cacheStats.getNumEntries())); + emitter.emit(builder.build(StringUtils.format("%s/sizeBytes", metricPrefix), cacheStats.getSizeInBytes())); + emitter.emit(builder.build(StringUtils.format("%s/hits", metricPrefix), cacheStats.getNumHits())); + emitter.emit(builder.build(StringUtils.format("%s/misses", metricPrefix), cacheStats.getNumMisses())); + emitter.emit(builder.build(StringUtils.format("%s/evictions", metricPrefix), cacheStats.getNumEvictions())); + emitter.emit(builder.build(StringUtils.format("%s/hitRate", metricPrefix), cacheStats.hitRate())); + emitter.emit(builder.build(StringUtils.format("%s/averageBytes", metricPrefix), cacheStats.averageBytes())); + emitter.emit(builder.build(StringUtils.format("%s/timeouts", metricPrefix), cacheStats.getNumTimeouts())); + emitter.emit(builder.build(StringUtils.format("%s/errors", metricPrefix), cacheStats.getNumErrors())); } } } diff --git a/server/src/main/java/io/druid/client/coordinator/CoordinatorClient.java b/server/src/main/java/io/druid/client/coordinator/CoordinatorClient.java index 032b9d6d34e..b70da3e5d75 100644 --- a/server/src/main/java/io/druid/client/coordinator/CoordinatorClient.java +++ b/server/src/main/java/io/druid/client/coordinator/CoordinatorClient.java @@ -34,6 +34,7 @@ import io.druid.curator.discovery.ServerDiscoverySelector; import io.druid.guice.annotations.Global; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import org.jboss.netty.handler.codec.http.HttpMethod; import org.jboss.netty.handler.codec.http.HttpResponseStatus; import org.joda.time.Interval; @@ -70,7 +71,7 @@ public class CoordinatorClient new Request( HttpMethod.GET, new URL( - String.format( + StringUtils.format( "%s/datasources/%s/intervals/%s/serverview?partial=%s", baseUrl(), dataSource, diff --git a/server/src/main/java/io/druid/client/indexing/IndexingServiceClient.java b/server/src/main/java/io/druid/client/indexing/IndexingServiceClient.java index b51297fe94f..96b5d9bb7d9 100644 --- a/server/src/main/java/io/druid/client/indexing/IndexingServiceClient.java +++ b/server/src/main/java/io/druid/client/indexing/IndexingServiceClient.java @@ -30,6 +30,7 @@ import io.druid.curator.discovery.ServerDiscoverySelector; import io.druid.guice.annotations.Global; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import 
io.druid.timeline.DataSegment; import org.jboss.netty.handler.codec.http.HttpMethod; import org.joda.time.Interval; @@ -100,7 +101,7 @@ public class IndexingServiceClient return client.go( new Request( HttpMethod.POST, - new URL(String.format("%s/task", baseUrl())) + new URL(StringUtils.format("%s/task", baseUrl())) ).setContent(MediaType.APPLICATION_JSON, jsonMapper.writeValueAsBytes(queryObject)), RESPONSE_HANDLER ).get(); diff --git a/server/src/main/java/io/druid/curator/inventory/CuratorInventoryManager.java b/server/src/main/java/io/druid/curator/inventory/CuratorInventoryManager.java index 78efbece149..a5fd7c6512e 100644 --- a/server/src/main/java/io/druid/curator/inventory/CuratorInventoryManager.java +++ b/server/src/main/java/io/druid/curator/inventory/CuratorInventoryManager.java @@ -24,6 +24,7 @@ import com.google.common.collect.Iterables; import com.google.common.collect.MapMaker; import com.google.common.collect.Sets; import io.druid.curator.cache.PathChildrenCacheFactory; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.io.Closer; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; @@ -246,7 +247,7 @@ public class CuratorInventoryManager if (containers.containsKey(containerKey)) { log.error("New node[%s] but there was already one. That's not good, ignoring new one.", child.getPath()); } else { - final String inventoryPath = String.format("%s/%s", config.getInventoryPath(), containerKey); + final String inventoryPath = StringUtils.format("%s/%s", config.getInventoryPath(), containerKey); PathChildrenCache inventoryCache = cacheFactory.make(curatorFramework, inventoryPath); inventoryCache.getListenable().addListener(new InventoryCacheListener(containerKey, inventoryPath)); diff --git a/server/src/main/java/io/druid/guice/DruidProcessingModule.java b/server/src/main/java/io/druid/guice/DruidProcessingModule.java index ac1b8a267fd..5d8f369652d 100644 --- a/server/src/main/java/io/druid/guice/DruidProcessingModule.java +++ b/server/src/main/java/io/druid/guice/DruidProcessingModule.java @@ -35,6 +35,7 @@ import io.druid.guice.annotations.BackgroundCaching; import io.druid.guice.annotations.Global; import io.druid.guice.annotations.Merging; import io.druid.guice.annotations.Processing; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.concurrent.ExecutorServiceConfig; import io.druid.java.util.common.lifecycle.Lifecycle; import io.druid.java.util.common.logger.Logger; @@ -136,7 +137,7 @@ public class DruidProcessingModule implements Module if (maxDirectMemory < memoryNeeded) { throw new ProvisionException( - String.format( + StringUtils.format( "Not enough direct memory. 
Please adjust -XX:MaxDirectMemorySize, druid.processing.buffer.sizeBytes, druid.processing.numThreads, or druid.processing.numMergeBuffers: " + "maxDirectMemory[%,d], memoryNeeded[%,d] = druid.processing.buffer.sizeBytes[%,d] * (druid.processing.numMergeBuffers[%,d] + druid.processing.numThreads[%,d] + 1)", maxDirectMemory, diff --git a/server/src/main/java/io/druid/guice/http/HttpClientModule.java b/server/src/main/java/io/druid/guice/http/HttpClientModule.java index b241c4bad04..9d61c700542 100644 --- a/server/src/main/java/io/druid/guice/http/HttpClientModule.java +++ b/server/src/main/java/io/druid/guice/http/HttpClientModule.java @@ -27,6 +27,7 @@ import com.metamx.http.client.HttpClientInit; import io.druid.guice.JsonConfigProvider; import io.druid.guice.LazySingleton; import io.druid.guice.annotations.Global; +import io.druid.java.util.common.StringUtils; import java.lang.annotation.Annotation; @@ -109,7 +110,9 @@ public class HttpClientModule implements Module .withNumConnections(config.getNumConnections()) .withReadTimeout(config.getReadTimeout()) .withWorkerCount(config.getNumMaxThreads()) - .withCompressionCodec(HttpClientConfig.CompressionCodec.valueOf(config.getCompressionCodec().toUpperCase())); + .withCompressionCodec( + HttpClientConfig.CompressionCodec.valueOf(StringUtils.toUpperCase(config.getCompressionCodec())) + ); if (getSslContextBinding() != null) { builder.withSslContext(getSslContextBinding().getProvider().get()); diff --git a/server/src/main/java/io/druid/indexer/SQLMetadataStorageUpdaterJobHandler.java b/server/src/main/java/io/druid/indexer/SQLMetadataStorageUpdaterJobHandler.java index 962aa3ccb54..034bd6ad2ab 100644 --- a/server/src/main/java/io/druid/indexer/SQLMetadataStorageUpdaterJobHandler.java +++ b/server/src/main/java/io/druid/indexer/SQLMetadataStorageUpdaterJobHandler.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import com.google.inject.Inject; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.metadata.SQLMetadataConnector; import io.druid.timeline.DataSegment; @@ -58,7 +59,7 @@ public class SQLMetadataStorageUpdaterJobHandler implements MetadataStorageUpdat public Void withHandle(Handle handle) throws Exception { final PreparedBatch batch = handle.prepareBatch( - String.format( + StringUtils.format( "INSERT INTO %1$s (id, dataSource, created_date, start, %2$send%2$s, partitioned, version, used, payload) " + "VALUES (:id, :dataSource, :created_date, :start, :end, :partitioned, :version, :used, :payload)", tableName, connector.getQuoteString() diff --git a/server/src/main/java/io/druid/initialization/Initialization.java b/server/src/main/java/io/druid/initialization/Initialization.java index adeaa3572c1..377c77aa64d 100644 --- a/server/src/main/java/io/druid/initialization/Initialization.java +++ b/server/src/main/java/io/druid/initialization/Initialization.java @@ -234,10 +234,8 @@ public class Initialization if (!extensionDir.isDirectory()) { throw new ISE( - String.format( - "Extension [%s] specified in \"druid.extensions.loadList\" didn't exist!?", - extensionDir.getAbsolutePath() - ) + "Extension [%s] specified in \"druid.extensions.loadList\" didn't exist!?", + extensionDir.getAbsolutePath() ); } extensionsToLoad[i++] = extensionDir; @@ -271,9 +269,7 @@ public class Initialization final File versionDir = new File(hadoopDependencyDir, artifact.getVersion()); // find the hadoop dependency with the version 
specified in coordinate if (!hadoopDependencyDir.isDirectory() || !versionDir.isDirectory()) { - throw new ISE( - String.format("Hadoop dependency [%s] didn't exist!?", versionDir.getAbsolutePath()) - ); + throw new ISE("Hadoop dependency [%s] didn't exist!?", versionDir.getAbsolutePath()); } hadoopDependenciesToLoad[i++] = versionDir; } diff --git a/server/src/main/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinator.java b/server/src/main/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinator.java index 9cc6fa41de0..24ce4f2a78c 100644 --- a/server/src/main/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinator.java +++ b/server/src/main/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinator.java @@ -180,7 +180,7 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor final ResultIterator dbSegments = handle.createQuery( - String.format( + StringUtils.format( "SELECT payload FROM %1$s WHERE dataSource = :dataSource AND start <= :end and %2$send%2$s >= :start", dbTables.getPendingSegmentsTable(), connector.getQuoteString() ) @@ -219,7 +219,7 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor sb.append("SELECT payload FROM %s WHERE used = true AND dataSource = ? AND ("); for (int i = 0; i < intervals.size(); i++) { sb.append( - String.format("(start <= ? AND %1$send%1$s >= ?)", connector.getQuoteString()) + StringUtils.format("(start <= ? AND %1$send%1$s >= ?)", connector.getQuoteString()) ); if (i == intervals.size() - 1) { sb.append(")"); @@ -229,7 +229,7 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor } Query> sql = handle.createQuery( - String.format( + StringUtils.format( sb.toString(), dbTables.getSegmentsTable() ) @@ -402,7 +402,7 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor { final List existingBytes = handle .createQuery( - String.format( + StringUtils.format( "SELECT payload FROM %s WHERE " + "dataSource = :dataSource AND " + "sequence_name = :sequence_name AND " @@ -560,7 +560,7 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor ); handle.createStatement( - String.format( + StringUtils.format( "INSERT INTO %1$s (id, dataSource, created_date, start, %2$send%2$s, sequence_name, sequence_prev_id, sequence_name_prev_id_sha1, payload) " + "VALUES (:id, :dataSource, :created_date, :start, :end, :sequence_name, :sequence_prev_id, :sequence_name_prev_id_sha1, :payload)", dbTables.getPendingSegmentsTable(), connector.getQuoteString() @@ -614,7 +614,7 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor // Avoiding ON DUPLICATE KEY since it's not portable. // Avoiding try/catch since it may cause inadvertent transaction-splitting. 
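// In other words: probe with a plain SELECT and INSERT only when no row came back, leaving retries on unique-key violations to the caller, rather than using vendor-specific upserts (MySQL's ON DUPLICATE KEY UPDATE, PostgreSQL's ON CONFLICT).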
handle.createStatement( - String.format( + StringUtils.format( "INSERT INTO %1$s (id, dataSource, created_date, start, %2$send%2$s, partitioned, version, used, payload) " + "VALUES (:id, :dataSource, :created_date, :start, :end, :partitioned, :version, :used, :payload)", dbTables.getSegmentsTable(), connector.getQuoteString() @@ -645,7 +645,7 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor { return !handle .createQuery( - String.format( + StringUtils.format( "SELECT id FROM %s WHERE id = :identifier", dbTables.getSegmentsTable() ) @@ -758,7 +758,7 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor if (oldCommitMetadataBytesFromDb == null) { // SELECT -> INSERT can fail due to races; callers must be prepared to retry. final int numRows = handle.createStatement( - String.format( + StringUtils.format( "INSERT INTO %s (dataSource, created_date, commit_metadata_payload, commit_metadata_sha1) " + "VALUES (:dataSource, :created_date, :commit_metadata_payload, :commit_metadata_sha1)", dbTables.getDataSourceTable() @@ -774,7 +774,7 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor } else { // Expecting a particular old metadata; use the SHA1 in a compare-and-swap UPDATE final int numRows = handle.createStatement( - String.format( + StringUtils.format( "UPDATE %s SET " + "commit_metadata_payload = :new_commit_metadata_payload, " + "commit_metadata_sha1 = :new_commit_metadata_sha1 " @@ -810,7 +810,7 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor public Boolean withHandle(Handle handle) throws Exception { int rows = handle.createStatement( - String.format("DELETE from %s WHERE dataSource = :dataSource", dbTables.getDataSourceTable()) + StringUtils.format("DELETE from %s WHERE dataSource = :dataSource", dbTables.getDataSourceTable()) ) .bind("dataSource", dataSource) .execute(); @@ -838,7 +838,7 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor public Boolean withHandle(Handle handle) throws Exception { final int numRows = handle.createStatement( - String.format( + StringUtils.format( "UPDATE %s SET " + "commit_metadata_payload = :new_commit_metadata_payload, " + "commit_metadata_sha1 = :new_commit_metadata_sha1 " @@ -897,7 +897,7 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor private void deleteSegment(final Handle handle, final DataSegment segment) { handle.createStatement( - String.format("DELETE from %s WHERE id = :id", dbTables.getSegmentsTable()) + StringUtils.format("DELETE from %s WHERE id = :id", dbTables.getSegmentsTable()) ) .bind("id", segment.getIdentifier()) .execute(); @@ -907,7 +907,7 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor { try { handle.createStatement( - String.format("UPDATE %s SET payload = :payload WHERE id = :id", dbTables.getSegmentsTable()) + StringUtils.format("UPDATE %s SET payload = :payload WHERE id = :id", dbTables.getSegmentsTable()) ) .bind("id", segment.getIdentifier()) .bind("payload", jsonMapper.writeValueAsBytes(segment)) @@ -930,7 +930,7 @@ public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStor { return handle .createQuery( - String.format( + StringUtils.format( "SELECT payload FROM %1$s WHERE dataSource = :dataSource and start >= :start and %2$send%2$s <= :end and used = false", dbTables.getSegmentsTable(), connector.getQuoteString() ) diff --git 
a/server/src/main/java/io/druid/metadata/SQLMetadataConnector.java b/server/src/main/java/io/druid/metadata/SQLMetadataConnector.java index b3c1433679a..de913557e22 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataConnector.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataConnector.java @@ -25,6 +25,7 @@ import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import io.druid.java.util.common.ISE; import io.druid.java.util.common.RetryUtils; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import org.apache.commons.dbcp2.BasicDataSource; import org.skife.jdbi.v2.Batch; @@ -211,7 +212,7 @@ public abstract class SQLMetadataConnector implements MetadataStorageConnector createTable( tableName, ImmutableList.of( - String.format( + StringUtils.format( "CREATE TABLE %1$s (\n" + " id VARCHAR(255) NOT NULL,\n" + " dataSource VARCHAR(255) NOT NULL,\n" @@ -236,7 +237,7 @@ public abstract class SQLMetadataConnector implements MetadataStorageConnector createTable( tableName, ImmutableList.of( - String.format( + StringUtils.format( "CREATE TABLE %1$s (\n" + " dataSource VARCHAR(255) NOT NULL,\n" + " created_date VARCHAR(255) NOT NULL,\n" @@ -255,7 +256,7 @@ public abstract class SQLMetadataConnector implements MetadataStorageConnector createTable( tableName, ImmutableList.of( - String.format( + StringUtils.format( "CREATE TABLE %1$s (\n" + " id VARCHAR(255) NOT NULL,\n" + " dataSource VARCHAR(255) NOT NULL,\n" @@ -270,8 +271,8 @@ public abstract class SQLMetadataConnector implements MetadataStorageConnector + ")", tableName, getPayloadType(), getQuoteString() ), - String.format("CREATE INDEX idx_%1$s_datasource ON %1$s(dataSource)", tableName), - String.format("CREATE INDEX idx_%1$s_used ON %1$s(used)", tableName) + StringUtils.format("CREATE INDEX idx_%1$s_datasource ON %1$s(dataSource)", tableName), + StringUtils.format("CREATE INDEX idx_%1$s_used ON %1$s(used)", tableName) ) ); } @@ -281,7 +282,7 @@ public abstract class SQLMetadataConnector implements MetadataStorageConnector createTable( tableName, ImmutableList.of( - String.format( + StringUtils.format( "CREATE TABLE %1$s (\n" + " id VARCHAR(255) NOT NULL,\n" + " dataSource VARCHAR(255) NOT NULL,\n" @@ -291,7 +292,7 @@ public abstract class SQLMetadataConnector implements MetadataStorageConnector + ")", tableName, getPayloadType() ), - String.format("CREATE INDEX idx_%1$s_datasource ON %1$s(dataSource)", tableName) + StringUtils.format("CREATE INDEX idx_%1$s_datasource ON %1$s(dataSource)", tableName) ) ); } @@ -301,7 +302,7 @@ public abstract class SQLMetadataConnector implements MetadataStorageConnector createTable( tableName, ImmutableList.of( - String.format( + StringUtils.format( "CREATE TABLE %1$s (\n" + " name VARCHAR(255) NOT NULL,\n" + " payload %2$s NOT NULL,\n" @@ -318,7 +319,7 @@ public abstract class SQLMetadataConnector implements MetadataStorageConnector createTable( tableName, ImmutableList.of( - String.format( + StringUtils.format( "CREATE TABLE %1$s (\n" + " id VARCHAR(255) NOT NULL,\n" + " created_date VARCHAR(255) NOT NULL,\n" @@ -330,7 +331,7 @@ public abstract class SQLMetadataConnector implements MetadataStorageConnector + ")", tableName, getPayloadType() ), - String.format("CREATE INDEX idx_%1$s_active_created_date ON %1$s(active, created_date)", tableName) + StringUtils.format("CREATE INDEX idx_%1$s_active_created_date ON %1$s(active, created_date)", tableName) ) ); } @@ -340,7 +341,7 @@ public abstract class 
SQLMetadataConnector implements MetadataStorageConnector createTable( tableName, ImmutableList.of( - String.format( + StringUtils.format( "CREATE TABLE %1$s (\n" + " id %2$s NOT NULL,\n" + " %4$s_id VARCHAR(255) DEFAULT NULL,\n" @@ -349,7 +350,7 @@ public abstract class SQLMetadataConnector implements MetadataStorageConnector + ")", tableName, getSerialType(), getPayloadType(), entryTypeName ), - String.format("CREATE INDEX idx_%1$s_%2$s_id ON %1$s(%2$s_id)", tableName, entryTypeName) + StringUtils.format("CREATE INDEX idx_%1$s_%2$s_id ON %1$s(%2$s_id)", tableName, entryTypeName) ) ); } @@ -359,7 +360,7 @@ public abstract class SQLMetadataConnector implements MetadataStorageConnector createTable( tableName, ImmutableList.of( - String.format( + StringUtils.format( "CREATE TABLE %1$s (\n" + " id %2$s NOT NULL,\n" + " %4$s_id VARCHAR(255) DEFAULT NULL,\n" @@ -368,7 +369,7 @@ public abstract class SQLMetadataConnector implements MetadataStorageConnector + ")", tableName, getSerialType(), getPayloadType(), entryTypeName ), - String.format("CREATE INDEX idx_%1$s_%2$s_id ON %1$s(%2$s_id)", tableName, entryTypeName) + StringUtils.format("CREATE INDEX idx_%1$s_%2$s_id ON %1$s(%2$s_id)", tableName, entryTypeName) ) ); } @@ -378,7 +379,7 @@ public abstract class SQLMetadataConnector implements MetadataStorageConnector createTable( tableName, ImmutableList.of( - String.format( + StringUtils.format( "CREATE TABLE %1$s (\n" + " id %2$s NOT NULL,\n" + " spec_id VARCHAR(255) NOT NULL,\n" @@ -388,7 +389,7 @@ public abstract class SQLMetadataConnector implements MetadataStorageConnector + ")", tableName, getSerialType(), getPayloadType() ), - String.format("CREATE INDEX idx_%1$s_spec_id ON %1$s(spec_id)", tableName) + StringUtils.format("CREATE INDEX idx_%1$s_spec_id ON %1$s(spec_id)", tableName) ) ); } @@ -410,14 +411,14 @@ public abstract class SQLMetadataConnector implements MetadataStorageConnector { int count = handle .createQuery( - String.format("SELECT COUNT(*) FROM %1$s WHERE %2$s = :key", tableName, keyColumn) + StringUtils.format("SELECT COUNT(*) FROM %1$s WHERE %2$s = :key", tableName, keyColumn) ) .bind("key", key) .map(IntegerMapper.FIRST) .first(); if (count == 0) { handle.createStatement( - String.format( + StringUtils.format( "INSERT INTO %1$s (%2$s, %3$s) VALUES (:key, :value)", tableName, keyColumn, valueColumn ) @@ -427,7 +428,7 @@ public abstract class SQLMetadataConnector implements MetadataStorageConnector .execute(); } else { handle.createStatement( - String.format( + StringUtils.format( "UPDATE %1$s SET %3$s=:value WHERE %2$s=:key", tableName, keyColumn, valueColumn ) @@ -532,7 +533,7 @@ public abstract class SQLMetadataConnector implements MetadataStorageConnector final String key ) { - final String selectStatement = String.format( + final String selectStatement = StringUtils.format( "SELECT %s FROM %s WHERE %s = :key", valueColumn, tableName, keyColumn ); @@ -606,7 +607,7 @@ public abstract class SQLMetadataConnector implements MetadataStorageConnector createTable( tableName, ImmutableList.of( - String.format( + StringUtils.format( "CREATE TABLE %1$s (\n" + " id %2$s NOT NULL,\n" + " audit_key VARCHAR(255) NOT NULL,\n" @@ -619,9 +620,9 @@ public abstract class SQLMetadataConnector implements MetadataStorageConnector + ")", tableName, getSerialType(), getPayloadType() ), - String.format("CREATE INDEX idx_%1$s_key_time ON %1$s(audit_key, created_date)", tableName), - String.format("CREATE INDEX idx_%1$s_type_time ON %1$s(type, created_date)", tableName), - String.format("CREATE 
INDEX idx_%1$s_audit_time ON %1$s(created_date)", tableName) + StringUtils.format("CREATE INDEX idx_%1$s_key_time ON %1$s(audit_key, created_date)", tableName), + StringUtils.format("CREATE INDEX idx_%1$s_type_time ON %1$s(type, created_date)", tableName), + StringUtils.format("CREATE INDEX idx_%1$s_audit_time ON %1$s(created_date)", tableName) ) ); } diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataRuleManager.java b/server/src/main/java/io/druid/metadata/SQLMetadataRuleManager.java index a6e5d01a133..a3fa59e0def 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataRuleManager.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataRuleManager.java @@ -40,6 +40,7 @@ import io.druid.concurrent.Execs; import io.druid.guice.ManageLifecycle; import io.druid.guice.annotations.Json; import io.druid.java.util.common.Pair; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.server.coordinator.rules.ForeverLoadRule; @@ -87,7 +88,7 @@ public class SQLMetadataRuleManager implements MetadataRuleManager { List> existing = handle .createQuery( - String.format( + StringUtils.format( "SELECT id from %s where datasource=:dataSource", ruleTable ) @@ -109,12 +110,12 @@ public class SQLMetadataRuleManager implements MetadataRuleManager ); final String version = new DateTime().toString(); handle.createStatement( - String.format( + StringUtils.format( "INSERT INTO %s (id, dataSource, version, payload) VALUES (:id, :dataSource, :version, :payload)", ruleTable ) ) - .bind("id", String.format("%s_%s", defaultDatasourceName, version)) + .bind("id", StringUtils.format("%s_%s", defaultDatasourceName, version)) .bind("dataSource", defaultDatasourceName) .bind("version", version) .bind("payload", jsonMapper.writeValueAsBytes(defaultRules)) @@ -239,7 +240,7 @@ public class SQLMetadataRuleManager implements MetadataRuleManager { return handle.createQuery( // Return latest version rule by dataSource - String.format( + StringUtils.format( "SELECT r.dataSource, r.payload " + "FROM %1$s r " + "INNER JOIN(SELECT dataSource, max(version) as version FROM %1$s GROUP BY dataSource) ds " @@ -377,12 +378,12 @@ public class SQLMetadataRuleManager implements MetadataRuleManager ); String version = auditTime.toString(); handle.createStatement( - String.format( + StringUtils.format( "INSERT INTO %s (id, dataSource, version, payload) VALUES (:id, :dataSource, :version, :payload)", getRulesTable() ) ) - .bind("id", String.format("%s_%s", dataSource, version)) + .bind("id", StringUtils.format("%s_%s", dataSource, version)) .bind("dataSource", dataSource) .bind("version", version) .bind("payload", jsonMapper.writeValueAsBytes(newRules)) @@ -394,7 +395,7 @@ public class SQLMetadataRuleManager implements MetadataRuleManager ); } catch (Exception e) { - log.error(e, String.format("Exception while overriding rule for %s", dataSource)); + log.error(e, StringUtils.format("Exception while overriding rule for %s", dataSource)); return false; } } @@ -402,7 +403,7 @@ public class SQLMetadataRuleManager implements MetadataRuleManager poll(); } catch (Exception e) { - log.error(e, String.format("Exception while polling for rules after overriding the rule for %s", dataSource)); + log.error(e, StringUtils.format("Exception while polling for rules after overriding the rule for %s", dataSource)); } return true; } diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManager.java 
b/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManager.java index 9388c78a7ab..0c0255384ef 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManager.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataSegmentManager.java @@ -38,6 +38,7 @@ import io.druid.client.DruidDataSource; import io.druid.concurrent.Execs; import io.druid.guice.ManageLifecycle; import io.druid.java.util.common.MapUtils; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.timeline.DataSegment; @@ -178,7 +179,7 @@ public class SQLMetadataSegmentManager implements MetadataSegmentManager ) throws Exception { return handle - .createQuery(String.format( + .createQuery(StringUtils.format( "SELECT payload FROM %s WHERE dataSource = :dataSource", getSegmentsTable() )) @@ -247,7 +248,7 @@ public class SQLMetadataSegmentManager implements MetadataSegmentManager for (DataSegment segment : segments) { batch.add( - String.format( + StringUtils.format( "UPDATE %s SET used=true WHERE id = '%s'", getSegmentsTable(), segment.getIdentifier() @@ -280,7 +281,7 @@ public class SQLMetadataSegmentManager implements MetadataSegmentManager public Void withHandle(Handle handle) throws Exception { handle.createStatement( - String.format("UPDATE %s SET used=true WHERE id = :id", getSegmentsTable()) + StringUtils.format("UPDATE %s SET used=true WHERE id = :id", getSegmentsTable()) ) .bind("id", segmentId) .execute(); @@ -315,7 +316,7 @@ public class SQLMetadataSegmentManager implements MetadataSegmentManager public Void withHandle(Handle handle) throws Exception { handle.createStatement( - String.format("UPDATE %s SET used=false WHERE dataSource = :dataSource", getSegmentsTable()) + StringUtils.format("UPDATE %s SET used=false WHERE dataSource = :dataSource", getSegmentsTable()) ) .bind("dataSource", ds) .execute(); @@ -346,7 +347,7 @@ public class SQLMetadataSegmentManager implements MetadataSegmentManager public Void withHandle(Handle handle) throws Exception { handle.createStatement( - String.format("UPDATE %s SET used=false WHERE id = :segmentID", getSegmentsTable()) + StringUtils.format("UPDATE %s SET used=false WHERE id = :segmentID", getSegmentsTable()) ).bind("segmentID", segmentID) .execute(); @@ -406,7 +407,7 @@ public class SQLMetadataSegmentManager implements MetadataSegmentManager public List withHandle(Handle handle) throws Exception { return handle.createQuery( - String.format("SELECT DISTINCT(datasource) FROM %s", getSegmentsTable()) + StringUtils.format("SELECT DISTINCT(datasource) FROM %s", getSegmentsTable()) ) .fold( Lists.newArrayList(), @@ -458,7 +459,7 @@ public class SQLMetadataSegmentManager implements MetadataSegmentManager public List inTransaction(Handle handle, TransactionStatus status) throws Exception { return handle - .createQuery(String.format("SELECT payload FROM %s WHERE used=true", getSegmentsTable())) + .createQuery(StringUtils.format("SELECT payload FROM %s WHERE used=true", getSegmentsTable())) .setFetchSize(connector.getStreamingFetchSize()) .map( new ResultSetMapper() @@ -555,7 +556,7 @@ public class SQLMetadataSegmentManager implements MetadataSegmentManager { Iterator iter = handle .createQuery( - String.format( + StringUtils.format( "SELECT start, %2$send%2$s FROM %1$s WHERE dataSource = :dataSource and start >= :start and %2$send%2$s <= :end and used = false ORDER BY start, %2$send%2$s", getSegmentsTable(), connector.getQuoteString() ) 
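For context between these hunks, the behavior the patch defends against is easy to reproduce. A minimal sketch follows, assuming only the standard JDK; the class name LocaleFormatDemo is invented here, and that StringUtils.format() pins Locale.ENGLISH is inferred from the replacements above rather than shown in this hunk:

import java.util.Locale;

public class LocaleFormatDemo
{
  public static void main(String[] args)
  {
    // String.format() without an explicit Locale reads the JVM default, so the
    // grouping separator of "%,d" varies by machine and the same code emits
    // different SQL fragments, metric names, and log lines on different servers.
    System.out.println(String.format(Locale.ENGLISH, "%,d", 1234567)); // 1,234,567
    System.out.println(String.format(Locale.GERMAN, "%,d", 1234567));  // 1.234.567
    // A helper that always passes a fixed locale makes the output deterministic,
    // which is what each String.format() -> StringUtils.format() change here buys.
  }
}
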
diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataSegmentPublisher.java b/server/src/main/java/io/druid/metadata/SQLMetadataSegmentPublisher.java index 294ec331e0f..4f95a2ad898 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataSegmentPublisher.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataSegmentPublisher.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import com.google.inject.Inject; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; @@ -54,7 +55,7 @@ public class SQLMetadataSegmentPublisher implements MetadataSegmentPublisher this.jsonMapper = jsonMapper; this.config = config; this.connector = connector; - this.statement = String.format( + this.statement = StringUtils.format( "INSERT INTO %1$s (id, dataSource, created_date, start, %2$send%2$s, partitioned, version, used, payload) " + "VALUES (:id, :dataSource, :created_date, :start, :end, :partitioned, :version, :used, :payload)", config.getSegmentsTable(), connector.getQuoteString() @@ -99,7 +100,7 @@ public class SQLMetadataSegmentPublisher implements MetadataSegmentPublisher public List> withHandle(Handle handle) throws Exception { return handle.createQuery( - String.format("SELECT id FROM %s WHERE id=:id", config.getSegmentsTable()) + StringUtils.format("SELECT id FROM %s WHERE id=:id", config.getSegmentsTable()) ) .bind("id", identifier) .list(); diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataStorageActionHandler.java b/server/src/main/java/io/druid/metadata/SQLMetadataStorageActionHandler.java index 5203cbad816..3fcc211d0d3 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataStorageActionHandler.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataStorageActionHandler.java @@ -105,7 +105,7 @@ public class SQLMetadataStorageActionHandler withHandle(Handle handle) throws Exception { byte[] res = handle.createQuery( - String.format("SELECT payload FROM %s WHERE id = :id", entryTable) + StringUtils.format("SELECT payload FROM %s WHERE id = :id", entryTable) ) .bind("id", entryId) .map(ByteArrayMapper.FIRST) @@ -204,7 +204,7 @@ public class SQLMetadataStorageActionHandler withHandle(Handle handle) throws Exception { byte[] res = handle.createQuery( - String.format("SELECT status_payload FROM %s WHERE id = :id", entryTable) + StringUtils.format("SELECT status_payload FROM %s WHERE id = :id", entryTable) ) .bind("id", entryId) .map(ByteArrayMapper.FIRST) @@ -229,7 +229,7 @@ public class SQLMetadataStorageActionHandler= :start ORDER BY created_date DESC", entryTable ) @@ -318,7 +318,7 @@ public class SQLMetadataStorageActionHandler withHandle(Handle handle) throws Exception { return handle.createQuery( - String.format( + StringUtils.format( "SELECT id, lock_payload FROM %1$s WHERE %2$s_id = :entryId", lockTable, entryTypeName ) diff --git a/server/src/main/java/io/druid/metadata/SQLMetadataSupervisorManager.java b/server/src/main/java/io/druid/metadata/SQLMetadataSupervisorManager.java index bf2bc580a7d..6d65fbf9ca5 100644 --- a/server/src/main/java/io/druid/metadata/SQLMetadataSupervisorManager.java +++ b/server/src/main/java/io/druid/metadata/SQLMetadataSupervisorManager.java @@ -33,6 +33,7 @@ import io.druid.guice.annotations.Json; import io.druid.indexing.overlord.supervisor.SupervisorSpec; import 
io.druid.indexing.overlord.supervisor.VersionedSupervisorSpec; import io.druid.java.util.common.Pair; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.lifecycle.LifecycleStart; import org.joda.time.DateTime; @@ -88,7 +89,7 @@ public class SQLMetadataSupervisorManager implements MetadataSupervisorManager public Void withHandle(Handle handle) throws Exception { handle.createStatement( - String.format( + StringUtils.format( "INSERT INTO %s (spec_id, created_date, payload) VALUES (:spec_id, :created_date, :payload)", getSupervisorsTable() ) @@ -115,7 +116,7 @@ public class SQLMetadataSupervisorManager implements MetadataSupervisorManager public Map> withHandle(Handle handle) throws Exception { return handle.createQuery( - String.format( + StringUtils.format( "SELECT id, spec_id, created_date, payload FROM %1$s ORDER BY id DESC", getSupervisorsTable() ) @@ -187,7 +188,7 @@ public class SQLMetadataSupervisorManager implements MetadataSupervisorManager public Map withHandle(Handle handle) throws Exception { return handle.createQuery( - String.format( + StringUtils.format( "SELECT r.spec_id, r.payload " + "FROM %1$s r " + "INNER JOIN(SELECT spec_id, max(id) as id FROM %1$s GROUP BY spec_id) latest " diff --git a/server/src/main/java/io/druid/metadata/storage/derby/DerbyConnector.java b/server/src/main/java/io/druid/metadata/storage/derby/DerbyConnector.java index f96f8eec932..38ce6e737e1 100644 --- a/server/src/main/java/io/druid/metadata/storage/derby/DerbyConnector.java +++ b/server/src/main/java/io/druid/metadata/storage/derby/DerbyConnector.java @@ -22,6 +22,7 @@ package io.druid.metadata.storage.derby; import com.google.common.base.Supplier; import com.google.inject.Inject; import io.druid.guice.ManageLifecycle; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.lifecycle.LifecycleStart; import io.druid.java.util.common.lifecycle.LifecycleStop; import io.druid.java.util.common.logger.Logger; @@ -76,7 +77,7 @@ public class DerbyConnector extends SQLMetadataConnector public boolean tableExists(Handle handle, String tableName) { return !handle.createQuery("select * from SYS.SYSTABLES where tablename = :tableName") - .bind("tableName", tableName.toUpperCase()) + .bind("tableName", StringUtils.toUpperCase(tableName)) .list() .isEmpty(); } diff --git a/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java b/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java index b1fc363ee1f..44eb18dea8f 100644 --- a/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java +++ b/server/src/main/java/io/druid/segment/indexing/granularity/ArbitraryGranularitySpec.java @@ -28,6 +28,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.PeekingIterator; import com.google.common.collect.Sets; import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.IAE; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.Comparators; @@ -72,13 +73,7 @@ public class ArbitraryGranularitySpec implements GranularitySpec if (intervalIterator.hasNext()) { final Interval nextInterval = intervalIterator.peek(); if (currentInterval.overlaps(nextInterval)) { - throw new IllegalArgumentException( - String.format( - "Overlapping intervals: %s, %s", - currentInterval, - nextInterval - ) - ); + throw new IAE("Overlapping intervals: %s, %s", 
currentInterval, nextInterval); } } } diff --git a/server/src/main/java/io/druid/segment/loading/LocalDataSegmentPuller.java b/server/src/main/java/io/druid/segment/loading/LocalDataSegmentPuller.java index 22d68091512..03fca2f3d53 100644 --- a/server/src/main/java/io/druid/segment/loading/LocalDataSegmentPuller.java +++ b/server/src/main/java/io/druid/segment/loading/LocalDataSegmentPuller.java @@ -24,6 +24,7 @@ import com.google.common.io.Files; import io.druid.java.util.common.CompressionUtils; import io.druid.java.util.common.FileUtils; import io.druid.java.util.common.MapUtils; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.UOE; import io.druid.java.util.common.logger.Logger; import io.druid.timeline.DataSegment; @@ -211,7 +212,7 @@ public class LocalDataSegmentPuller implements DataSegmentPuller, URIDataPuller @Override public String getVersion(URI uri) { - return String.format("%d", buildFileObject(uri).getLastModified()); + return StringUtils.format("%d", buildFileObject(uri).getLastModified()); } @Override diff --git a/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorImpl.java b/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorImpl.java index 4e3062a5d0c..7486fbddfc0 100644 --- a/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorImpl.java +++ b/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorImpl.java @@ -48,6 +48,7 @@ import io.druid.data.input.InputRow; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; +import io.druid.java.util.common.StringUtils; import io.druid.query.Query; import io.druid.query.QueryRunner; import io.druid.query.QueryRunnerFactoryConglomerate; @@ -372,7 +373,7 @@ public class AppenderatorImpl implements Appenderator log.info("Submitting persist runnable for dataSource[%s]", schema.getDataSource()); - final String threadName = String.format("%s-incremental-persist", schema.getDataSource()); + final String threadName = StringUtils.format("%s-incremental-persist", schema.getDataSource()); final Object commitMetadata = committer.getMetadata(); final Stopwatch runExecStopwatch = Stopwatch.createStarted(); final Stopwatch persistStopwatch = Stopwatch.createStarted(); @@ -396,7 +397,7 @@ public class AppenderatorImpl implements Appenderator @Override public String apply(Map.Entry entry) { - return String.format("%s:%d", entry.getKey().getIdentifierAsString(), entry.getValue()); + return StringUtils.format("%s:%d", entry.getKey().getIdentifierAsString(), entry.getValue()); } } ) diff --git a/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorPlumber.java b/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorPlumber.java index 411d7500f93..16a9c86f494 100644 --- a/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorPlumber.java +++ b/server/src/main/java/io/druid/segment/realtime/appenderator/AppenderatorPlumber.java @@ -36,6 +36,7 @@ import io.druid.concurrent.Execs; import io.druid.data.input.Committer; import io.druid.data.input.InputRow; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.concurrent.ScheduledExecutors; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.guava.Sequence; @@ -355,7 +356,7 @@ public class AppenderatorPlumber implements Plumber ), new Duration(truncatedNow, 
segmentGranularity.increment(truncatedNow)), new ThreadRenamingCallable( - String.format( + StringUtils.format( "%s-overseer-%d", schema.getDataSource(), config.getShardSpec().getPartitionNum() diff --git a/server/src/main/java/io/druid/segment/realtime/appenderator/SegmentNotWritableException.java b/server/src/main/java/io/druid/segment/realtime/appenderator/SegmentNotWritableException.java index 354866a5cd4..7f6e6cd17ad 100644 --- a/server/src/main/java/io/druid/segment/realtime/appenderator/SegmentNotWritableException.java +++ b/server/src/main/java/io/druid/segment/realtime/appenderator/SegmentNotWritableException.java @@ -19,10 +19,12 @@ package io.druid.segment.realtime.appenderator; +import io.druid.java.util.common.StringUtils; + public class SegmentNotWritableException extends Exception { public SegmentNotWritableException(String message, Object... messageArgs) { - super(String.format(message, messageArgs)); + super(StringUtils.nonStrictFormat(message, messageArgs)); } } diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumber.java b/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumber.java index 217874eb9ee..0294103b132 100644 --- a/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumber.java +++ b/server/src/main/java/io/druid/segment/realtime/plumber/FlushingPlumber.java @@ -27,6 +27,7 @@ import io.druid.client.cache.Cache; import io.druid.client.cache.CacheConfig; import io.druid.common.guava.ThreadRenamingCallable; import io.druid.concurrent.Execs; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.concurrent.ScheduledExecutors; import io.druid.query.QueryRunnerFactoryConglomerate; @@ -164,7 +165,7 @@ public class FlushingPlumber extends RealtimePlumber ), new Duration(truncatedNow, segmentGranularity.increment(truncatedNow)), new ThreadRenamingCallable( - String.format( + StringUtils.format( "%s-flusher-%d", getSchema().getDataSource(), getConfig().getShardSpec().getPartitionNum() diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactory.java b/server/src/main/java/io/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactory.java index 1f516db2c20..1773abdf695 100644 --- a/server/src/main/java/io/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactory.java +++ b/server/src/main/java/io/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactory.java @@ -20,6 +20,7 @@ package io.druid.segment.realtime.plumber; import io.druid.common.utils.JodaUtils; +import io.druid.java.util.common.StringUtils; import org.joda.time.DateTime; import org.joda.time.Period; @@ -81,7 +82,7 @@ public class MessageTimeRejectionPolicyFactory implements RejectionPolicyFactory @Override public String toString() { - return String.format("messageTime-%s", windowPeriod); + return StringUtils.format("messageTime-%s", windowPeriod); } } } diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumber.java b/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumber.java index 1e1b3cb1809..538440b2d30 100644 --- a/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumber.java +++ b/server/src/main/java/io/druid/segment/realtime/plumber/RealtimePlumber.java @@ -44,6 +44,7 @@ import io.druid.data.input.Committer; import io.druid.data.input.InputRow; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; +import 
io.druid.java.util.common.StringUtils; import io.druid.java.util.common.concurrent.ScheduledExecutors; import io.druid.java.util.common.granularity.Granularity; import io.druid.query.Query; @@ -285,7 +286,7 @@ public class RealtimePlumber implements Plumber ); persistExecutor.execute( - new ThreadRenamingRunnable(String.format("%s-incremental-persist", schema.getDataSource())) + new ThreadRenamingRunnable(StringUtils.format("%s-incremental-persist", schema.getDataSource())) { @Override public void doRun() @@ -352,7 +353,7 @@ public class RealtimePlumber implements Plumber // Submits persist-n-merge task for a Sink to the mergeExecutor private void persistAndMerge(final long truncatedTime, final Sink sink) { - final String threadName = String.format( + final String threadName = StringUtils.format( "%s-%s-persist-n-merge", schema.getDataSource(), new DateTime(truncatedTime) ); mergeExecutor.execute( @@ -760,7 +761,7 @@ public class RealtimePlumber implements Plumber ), new Duration(truncatedNow, segmentGranularity.increment(truncatedNow)), new ThreadRenamingCallable( - String.format( + StringUtils.format( "%s-overseer-%d", schema.getDataSource(), config.getShardSpec().getPartitionNum() diff --git a/server/src/main/java/io/druid/segment/realtime/plumber/ServerTimeRejectionPolicyFactory.java b/server/src/main/java/io/druid/segment/realtime/plumber/ServerTimeRejectionPolicyFactory.java index 12c05813012..a52639b02c3 100644 --- a/server/src/main/java/io/druid/segment/realtime/plumber/ServerTimeRejectionPolicyFactory.java +++ b/server/src/main/java/io/druid/segment/realtime/plumber/ServerTimeRejectionPolicyFactory.java @@ -19,6 +19,7 @@ package io.druid.segment.realtime.plumber; +import io.druid.java.util.common.StringUtils; import org.joda.time.DateTime; import org.joda.time.Period; @@ -51,7 +52,7 @@ public class ServerTimeRejectionPolicyFactory implements RejectionPolicyFactory @Override public String toString() { - return String.format("serverTime-%s", windowPeriod); + return StringUtils.format("serverTime-%s", windowPeriod); } }; } diff --git a/server/src/main/java/io/druid/server/QueryResource.java b/server/src/main/java/io/druid/server/QueryResource.java index b1f1013f47e..7882424a14f 100644 --- a/server/src/main/java/io/druid/server/QueryResource.java +++ b/server/src/main/java/io/druid/server/QueryResource.java @@ -35,6 +35,7 @@ import io.druid.client.DirectDruidClient; import io.druid.guice.annotations.Json; import io.druid.guice.annotations.Smile; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.java.util.common.guava.Yielder; @@ -209,7 +210,7 @@ public class QueryResource implements QueryCountStatsProvider toolChest = warehouse.getToolChest(query); Thread.currentThread() - .setName(String.format("%s[%s_%s_%s]", currThreadName, query.getType(), query.getDataSource().getNames(), queryId)); + .setName(StringUtils.format("%s[%s_%s_%s]", currThreadName, query.getType(), query.getDataSource().getNames(), queryId)); if (log.isDebugEnabled()) { log.debug("Got query [%s]", query); } diff --git a/server/src/main/java/io/druid/server/StatusResource.java b/server/src/main/java/io/druid/server/StatusResource.java index 127c840be41..77e00844687 100644 --- a/server/src/main/java/io/druid/server/StatusResource.java +++ b/server/src/main/java/io/druid/server/StatusResource.java @@ -24,6 +24,7 @@ import com.fasterxml.jackson.annotation.JsonProperty; 
import com.sun.jersey.spi.container.ResourceFilters; import io.druid.initialization.DruidModule; import io.druid.initialization.Initialization; +import io.druid.java.util.common.StringUtils; import io.druid.server.http.security.StateResourceFilter; import javax.ws.rs.GET; @@ -88,7 +89,7 @@ public class StatusResource { final String NL = System.getProperty("line.separator"); StringBuilder output = new StringBuilder(); - output.append(String.format("Druid version - %s", version)).append(NL).append(NL); + output.append(StringUtils.format("Druid version - %s", version)).append(NL).append(NL); if (modules.size() > 0) { output.append("Registered Druid Modules").append(NL); @@ -156,9 +157,9 @@ public class StatusResource public String toString() { if (artifact == null || artifact.isEmpty()) { - return String.format(" - %s ", name); + return StringUtils.format(" - %s ", name); } else { - return String.format(" - %s (%s-%s)", name, artifact, version); + return StringUtils.format(" - %s (%s-%s)", name, artifact, version); } } } diff --git a/server/src/main/java/io/druid/server/audit/SQLAuditManager.java b/server/src/main/java/io/druid/server/audit/SQLAuditManager.java index a9356fe2905..e1de8612e43 100644 --- a/server/src/main/java/io/druid/server/audit/SQLAuditManager.java +++ b/server/src/main/java/io/druid/server/audit/SQLAuditManager.java @@ -29,6 +29,7 @@ import io.druid.audit.AuditEntry; import io.druid.audit.AuditManager; import io.druid.guice.ManageLifecycle; import io.druid.guice.annotations.Json; +import io.druid.java.util.common.StringUtils; import io.druid.metadata.MetadataStorageTablesConfig; import io.druid.metadata.SQLMetadataConnector; @@ -105,7 +106,7 @@ public class SQLAuditManager implements AuditManager ); handle.createStatement( - String.format( + StringUtils.format( "INSERT INTO %s ( audit_key, type, author, comment, created_date, payload) VALUES (:audit_key, :type, :author, :comment, :created_date, :payload)", getAuditTable() ) @@ -130,7 +131,7 @@ public class SQLAuditManager implements AuditManager public List withHandle(Handle handle) throws Exception { return handle.createQuery( - String.format( + StringUtils.format( "SELECT payload FROM %s WHERE audit_key = :audit_key and type = :type and created_date between :start_date and :end_date ORDER BY created_date", getAuditTable() ) @@ -191,7 +192,7 @@ public class SQLAuditManager implements AuditManager public List withHandle(Handle handle) throws Exception { return handle.createQuery( - String.format( + StringUtils.format( "SELECT payload FROM %s WHERE type = :type and created_date between :start_date and :end_date ORDER BY created_date", getAuditTable() ) @@ -239,7 +240,7 @@ public class SQLAuditManager implements AuditManager throws IllegalArgumentException { final int theLimit = getLimit(limit); - String queryString = String.format("SELECT payload FROM %s WHERE type = :type", getAuditTable()); + String queryString = StringUtils.format("SELECT payload FROM %s WHERE type = :type", getAuditTable()); if (key != null) { queryString += " and audit_key = :audit_key"; } diff --git a/server/src/main/java/io/druid/server/coordination/BatchDataSegmentAnnouncer.java b/server/src/main/java/io/druid/server/coordination/BatchDataSegmentAnnouncer.java index b73322f726e..a8d495032d3 100644 --- a/server/src/main/java/io/druid/server/coordination/BatchDataSegmentAnnouncer.java +++ b/server/src/main/java/io/druid/server/coordination/BatchDataSegmentAnnouncer.java @@ -33,6 +33,7 @@ import com.google.inject.Inject; import 
io.druid.common.utils.UUIDUtils; import io.druid.curator.announcement.Announcer; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.server.initialization.BatchDataSegmentAnnouncerConfig; import io.druid.server.initialization.ZkPathsConfig; @@ -319,7 +320,7 @@ public class BatchDataSegmentAnnouncer implements DataSegmentAnnouncer private String makeServedSegmentPath(String zNode) { - return ZKPaths.makePath(liveSegmentLocation, String.format("%s%s", zNode, counter.getAndIncrement())); + return ZKPaths.makePath(liveSegmentLocation, StringUtils.format("%s%s", zNode, counter.getAndIncrement())); } private class SegmentZNode implements Comparable<SegmentZNode> diff --git a/server/src/main/java/io/druid/server/coordination/SegmentChangeRequestDrop.java b/server/src/main/java/io/druid/server/coordination/SegmentChangeRequestDrop.java index ddbc49b5dac..5cb7c18d3a6 100644 --- a/server/src/main/java/io/druid/server/coordination/SegmentChangeRequestDrop.java +++ b/server/src/main/java/io/druid/server/coordination/SegmentChangeRequestDrop.java @@ -22,6 +22,7 @@ package io.druid.server.coordination; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonUnwrapped; +import io.druid.java.util.common.StringUtils; import io.druid.timeline.DataSegment; /** @@ -54,7 +55,7 @@ public class SegmentChangeRequestDrop implements DataSegmentChangeRequest @Override public String asString() { - return String.format("DROP: %s", segment.getIdentifier()); + return StringUtils.format("DROP: %s", segment.getIdentifier()); } @Override diff --git a/server/src/main/java/io/druid/server/coordination/SegmentChangeRequestHistory.java b/server/src/main/java/io/druid/server/coordination/SegmentChangeRequestHistory.java index 5df40c4ca31..4015bf1ca39 100644 --- a/server/src/main/java/io/druid/server/coordination/SegmentChangeRequestHistory.java +++ b/server/src/main/java/io/druid/server/coordination/SegmentChangeRequestHistory.java @@ -163,7 +163,7 @@ public class SegmentChangeRequestHistory // Note: counter reset is requested when a client asks for "maxSize" number of changes even if all those changes // are present in the history, because one extra element is needed to match the counter hash. return SegmentChangeRequestsSnapshot.fail( - StringUtils.safeFormat( + StringUtils.format( "can't serve request, not enough history is kept. 
given counter [%s] and current last counter [%s]", counter, lastCounter diff --git a/server/src/main/java/io/druid/server/coordination/SegmentChangeRequestLoad.java b/server/src/main/java/io/druid/server/coordination/SegmentChangeRequestLoad.java index b143da54ffa..9ad6f667f6a 100644 --- a/server/src/main/java/io/druid/server/coordination/SegmentChangeRequestLoad.java +++ b/server/src/main/java/io/druid/server/coordination/SegmentChangeRequestLoad.java @@ -22,6 +22,7 @@ package io.druid.server.coordination; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonUnwrapped; +import io.druid.java.util.common.StringUtils; import io.druid.timeline.DataSegment; /** @@ -54,7 +55,7 @@ public class SegmentChangeRequestLoad implements DataSegmentChangeRequest @Override public String asString() { - return String.format("LOAD: %s", segment.getIdentifier()); + return StringUtils.format("LOAD: %s", segment.getIdentifier()); } @Override diff --git a/server/src/main/java/io/druid/server/coordination/ServerType.java b/server/src/main/java/io/druid/server/coordination/ServerType.java index f0acf83edd6..b42b7a4c99c 100644 --- a/server/src/main/java/io/druid/server/coordination/ServerType.java +++ b/server/src/main/java/io/druid/server/coordination/ServerType.java @@ -21,6 +21,7 @@ package io.druid.server.coordination; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonValue; +import io.druid.java.util.common.StringUtils; /** * This enum represents types of druid services that hold segments. @@ -79,13 +80,13 @@ public enum ServerType @JsonCreator public static ServerType fromString(String type) { - return ServerType.valueOf(type.toUpperCase().replace("-", "_")); + return ServerType.valueOf(StringUtils.toUpperCase(type).replace("-", "_")); } @Override @JsonValue public String toString() { - return name().toLowerCase().replace("_", "-"); + return StringUtils.toLowerCase(name()).replace("_", "-"); } } diff --git a/server/src/main/java/io/druid/server/coordinator/ReplicationThrottler.java b/server/src/main/java/io/druid/server/coordinator/ReplicationThrottler.java index 6d55e751b87..dcf69b5381c 100644 --- a/server/src/main/java/io/druid/server/coordinator/ReplicationThrottler.java +++ b/server/src/main/java/io/druid/server/coordinator/ReplicationThrottler.java @@ -22,6 +22,7 @@ package io.druid.server.coordinator; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.metamx.emitter.EmittingLogger; +import io.druid.java.util.common.StringUtils; import java.util.List; import java.util.Map; @@ -167,7 +168,7 @@ public class ReplicationThrottler List retVal = Lists.newArrayList(); for (Map.Entry entry : segments.entrySet()) { retVal.add( - String.format("%s ON %s", entry.getKey(), entry.getValue()) + StringUtils.format("%s ON %s", entry.getKey(), entry.getValue()) ); } return retVal; diff --git a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorBalancer.java b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorBalancer.java index a5989028e24..89bf05b4602 100644 --- a/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorBalancer.java +++ b/server/src/main/java/io/druid/server/coordinator/helper/DruidCoordinatorBalancer.java @@ -24,6 +24,7 @@ import com.google.common.collect.Maps; import com.google.common.collect.MinMaxPriorityQueue; import com.metamx.emitter.EmittingLogger; import 
io.druid.client.ImmutableDruidServer; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.Comparators; import io.druid.server.coordinator.BalancerSegmentHolder; import io.druid.server.coordinator.BalancerStrategy; @@ -193,7 +194,7 @@ public class DruidCoordinatorBalancer implements DruidCoordinatorHelper ); } catch (Exception e) { - log.makeAlert(e, String.format("[%s] : Moving exception", segmentName)).emit(); + log.makeAlert(e, StringUtils.format("[%s] : Moving exception", segmentName)).emit(); if (callback != null) { callback.execute(); } diff --git a/server/src/main/java/io/druid/server/http/CoordinatorRedirectInfo.java b/server/src/main/java/io/druid/server/http/CoordinatorRedirectInfo.java index 52435f86cf5..632b5af4f6d 100644 --- a/server/src/main/java/io/druid/server/http/CoordinatorRedirectInfo.java +++ b/server/src/main/java/io/druid/server/http/CoordinatorRedirectInfo.java @@ -22,6 +22,7 @@ package io.druid.server.http; import com.google.common.base.Throwables; import com.google.common.collect.ImmutableSet; import com.google.inject.Inject; +import io.druid.java.util.common.StringUtils; import io.druid.server.coordinator.DruidCoordinator; import java.net.URL; @@ -59,10 +60,10 @@ public class CoordinatorRedirectInfo implements RedirectInfo return null; } - String location = String.format("http://%s%s", leader, requestURI); + String location = StringUtils.format("http://%s%s", leader, requestURI); if (queryString != null) { - location = String.format("%s?%s", location, queryString); + location = StringUtils.format("%s?%s", location, queryString); } return new URL(location); diff --git a/server/src/main/java/io/druid/server/http/security/ConfigResourceFilter.java b/server/src/main/java/io/druid/server/http/security/ConfigResourceFilter.java index 61fc28f1626..926df47f07b 100644 --- a/server/src/main/java/io/druid/server/http/security/ConfigResourceFilter.java +++ b/server/src/main/java/io/druid/server/http/security/ConfigResourceFilter.java @@ -22,6 +22,7 @@ package io.druid.server.http.security; import com.google.common.base.Preconditions; import com.google.inject.Inject; import com.sun.jersey.spi.container.ContainerRequest; +import io.druid.java.util.common.StringUtils; import io.druid.server.security.Access; import io.druid.server.security.AuthConfig; import io.druid.server.security.AuthorizationInfo; @@ -67,7 +68,7 @@ public class ConfigResourceFilter extends AbstractResourceFilter if (!authResult.isAllowed()) { throw new WebApplicationException( Response.status(Response.Status.FORBIDDEN) - .entity(String.format("Access-Check-Result: %s", authResult.toString())) + .entity(StringUtils.format("Access-Check-Result: %s", authResult.toString())) .build() ); } diff --git a/server/src/main/java/io/druid/server/http/security/DatasourceResourceFilter.java b/server/src/main/java/io/druid/server/http/security/DatasourceResourceFilter.java index ccbeab86600..073214ba393 100644 --- a/server/src/main/java/io/druid/server/http/security/DatasourceResourceFilter.java +++ b/server/src/main/java/io/druid/server/http/security/DatasourceResourceFilter.java @@ -25,6 +25,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.inject.Inject; import com.sun.jersey.spi.container.ContainerRequest; +import io.druid.java.util.common.StringUtils; import io.druid.server.security.Access; import io.druid.server.security.AuthConfig; import io.druid.server.security.AuthorizationInfo; @@ -83,7 +84,7 @@ public class 
DatasourceResourceFilter extends AbstractResourceFilter if (!authResult.isAllowed()) { throw new WebApplicationException( Response.status(Response.Status.FORBIDDEN) - .entity(String.format("Access-Check-Result: %s", authResult.toString())) + .entity(StringUtils.format("Access-Check-Result: %s", authResult.toString())) .build() ); } diff --git a/server/src/main/java/io/druid/server/http/security/RulesResourceFilter.java b/server/src/main/java/io/druid/server/http/security/RulesResourceFilter.java index 0e87fab200f..1f73bd3b984 100644 --- a/server/src/main/java/io/druid/server/http/security/RulesResourceFilter.java +++ b/server/src/main/java/io/druid/server/http/security/RulesResourceFilter.java @@ -25,6 +25,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.inject.Inject; import com.sun.jersey.spi.container.ContainerRequest; +import io.druid.java.util.common.StringUtils; import io.druid.server.security.Access; import io.druid.server.security.AuthConfig; import io.druid.server.security.AuthorizationInfo; @@ -83,7 +84,7 @@ public class RulesResourceFilter extends AbstractResourceFilter if (!authResult.isAllowed()) { throw new WebApplicationException( Response.status(Response.Status.FORBIDDEN) - .entity(String.format("Access-Check-Result: %s", authResult.toString())) + .entity(StringUtils.format("Access-Check-Result: %s", authResult.toString())) .build() ); } diff --git a/server/src/main/java/io/druid/server/http/security/StateResourceFilter.java b/server/src/main/java/io/druid/server/http/security/StateResourceFilter.java index 39d825130c3..cec7ecd21c2 100644 --- a/server/src/main/java/io/druid/server/http/security/StateResourceFilter.java +++ b/server/src/main/java/io/druid/server/http/security/StateResourceFilter.java @@ -22,6 +22,7 @@ package io.druid.server.http.security; import com.google.common.base.Preconditions; import com.google.inject.Inject; import com.sun.jersey.spi.container.ContainerRequest; +import io.druid.java.util.common.StringUtils; import io.druid.server.security.Access; import io.druid.server.security.AuthConfig; import io.druid.server.security.AuthorizationInfo; @@ -73,7 +74,7 @@ public class StateResourceFilter extends AbstractResourceFilter if (!authResult.isAllowed()) { throw new WebApplicationException( Response.status(Response.Status.FORBIDDEN) - .entity(String.format("Access-Check-Result: %s", authResult.toString())) + .entity(StringUtils.format("Access-Check-Result: %s", authResult.toString())) .build() ); } diff --git a/server/src/main/java/io/druid/server/log/FileRequestLogger.java b/server/src/main/java/io/druid/server/log/FileRequestLogger.java index 4f3fefbc9e8..a35fc54b8eb 100644 --- a/server/src/main/java/io/druid/server/log/FileRequestLogger.java +++ b/server/src/main/java/io/druid/server/log/FileRequestLogger.java @@ -22,6 +22,7 @@ package io.druid.server.log; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Charsets; import com.google.common.base.Throwables; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.concurrent.ScheduledExecutors; import io.druid.java.util.common.guava.CloseQuietly; import io.druid.java.util.common.lifecycle.LifecycleStart; @@ -125,7 +126,7 @@ public class FileRequestLogger implements RequestLogger { synchronized (lock) { fileWriter.write( - String.format("%s%n", requestLogLine.getLine(objectMapper)) + StringUtils.format("%s%n", requestLogLine.getLine(objectMapper)) ); fileWriter.flush(); } diff --git 
a/server/src/main/java/io/druid/server/security/Access.java b/server/src/main/java/io/druid/server/security/Access.java index a70e579f3a4..69fee1386bc 100644 --- a/server/src/main/java/io/druid/server/security/Access.java +++ b/server/src/main/java/io/druid/server/security/Access.java @@ -19,6 +19,8 @@ package io.druid.server.security; +import io.druid.java.util.common.StringUtils; + public class Access { private final boolean allowed; @@ -46,6 +48,6 @@ public class Access @Override public String toString() { - return String.format("Allowed:%s, Message:%s", allowed, message); + return StringUtils.format("Allowed:%s, Message:%s", allowed, message); } } diff --git a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java index 77d25cbddbf..7d790f9e148 100644 --- a/server/src/test/java/io/druid/client/CachingClusteredClientTest.java +++ b/server/src/test/java/io/druid/client/CachingClusteredClientTest.java @@ -57,6 +57,7 @@ import io.druid.hll.HyperLogLogCollector; import io.druid.jackson.DefaultObjectMapper; import io.druid.java.util.common.ISE; import io.druid.java.util.common.Pair; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.java.util.common.granularity.PeriodGranularity; @@ -1999,7 +2000,7 @@ public class CachingClusteredClientTest MultipleSpecificSegmentSpec spec = (MultipleSpecificSegmentSpec)query.getQuerySegmentSpec(); List> ret = Lists.newArrayList(); for (SegmentDescriptor descriptor : spec.getDescriptors()) { - String id = String.format("%s_%s", queryIntervals.indexOf(descriptor.getInterval()), descriptor.getPartitionNumber()); + String id = StringUtils.format("%s_%s", queryIntervals.indexOf(descriptor.getInterval()), descriptor.getPartitionNumber()); int index = segmentIds.indexOf(id); if (index != -1) { ret.add(new Result( @@ -2250,7 +2251,7 @@ public class CachingClusteredClientTest DataSegment mockSegment = makeMock(mocks, DataSegment.class); ServerExpectation expectation = new ServerExpectation( - String.format("%s_%s", k, j), // interval/chunk + StringUtils.format("%s_%s", k, j), // interval/chunk queryIntervals.get(k), mockSegment, expectedResults.get(k).get(j) diff --git a/server/src/test/java/io/druid/client/cache/CacheDistributionTest.java b/server/src/test/java/io/druid/client/cache/CacheDistributionTest.java index d70989a8abf..0c61e092d1f 100644 --- a/server/src/test/java/io/druid/client/cache/CacheDistributionTest.java +++ b/server/src/test/java/io/druid/client/cache/CacheDistributionTest.java @@ -41,6 +41,7 @@ import java.net.InetSocketAddress; import java.net.SocketAddress; import java.util.Arrays; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicLong; @@ -80,7 +81,8 @@ public class CacheDistributionTest @BeforeClass public static void header() { System.out.printf( - "%25s\t%5s\t%10s\t%10s\t%10s\t%10s\t%10s\t%7s\t%5s\n", + Locale.ENGLISH, + "%25s\t%5s\t%10s\t%10s\t%10s\t%10s\t%10s\t%7s\t%5s%n", "hash", "reps", "node 1", "node 2", "node 3", "node 4", "node 5", "min/max", "ns" ); } @@ -130,13 +132,13 @@ public class CacheDistributionTest long min = Long.MAX_VALUE; long max = 0; - System.out.printf("%25s\t%5d\t", hash, reps); + System.out.printf(Locale.ENGLISH, "%25s\t%5d\t", hash, reps); for(AtomicLong count : counter.values()) { - System.out.printf("%10d\t", 
count.get()); + System.out.printf(Locale.ENGLISH, "%10d\t", count.get()); min = Math.min(min, count.get()); max = Math.max(max, count.get()); } - System.out.printf("%7.2f\t%5.0f\n", (double) min / (double) max, (double)t / KEY_COUNT); + System.out.printf(Locale.ENGLISH, "%7.2f\t%5.0f%n", (double) min / (double) max, (double)t / KEY_COUNT); } private static MemcachedNode dummyNode(String host, int port) { diff --git a/server/src/test/java/io/druid/curator/discovery/ServerDiscoverySelectorTest.java b/server/src/test/java/io/druid/curator/discovery/ServerDiscoverySelectorTest.java index 223e123eca5..c80598511e5 100644 --- a/server/src/test/java/io/druid/curator/discovery/ServerDiscoverySelectorTest.java +++ b/server/src/test/java/io/druid/curator/discovery/ServerDiscoverySelectorTest.java @@ -20,6 +20,7 @@ package io.druid.curator.discovery; import io.druid.client.selector.Server; +import io.druid.java.util.common.StringUtils; import org.apache.curator.x.discovery.ServiceInstance; import org.apache.curator.x.discovery.ServiceProvider; import org.easymock.EasyMock; @@ -101,7 +102,7 @@ public class ServerDiscoverySelectorTest null ); Assert.assertEquals(PORT, uri.getPort()); - Assert.assertEquals(String.format("[%s]", ADDRESS), uri.getHost()); + Assert.assertEquals(StringUtils.format("[%s]", ADDRESS), uri.getHost()); Assert.assertEquals("http", uri.getScheme()); } diff --git a/server/src/test/java/io/druid/guice/JsonConfigTesterBase.java b/server/src/test/java/io/druid/guice/JsonConfigTesterBase.java index 73583540b64..06a665a15f5 100644 --- a/server/src/test/java/io/druid/guice/JsonConfigTesterBase.java +++ b/server/src/test/java/io/druid/guice/JsonConfigTesterBase.java @@ -26,6 +26,7 @@ import com.google.inject.Injector; import com.google.inject.Module; import com.google.inject.name.Names; import io.druid.initialization.Initialization; +import io.druid.java.util.common.StringUtils; import org.easymock.EasyMock; import org.junit.Assert; import org.junit.Before; @@ -56,7 +57,7 @@ public abstract class JsonConfigTesterBase protected Properties testProperties = new Properties(); protected static String getPropertyKey(String fieldName){ - return String.format( + return StringUtils.format( "%s.%s", configPrefix, fieldName ); @@ -93,9 +94,9 @@ public abstract class JsonConfigTesterBase final String propertyKey = getPropertyKey(field); if (null != propertyKey) { field.setAccessible(true); - String getter = String.format( + String getter = StringUtils.format( "get%s%s", - field.getName().substring(0, 1).toUpperCase(), + StringUtils.toUpperCase(field.getName().substring(0, 1)), field.getName().substring(1) ); Method method = clazz.getDeclaredMethod(getter); diff --git a/server/src/test/java/io/druid/initialization/ZkPathsConfigTest.java b/server/src/test/java/io/druid/initialization/ZkPathsConfigTest.java index 879f3e4c54e..3848a2cfc60 100644 --- a/server/src/test/java/io/druid/initialization/ZkPathsConfigTest.java +++ b/server/src/test/java/io/druid/initialization/ZkPathsConfigTest.java @@ -25,6 +25,7 @@ import io.druid.guice.JsonConfigProvider; import io.druid.guice.JsonConfigTesterBase; import io.druid.guice.JsonConfigurator; import io.druid.guice.annotations.Json; +import io.druid.java.util.common.StringUtils; import io.druid.server.initialization.ZkPathsConfig; import org.apache.curator.utils.ZKPaths; import org.junit.Assert; @@ -48,18 +49,18 @@ public class ZkPathsConfigTest extends JsonConfigTesterBase JsonConfigProvider zkPathsConfig = JsonConfigProvider.of(configPrefix, ZkPathsConfig.class); 
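All of the call-site changes above funnel through one small locale-pinning helper. As a rough sketch of the shape such a utility takes (assumed here; the real io.druid.java.util.common.StringUtils may differ in detail), with the strict variant failing on a bad pattern exactly like String.format() and a non-strict variant for messages that must never throw:

```java
import java.util.IllegalFormatException;
import java.util.Locale;

public final class StringUtils
{
  private StringUtils() {}

  // Like String.format(), but pinned to Locale.ENGLISH so output does not
  // drift with the JVM's default locale; still throws on a bad pattern.
  public static String format(String message, Object... formatArgs)
  {
    return String.format(Locale.ENGLISH, message, formatArgs);
  }

  // Non-strict variant for log/alert text, where a malformed pattern should
  // degrade gracefully instead of masking the message being reported.
  public static String nonStrictFormat(String message, Object... formatArgs)
  {
    if (formatArgs == null || formatArgs.length == 0) {
      return message;
    }
    try {
      return String.format(Locale.ENGLISH, message, formatArgs);
    }
    catch (IllegalFormatException e) {
      StringBuilder bob = new StringBuilder(message);
      for (Object formatArg : formatArgs) {
        bob.append("; ").append(formatArg);
      }
      return bob.toString();
    }
  }

  // Locale-pinned case mapping; String.toUpperCase() with the default locale
  // famously turns "i" into the dotted capital "İ" under tr_TR, which would
  // break lookups such as the enum and operator-table keys patched below.
  public static String toUpperCase(String s)
  {
    return s.toUpperCase(Locale.ENGLISH);
  }

  public static String toLowerCase(String s)
  {
    return s.toLowerCase(Locale.ENGLISH);
  }
}
```
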
testProperties.clear(); String base = UUID.randomUUID().toString(); - testProperties.put(String.format("%s.base", configPrefix), base); + testProperties.put(StringUtils.format("%s.base", configPrefix), base); zkPathsConfig.inject(testProperties, configurator); propertyValues.clear(); - propertyValues.put(String.format("%s.base", configPrefix), base); - propertyValues.put(String.format("%s.propertiesPath", configPrefix), ZKPaths.makePath(base, "properties")); - propertyValues.put(String.format("%s.announcementsPath", configPrefix), ZKPaths.makePath(base, "announcements")); - propertyValues.put(String.format("%s.servedSegmentsPath", configPrefix), ZKPaths.makePath(base, "servedSegments")); - propertyValues.put(String.format("%s.liveSegmentsPath", configPrefix), ZKPaths.makePath(base, "segments")); - propertyValues.put(String.format("%s.coordinatorPath", configPrefix), ZKPaths.makePath(base, "coordinator")); - propertyValues.put(String.format("%s.loadQueuePath", configPrefix), ZKPaths.makePath(base, "loadQueue")); - propertyValues.put(String.format("%s.connectorPath", configPrefix), ZKPaths.makePath(base, "connector")); + propertyValues.put(StringUtils.format("%s.base", configPrefix), base); + propertyValues.put(StringUtils.format("%s.propertiesPath", configPrefix), ZKPaths.makePath(base, "properties")); + propertyValues.put(StringUtils.format("%s.announcementsPath", configPrefix), ZKPaths.makePath(base, "announcements")); + propertyValues.put(StringUtils.format("%s.servedSegmentsPath", configPrefix), ZKPaths.makePath(base, "servedSegments")); + propertyValues.put(StringUtils.format("%s.liveSegmentsPath", configPrefix), ZKPaths.makePath(base, "segments")); + propertyValues.put(StringUtils.format("%s.coordinatorPath", configPrefix), ZKPaths.makePath(base, "coordinator")); + propertyValues.put(StringUtils.format("%s.loadQueuePath", configPrefix), ZKPaths.makePath(base, "loadQueue")); + propertyValues.put(StringUtils.format("%s.connectorPath", configPrefix), ZKPaths.makePath(base, "connector")); ZkPathsConfig zkPathsConfigObj = zkPathsConfig.get().get(); validateEntries(zkPathsConfigObj); diff --git a/server/src/test/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinatorTest.java b/server/src/test/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinatorTest.java index 7c3a9390a08..f493d07298b 100644 --- a/server/src/test/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinatorTest.java +++ b/server/src/test/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinatorTest.java @@ -27,6 +27,7 @@ import io.druid.indexing.overlord.DataSourceMetadata; import io.druid.indexing.overlord.ObjectMetadata; import io.druid.indexing.overlord.SegmentPublishResult; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.StringUtils; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.LinearShardSpec; import io.druid.timeline.partition.NoneShardSpec; @@ -207,7 +208,7 @@ public class IndexerSQLMetadataStorageCoordinatorTest public Integer withHandle(Handle handle) throws Exception { return handle.createStatement( - String.format( + StringUtils.format( "UPDATE %s SET used = false WHERE id = :id", derbyConnectorRule.metadataTablesConfigSupplier().get().getSegmentsTable() ) diff --git a/server/src/test/java/io/druid/metadata/SQLMetadataConnectorTest.java b/server/src/test/java/io/druid/metadata/SQLMetadataConnectorTest.java index ace2bf6507c..660c985fa17 100644 --- a/server/src/test/java/io/druid/metadata/SQLMetadataConnectorTest.java +++ 
b/server/src/test/java/io/druid/metadata/SQLMetadataConnectorTest.java @@ -72,7 +72,7 @@ public class SQLMetadataConnectorTest { for (String table : tables) { Assert.assertTrue( - String.format("table %s was not created!", table), + StringUtils.format("table %s was not created!", table), connector.tableExists(handle, table) ); } @@ -131,7 +131,7 @@ public class SQLMetadataConnectorTest @Override public Void withHandle(Handle handle) throws Exception { - handle.createStatement(String.format("DROP TABLE %s", tableName)) + handle.createStatement(StringUtils.format("DROP TABLE %s", tableName)) .execute(); return null; } diff --git a/server/src/test/java/io/druid/metadata/SQLMetadataRuleManagerTest.java b/server/src/test/java/io/druid/metadata/SQLMetadataRuleManagerTest.java index 4d2eb5eff06..954388a5700 100644 --- a/server/src/test/java/io/druid/metadata/SQLMetadataRuleManagerTest.java +++ b/server/src/test/java/io/druid/metadata/SQLMetadataRuleManagerTest.java @@ -29,6 +29,7 @@ import io.druid.audit.AuditInfo; import io.druid.audit.AuditManager; import io.druid.client.DruidServer; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.StringUtils; import io.druid.server.audit.SQLAuditManager; import io.druid.server.audit.SQLAuditManagerConfig; import io.druid.server.coordinator.rules.IntervalLoadRule; @@ -201,7 +202,7 @@ public class SQLMetadataRuleManagerTest @Override public Void withHandle(Handle handle) throws Exception { - handle.createStatement(String.format("DROP TABLE %s", tableName)) + handle.createStatement(StringUtils.format("DROP TABLE %s", tableName)) .execute(); return null; } diff --git a/server/src/test/java/io/druid/metadata/SQLMetadataSupervisorManagerTest.java b/server/src/test/java/io/druid/metadata/SQLMetadataSupervisorManagerTest.java index 0b3fee42d00..deeb7aea8f9 100644 --- a/server/src/test/java/io/druid/metadata/SQLMetadataSupervisorManagerTest.java +++ b/server/src/test/java/io/druid/metadata/SQLMetadataSupervisorManagerTest.java @@ -25,6 +25,7 @@ import com.google.common.collect.ImmutableMap; import io.druid.indexing.overlord.supervisor.SupervisorSpec; import io.druid.indexing.overlord.supervisor.VersionedSupervisorSpec; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.StringUtils; import org.junit.After; import org.junit.Assert; import org.junit.Before; @@ -126,7 +127,7 @@ public class SQLMetadataSupervisorManagerTest @Override public Void withHandle(Handle handle) throws Exception { - handle.createStatement(String.format("DROP TABLE %s", tablesConfig.getSupervisorTable())) + handle.createStatement(StringUtils.format("DROP TABLE %s", tablesConfig.getSupervisorTable())) .execute(); return null; } diff --git a/server/src/test/java/io/druid/metadata/TestDerbyConnector.java b/server/src/test/java/io/druid/metadata/TestDerbyConnector.java index 983b445001b..f5e965ce6e3 100644 --- a/server/src/test/java/io/druid/metadata/TestDerbyConnector.java +++ b/server/src/test/java/io/druid/metadata/TestDerbyConnector.java @@ -21,6 +21,7 @@ package io.druid.metadata; import com.google.common.base.Supplier; import com.google.common.base.Suppliers; +import io.druid.java.util.common.StringUtils; import io.druid.metadata.storage.derby.DerbyConnector; import org.junit.Assert; import org.junit.rules.ExternalResource; @@ -60,7 +61,7 @@ public class TestDerbyConnector extends DerbyConnector catch (UnableToObtainConnectionException e) { SQLException cause = (SQLException) e.getCause(); // error code "08006" indicates proper shutdown - 
Assert.assertEquals(String.format("Derby not shutdown: [%s]", cause.toString()), "08006", cause.getSQLState()); + Assert.assertEquals(StringUtils.format("Derby not shutdown: [%s]", cause.toString()), "08006", cause.getSQLState()); } } diff --git a/server/src/test/java/io/druid/segment/loading/LocalDataSegmentPusherTest.java b/server/src/test/java/io/druid/segment/loading/LocalDataSegmentPusherTest.java index 447bfddfe62..19fb108ec81 100644 --- a/server/src/test/java/io/druid/segment/loading/LocalDataSegmentPusherTest.java +++ b/server/src/test/java/io/druid/segment/loading/LocalDataSegmentPusherTest.java @@ -24,6 +24,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.io.Files; import com.google.common.primitives.Ints; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.StringUtils; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.NoneShardSpec; import org.joda.time.Interval; @@ -143,7 +144,7 @@ public class LocalDataSegmentPusherTest config.storageDirectory = new File("druid"); Assert.assertEquals( - String.format("file:%s/druid", System.getProperty("user.dir")), + StringUtils.format("file:%s/druid", System.getProperty("user.dir")), new LocalDataSegmentPusher(config, new ObjectMapper()).getPathForHadoop() ); } diff --git a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorDriverTest.java b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorDriverTest.java index 93ceafaa0bf..b6d7b9d778b 100644 --- a/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorDriverTest.java +++ b/server/src/test/java/io/druid/segment/realtime/appenderator/AppenderatorDriverTest.java @@ -34,6 +34,7 @@ import io.druid.data.input.Committer; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.Granularity; import io.druid.query.SegmentDescriptor; @@ -165,7 +166,7 @@ public class AppenderatorDriverTest ImmutableList.of("dim2"), ImmutableMap.of( "dim2", - String.format("bar-%d", i), + StringUtils.format("bar-%d", i), "met1", 2.0 ) diff --git a/server/src/test/java/io/druid/server/audit/SQLAuditManagerTest.java b/server/src/test/java/io/druid/server/audit/SQLAuditManagerTest.java index 50974954241..64ac0b8eee5 100644 --- a/server/src/test/java/io/druid/server/audit/SQLAuditManagerTest.java +++ b/server/src/test/java/io/druid/server/audit/SQLAuditManagerTest.java @@ -24,6 +24,7 @@ import io.druid.audit.AuditEntry; import io.druid.audit.AuditInfo; import io.druid.audit.AuditManager; import io.druid.jackson.DefaultObjectMapper; +import io.druid.java.util.common.StringUtils; import io.druid.metadata.TestDerbyConnector; import io.druid.server.metrics.NoopServiceEmitter; import org.joda.time.DateTime; @@ -246,7 +247,7 @@ public class SQLAuditManagerTest @Override public Void withHandle(Handle handle) throws Exception { - handle.createStatement(String.format("DROP TABLE %s", tableName)) + handle.createStatement(StringUtils.format("DROP TABLE %s", tableName)) .execute(); return null; } diff --git a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerTester.java b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerTester.java index 4327d8720ec..ef4cb50e9c2 100644 --- 
a/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerTester.java +++ b/server/src/test/java/io/druid/server/coordinator/DruidCoordinatorBalancerTester.java @@ -20,6 +20,7 @@ package io.druid.server.coordinator; import io.druid.client.ImmutableDruidServer; +import io.druid.java.util.common.StringUtils; import io.druid.server.coordinator.helper.DruidCoordinatorBalancer; import io.druid.timeline.DataSegment; @@ -67,7 +68,7 @@ public class DruidCoordinatorBalancerTester extends DruidCoordinatorBalancer currentlyMovingSegments.get("normal").put(segmentName, segment); } catch (Exception e) { - log.info(e, String.format("[%s] : Moving exception", segmentName)); + log.info(e, StringUtils.format("[%s] : Moving exception", segmentName)); } } else { currentlyMovingSegments.get("normal").remove(segmentName); diff --git a/server/src/test/java/io/druid/server/http/security/ResourceFilterTestHelper.java b/server/src/test/java/io/druid/server/http/security/ResourceFilterTestHelper.java index ae317314b21..c594a14bd2b 100644 --- a/server/src/test/java/io/druid/server/http/security/ResourceFilterTestHelper.java +++ b/server/src/test/java/io/druid/server/http/security/ResourceFilterTestHelper.java @@ -32,6 +32,7 @@ import com.google.inject.Module; import com.sun.jersey.spi.container.ContainerRequest; import com.sun.jersey.spi.container.ResourceFilter; import com.sun.jersey.spi.container.ResourceFilters; +import io.druid.java.util.common.StringUtils; import io.druid.server.security.Access; import io.druid.server.security.Action; import io.druid.server.security.AuthConfig; @@ -216,7 +217,7 @@ public class ResourceFilterTestHelper { if (method.getAnnotation(Path.class) != null) { return new Object[]{ - String.format("%s%s", basepath, method.getAnnotation(Path.class).value()), + StringUtils.format("%s%s", basepath, method.getAnnotation(Path.class).value()), input.getAnnotation(GET.class) == null ? (method.getAnnotation(DELETE.class) == null ? 
"POST" : "DELETE") : "GET", diff --git a/server/src/test/java/io/druid/server/shard/SingleDimensionShardSpecTest.java b/server/src/test/java/io/druid/server/shard/SingleDimensionShardSpecTest.java index 9ec6af6924e..18d2412de0e 100644 --- a/server/src/test/java/io/druid/server/shard/SingleDimensionShardSpecTest.java +++ b/server/src/test/java/io/druid/server/shard/SingleDimensionShardSpecTest.java @@ -29,6 +29,7 @@ import com.google.common.collect.Maps; import io.druid.data.input.InputRow; import io.druid.data.input.MapBasedInputRow; import io.druid.java.util.common.Pair; +import io.druid.java.util.common.StringUtils; import io.druid.timeline.partition.SingleDimensionShardSpec; import org.junit.Assert; import org.junit.Test; @@ -112,7 +113,7 @@ public class SingleDimensionShardSpecTest } ) ); - Assert.assertEquals(String.format("spec[%s], row[%s]", spec, inputRow), pair.lhs, spec.isInChunk(inputRow.getTimestampFromEpoch(), inputRow)); + Assert.assertEquals(StringUtils.format("spec[%s], row[%s]", spec, inputRow), pair.lhs, spec.isInChunk(inputRow.getTimestampFromEpoch(), inputRow)); } } } diff --git a/services/src/main/java/io/druid/cli/DumpSegment.java b/services/src/main/java/io/druid/cli/DumpSegment.java index d377e50ec4e..cb73808c9e3 100644 --- a/services/src/main/java/io/druid/cli/DumpSegment.java +++ b/services/src/main/java/io/druid/cli/DumpSegment.java @@ -47,6 +47,7 @@ import io.druid.guice.QueryableModule; import io.druid.guice.annotations.Json; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.guava.Accumulator; import io.druid.java.util.common.guava.Sequence; @@ -172,7 +173,7 @@ public class DumpSegment extends GuiceRunnable final DumpType dumpType; try { - dumpType = DumpType.valueOf(dumpTypeString.toUpperCase()); + dumpType = DumpType.valueOf(StringUtils.toUpperCase(dumpTypeString)); } catch (Exception e) { throw new IAE("Not a valid dump type: %s", dumpTypeString); diff --git a/services/src/main/java/io/druid/cli/PullDependencies.java b/services/src/main/java/io/druid/cli/PullDependencies.java index 8bdbeb6fbf1..a9dfe328e54 100644 --- a/services/src/main/java/io/druid/cli/PullDependencies.java +++ b/services/src/main/java/io/druid/cli/PullDependencies.java @@ -24,7 +24,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.inject.Inject; - import io.airlift.airline.Command; import io.airlift.airline.Option; import io.druid.guice.ExtensionsConfig; @@ -176,7 +175,7 @@ public class PullDependencies implements Runnable title = "A local repository that Maven will use to put downloaded files. 
Then pull-deps will lay these files out into the extensions directory as needed.", required = false ) - public String localRepository = String.format("%s/%s", System.getProperty("user.home"), ".m2/repository"); + public String localRepository = StringUtils.format("%s/%s", System.getProperty("user.home"), ".m2/repository"); @Option( name = {"-r", "--remoteRepository"}, @@ -314,7 +313,7 @@ public class PullDependencies implements Runnable { String scope = node.getDependency().getScope(); if (scope != null) { - scope = scope.toLowerCase(); + scope = StringUtils.toLowerCase(scope); if (scope.equals("provided")) { return false; } @@ -473,11 +472,9 @@ public class PullDependencies implements Runnable if (!atLocation.mkdir()) { throw new ISE( - String.format( - "Unable to create directory at [%s] for coordinate [%s]", - atLocation.getAbsolutePath(), - coordinate - ) + "Unable to create directory at [%s] for coordinate [%s]", + atLocation.getAbsolutePath(), + coordinate ); } } diff --git a/services/src/main/java/io/druid/cli/convert/DatabasePropertiesConverter.java b/services/src/main/java/io/druid/cli/convert/DatabasePropertiesConverter.java index cb22683c86c..b5cf21869c0 100644 --- a/services/src/main/java/io/druid/cli/convert/DatabasePropertiesConverter.java +++ b/services/src/main/java/io/druid/cli/convert/DatabasePropertiesConverter.java @@ -22,6 +22,7 @@ package io.druid.cli.convert; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; +import io.druid.java.util.common.StringUtils; import java.util.List; import java.util.Map; @@ -88,8 +89,8 @@ public class DatabasePropertiesConverter implements PropertyConverter { final String value = properties.getProperty(property); if (value != null) { - if (!value.equals(String.format("%s_%s", tablePrefix, tablename))) { - retVal.put(String.format("druid.db.tables.%s", tablename), value); + if (!value.equals(StringUtils.format("%s_%s", tablePrefix, tablename))) { + retVal.put(StringUtils.format("druid.db.tables.%s", tablename), value); } } } diff --git a/services/src/main/java/io/druid/cli/convert/IndexCacheConverter.java b/services/src/main/java/io/druid/cli/convert/IndexCacheConverter.java index 777e9b6a65c..e0c53aa09fd 100644 --- a/services/src/main/java/io/druid/cli/convert/IndexCacheConverter.java +++ b/services/src/main/java/io/druid/cli/convert/IndexCacheConverter.java @@ -20,6 +20,7 @@ package io.druid.cli.convert; import com.google.common.collect.ImmutableMap; +import io.druid.java.util.common.StringUtils; import java.util.Map; import java.util.Properties; @@ -44,7 +45,7 @@ public class IndexCacheConverter implements PropertyConverter return ImmutableMap.of( "druid.segmentCache.locations", - String.format( + StringUtils.format( "[{\"path\": \"%s\", \"maxSize\": %s}]", value, properties.getProperty("druid.server.maxSize") ) ); diff --git a/services/src/main/java/io/druid/cli/validate/DruidJsonValidator.java b/services/src/main/java/io/druid/cli/validate/DruidJsonValidator.java index 99e7abaa2b2..e4b0c9d7af5 100644 --- a/services/src/main/java/io/druid/cli/validate/DruidJsonValidator.java +++ b/services/src/main/java/io/druid/cli/validate/DruidJsonValidator.java @@ -119,7 +119,7 @@ public class DruidJsonValidator extends GuiceRunnable { File file = new File(jsonFile); if (!file.exists()) { - System.out.printf("File[%s] does not exist.%n", file); + LOG.info("File[%s] does not exist.%n", file); } final Injector injector = makeInjector(); @@ -205,7 +205,7 @@ public class 
DruidJsonValidator extends GuiceRunnable } } catch (Exception e) { - System.out.println("INVALID JSON!"); + LOG.error(e, "INVALID JSON!"); throw Throwables.propagate(e); } } diff --git a/services/src/test/java/io/druid/cli/PullDependenciesTest.java b/services/src/test/java/io/druid/cli/PullDependenciesTest.java index 2263ddfec86..f55e949cfe9 100644 --- a/services/src/test/java/io/druid/cli/PullDependenciesTest.java +++ b/services/src/test/java/io/druid/cli/PullDependenciesTest.java @@ -23,6 +23,7 @@ import com.google.common.base.Throwables; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import io.druid.guice.ExtensionsConfig; +import io.druid.java.util.common.StringUtils; import io.tesla.aether.internal.DefaultTeslaAether; import org.eclipse.aether.artifact.Artifact; import org.eclipse.aether.artifact.DefaultArtifact; @@ -138,7 +139,7 @@ public class PullDependenciesTest final String version = artifact.getVersion(); for (int i = 0; i < jarNames.size(); ++i) { expectedJars[i] = new File( - String.format( + StringUtils.format( "%s/%s/%s/%s", rootHadoopDependenciesDir, artifactId, @@ -149,7 +150,7 @@ public class PullDependenciesTest } } else { for (int i = 0; i < jarNames.size(); ++i) { - expectedJars[i] = new File(String.format("%s/%s/%s", rootExtensionsDir, artifactId, jarNames.get(i))); + expectedJars[i] = new File(StringUtils.format("%s/%s/%s", rootExtensionsDir, artifactId, jarNames.get(i))); } } return expectedJars; diff --git a/sql/src/main/java/io/druid/sql/avatica/DruidMeta.java b/sql/src/main/java/io/druid/sql/avatica/DruidMeta.java index 840a04d283c..222c063f654 100644 --- a/sql/src/main/java/io/druid/sql/avatica/DruidMeta.java +++ b/sql/src/main/java/io/druid/sql/avatica/DruidMeta.java @@ -30,6 +30,7 @@ import com.google.common.collect.Iterables; import com.google.common.util.concurrent.ThreadFactoryBuilder; import com.google.inject.Inject; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.logger.Logger; import io.druid.sql.calcite.planner.Calcites; import io.druid.sql.calcite.planner.PlannerFactory; @@ -78,7 +79,7 @@ public class DruidMeta extends MetaImpl this.config = config; this.exec = Executors.newSingleThreadScheduledExecutor( new ThreadFactoryBuilder() - .setNameFormat(String.format("DruidMeta@%s-ScheduledExecutor", Integer.toHexString(hashCode()))) + .setNameFormat(StringUtils.format("DruidMeta@%s-ScheduledExecutor", Integer.toHexString(hashCode()))) .setDaemon(true) .build() ); diff --git a/sql/src/main/java/io/druid/sql/avatica/DruidStatement.java b/sql/src/main/java/io/druid/sql/avatica/DruidStatement.java index a1ef27e731f..07e8e26f588 100644 --- a/sql/src/main/java/io/druid/sql/avatica/DruidStatement.java +++ b/sql/src/main/java/io/druid/sql/avatica/DruidStatement.java @@ -24,6 +24,7 @@ import com.google.common.base.Throwables; import com.google.common.collect.ImmutableMap; import io.druid.concurrent.Execs; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; import io.druid.java.util.common.guava.Yielder; @@ -103,7 +104,7 @@ public class DruidStatement implements Closeable this.queryContext = queryContext == null ? 
ImmutableMap.of() : queryContext; this.onClose = Preconditions.checkNotNull(onClose, "onClose"); this.yielderOpenCloseExecutor = Execs.singleThreaded( - String.format("JDBCYielderOpenCloseExecutor-connection-%s-statement-%d", connectionId, statementId) + StringUtils.format("JDBCYielderOpenCloseExecutor-connection-%s-statement-%d", connectionId, statementId) ); } diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/Expressions.java b/sql/src/main/java/io/druid/sql/calcite/expression/Expressions.java index a6bcc94d288..0e5db76f3fb 100644 --- a/sql/src/main/java/io/druid/sql/calcite/expression/Expressions.java +++ b/sql/src/main/java/io/druid/sql/calcite/expression/Expressions.java @@ -28,6 +28,7 @@ import com.google.common.io.BaseEncoding; import com.google.common.primitives.Chars; import io.druid.java.util.common.IAE; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularity; import io.druid.math.expr.ExprType; import io.druid.query.aggregation.PostAggregator; @@ -298,7 +299,7 @@ public class Expressions throw new ISE("WTF?! Expression referred to nonexistent index[%d]", ref.getIndex()); } - return String.format("\"%s\"", escape(columnName)); + return StringUtils.format("\"%s\"", escape(columnName)); } else if (kind == SqlKind.CAST || kind == SqlKind.REINTERPRET) { // Translate casts. final RexNode operand = ((RexCall) expression).getOperands().get(0); @@ -310,7 +311,7 @@ public class Expressions final ExprType fromType = MATH_TYPES.get(operand.getType().getSqlTypeName()); final ExprType toType = MATH_TYPES.get(sqlTypeName); if (fromType != toType) { - return String.format("CAST(%s, '%s')", operandExpression, toType.toString()); + return StringUtils.format("CAST(%s, '%s')", operandExpression, toType.toString()); } else { return operandExpression; } @@ -330,7 +331,7 @@ public class Expressions SqlKind.MINUS, "-" ).get(kind); - return String.format("(%s %s %s)", lhsExpression, op, rhsExpression); + return StringUtils.format("(%s %s %s)", lhsExpression, op, rhsExpression); } else if (kind == SqlKind.OTHER_FUNCTION) { final String calciteFunction = ((RexCall) expression).getOperator().getName(); final String druidFunction = MATH_FUNCTIONS.get(calciteFunction); @@ -347,14 +348,14 @@ public class Expressions if ("MOD".equals(calciteFunction)) { // Special handling for MOD, which is a function in Calcite but a binary operator in Druid. Preconditions.checkState(functionArgs.size() == 2, "WTF?! Expected 2 args for MOD."); - return String.format("(%s %s %s)", functionArgs.get(0), "%", functionArgs.get(1)); + return StringUtils.format("(%s %s %s)", functionArgs.get(0), "%", functionArgs.get(1)); } if (druidFunction == null) { return null; } - return String.format("%s(%s)", druidFunction, Joiner.on(", ").join(functionArgs)); + return StringUtils.format("%s(%s)", druidFunction, Joiner.on(", ").join(functionArgs)); } else if (kind == SqlKind.LITERAL) { // Translate literal. 
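The Expressions hunks above assemble Druid expression syntax from format strings ("CAST(%s, '%s')", "(%s %s %s)", and so on), which is precisely where default-locale formatting can corrupt machine-readable output. These particular patterns only use %s, so the change is largely defensive; a hypothetical numeric placeholder shows the failure mode being guarded against:

```java
import java.util.Locale;

public class LocaleFormatDemo
{
  public static void main(String[] args)
  {
    // Under a German default locale, %f renders the decimal separator as a
    // comma, so the generated expression would no longer parse:
    System.out.println(String.format(Locale.GERMANY, "CAST(%f, 'DOUBLE')", 1.5));
    // -> CAST(1,500000, 'DOUBLE')

    // Pinning Locale.ENGLISH keeps the generated syntax stable everywhere:
    System.out.println(String.format(Locale.ENGLISH, "CAST(%f, 'DOUBLE')", 1.5));
    // -> CAST(1.500000, 'DOUBLE')
  }
}
```
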
if (SqlTypeName.NUMERIC_TYPES.contains(sqlTypeName)) { diff --git a/sql/src/main/java/io/druid/sql/calcite/expression/RowExtraction.java b/sql/src/main/java/io/druid/sql/calcite/expression/RowExtraction.java index b8f9b44d579..42e1aa0f0e7 100644 --- a/sql/src/main/java/io/druid/sql/calcite/expression/RowExtraction.java +++ b/sql/src/main/java/io/druid/sql/calcite/expression/RowExtraction.java @@ -20,6 +20,7 @@ package io.druid.sql.calcite.expression; import com.google.common.base.Preconditions; +import io.druid.java.util.common.StringUtils; import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.query.dimension.DimensionSpec; import io.druid.query.dimension.ExtractionDimensionSpec; @@ -184,7 +185,7 @@ public class RowExtraction public String toString() { if (extractionFn != null) { - return String.format("%s(%s)", extractionFn, column); + return StringUtils.format("%s(%s)", extractionFn, column); } else { return column; } diff --git a/sql/src/main/java/io/druid/sql/calcite/planner/Calcites.java b/sql/src/main/java/io/druid/sql/calcite/planner/Calcites.java index 59f8943994c..5856deaaba6 100644 --- a/sql/src/main/java/io/druid/sql/calcite/planner/Calcites.java +++ b/sql/src/main/java/io/druid/sql/calcite/planner/Calcites.java @@ -21,6 +21,7 @@ package io.druid.sql.calcite.planner; import com.google.common.io.BaseEncoding; import com.google.common.primitives.Chars; +import io.druid.java.util.common.StringUtils; import io.druid.segment.column.ValueType; import io.druid.sql.calcite.schema.DruidSchema; import io.druid.sql.calcite.schema.InformationSchema; @@ -37,6 +38,7 @@ import org.joda.time.Days; import java.nio.charset.Charset; import java.util.Calendar; +import java.util.Locale; /** * Utility functions for Calcite. @@ -58,7 +60,7 @@ public class Calcites final String charset = ConversionUtil.NATIVE_UTF16_CHARSET_NAME; System.setProperty("saffron.default.charset", Calcites.defaultCharset().name()); System.setProperty("saffron.default.nationalcharset", Calcites.defaultCharset().name()); - System.setProperty("saffron.default.collation.name", String.format("%s$en_US", charset)); + System.setProperty("saffron.default.collation.name", StringUtils.format("%s$en_US", charset)); } public static Charset defaultCharset() @@ -153,7 +155,7 @@ public class Calcites */ public static Calendar jodaToCalciteCalendarLiteral(final DateTime dateTime, final DateTimeZone timeZone) { - final Calendar calendar = Calendar.getInstance(); + final Calendar calendar = Calendar.getInstance(Locale.ENGLISH); calendar.setTimeInMillis(Calcites.jodaToCalciteTimestamp(dateTime, timeZone)); return calendar; } diff --git a/sql/src/main/java/io/druid/sql/calcite/planner/DruidOperatorTable.java b/sql/src/main/java/io/druid/sql/calcite/planner/DruidOperatorTable.java index 827eb4546ec..9e1aab354f0 100644 --- a/sql/src/main/java/io/druid/sql/calcite/planner/DruidOperatorTable.java +++ b/sql/src/main/java/io/druid/sql/calcite/planner/DruidOperatorTable.java @@ -22,6 +22,7 @@ package io.druid.sql.calcite.planner; import com.google.common.collect.Maps; import com.google.inject.Inject; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.StringUtils; import io.druid.sql.calcite.aggregation.SqlAggregator; import io.druid.sql.calcite.expression.SqlExtractionOperator; import org.apache.calcite.sql.SqlFunctionCategory; @@ -54,14 +55,14 @@ public class DruidOperatorTable implements SqlOperatorTable this.extractionOperators = Maps.newHashMap(); for (SqlAggregator aggregator : aggregators) { - final String 
lcname = aggregator.calciteFunction().getName().toLowerCase(); + final String lcname = StringUtils.toLowerCase(aggregator.calciteFunction().getName()); if (this.aggregators.put(lcname, aggregator) != null) { throw new ISE("Cannot have two operators with name[%s]", lcname); } } for (SqlExtractionOperator extractionFunction : extractionOperators) { - final String lcname = extractionFunction.calciteFunction().getName().toLowerCase(); + final String lcname = StringUtils.toLowerCase(extractionFunction.calciteFunction().getName()); if (this.aggregators.containsKey(lcname) || this.extractionOperators.put(lcname, extractionFunction) != null) { throw new ISE("Cannot have two operators with name[%s]", lcname); } @@ -70,12 +71,12 @@ public class DruidOperatorTable implements SqlOperatorTable public SqlAggregator lookupAggregator(final String opName) { - return aggregators.get(opName.toLowerCase()); + return aggregators.get(StringUtils.toLowerCase(opName)); } public SqlExtractionOperator lookupExtractionOperator(final SqlKind kind, final String opName) { - final SqlExtractionOperator extractionOperator = extractionOperators.get(opName.toLowerCase()); + final SqlExtractionOperator extractionOperator = extractionOperators.get(StringUtils.toLowerCase(opName)); if (extractionOperator != null && extractionOperator.calciteFunction().getKind() == kind) { return extractionOperator; } else { @@ -92,12 +93,13 @@ public class DruidOperatorTable implements SqlOperatorTable ) { if (opName.names.size() == 1 && syntax == SqlSyntax.FUNCTION) { - final SqlAggregator aggregator = aggregators.get(opName.getSimple().toLowerCase()); + final SqlAggregator aggregator = aggregators.get(StringUtils.toLowerCase(opName.getSimple())); if (aggregator != null) { operatorList.add(aggregator.calciteFunction()); } - final SqlExtractionOperator extractionFunction = extractionOperators.get(opName.getSimple().toLowerCase()); + final SqlExtractionOperator extractionFunction = + extractionOperators.get(StringUtils.toLowerCase(opName.getSimple())); if (extractionFunction != null) { operatorList.add(extractionFunction.calciteFunction()); } diff --git a/sql/src/main/java/io/druid/sql/calcite/rel/DruidSemiJoin.java b/sql/src/main/java/io/druid/sql/calcite/rel/DruidSemiJoin.java index 26f8265ae54..aa67e22e9c7 100644 --- a/sql/src/main/java/io/druid/sql/calcite/rel/DruidSemiJoin.java +++ b/sql/src/main/java/io/druid/sql/calcite/rel/DruidSemiJoin.java @@ -22,6 +22,7 @@ package io.druid.sql.calcite.rel; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.collect.Sets; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.guava.Accumulator; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -225,7 +226,7 @@ public class DruidSemiJoin extends DruidRel values.add(stringValue); if (values.size() > maxSemiJoinRowsInMemory) { throw new ResourceLimitExceededException( - String.format("maxSemiJoinRowsInMemory[%,d] exceeded", maxSemiJoinRowsInMemory) + StringUtils.format("maxSemiJoinRowsInMemory[%,d] exceeded", maxSemiJoinRowsInMemory) ); } } diff --git a/sql/src/main/java/io/druid/sql/calcite/schema/DruidSchema.java b/sql/src/main/java/io/druid/sql/calcite/schema/DruidSchema.java index d54a07f2f03..48d9af42aa9 100644 --- a/sql/src/main/java/io/druid/sql/calcite/schema/DruidSchema.java +++ b/sql/src/main/java/io/druid/sql/calcite/schema/DruidSchema.java @@ -38,6 +38,7 @@ import io.druid.client.ServerView; import 
io.druid.client.TimelineServerView; import io.druid.common.utils.JodaUtils; import io.druid.guice.ManageLifecycle; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.concurrent.ScheduledExecutors; import io.druid.java.util.common.guava.Sequence; import io.druid.java.util.common.guava.Sequences; @@ -352,7 +353,7 @@ public class DruidSchema extends AbstractSchema if (!columnTypes.containsKey(entry.getKey()) || timestamp >= maxTimestamp) { ValueType valueType; try { - valueType = ValueType.valueOf(entry.getValue().getType().toUpperCase()); + valueType = ValueType.valueOf(StringUtils.toUpperCase(entry.getValue().getType())); } catch (IllegalArgumentException e) { // Assume unrecognized types are some flavor of COMPLEX. This throws away information about exactly diff --git a/sql/src/test/java/io/druid/sql/avatica/DruidAvaticaHandlerTest.java b/sql/src/test/java/io/druid/sql/avatica/DruidAvaticaHandlerTest.java index 8aa2b95077e..e3103a5416c 100644 --- a/sql/src/test/java/io/druid/sql/avatica/DruidAvaticaHandlerTest.java +++ b/sql/src/test/java/io/druid/sql/avatica/DruidAvaticaHandlerTest.java @@ -31,6 +31,7 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import io.druid.java.util.common.Pair; +import io.druid.java.util.common.StringUtils; import io.druid.math.expr.ExprMacroTable; import io.druid.server.DruidNode; import io.druid.server.initialization.ServerConfig; @@ -136,7 +137,7 @@ public class DruidAvaticaHandlerTest server = new Server(new InetSocketAddress("127.0.0.1", port)); server.setHandler(handler); server.start(); - url = String.format( + url = StringUtils.format( "jdbc:avatica:remote:url=http://127.0.0.1:%d%s", port, DruidAvaticaHandler.AVATICA_PATH @@ -381,7 +382,7 @@ public class DruidAvaticaHandlerTest Executors.newFixedThreadPool(AVATICA_CONFIG.getMaxStatementsPerConnection()) ); for (int i = 0; i < 2000; i++) { - final String query = String.format("SELECT COUNT(*) + %s AS ci FROM foo", i); + final String query = StringUtils.format("SELECT COUNT(*) + %s AS ci FROM foo", i); futures.add( exec.submit(() -> { try ( @@ -581,7 +582,7 @@ public class DruidAvaticaHandlerTest Server smallFrameServer = new Server(new InetSocketAddress("127.0.0.1", port)); smallFrameServer.setHandler(handler); smallFrameServer.start(); - String smallFrameUrl = String.format( + String smallFrameUrl = StringUtils.format( "jdbc:avatica:remote:url=http://127.0.0.1:%d%s", port, DruidAvaticaHandler.AVATICA_PATH diff --git a/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java index 6b82125ea64..d06284f2ccb 100644 --- a/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java +++ b/sql/src/test/java/io/druid/sql/calcite/CalciteQueryTest.java @@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import io.druid.hll.HLLCV1; +import io.druid.java.util.common.StringUtils; import io.druid.java.util.common.granularity.Granularities; import io.druid.java.util.common.granularity.PeriodGranularity; import io.druid.java.util.common.guava.Sequences; @@ -4288,10 +4289,10 @@ public class CalciteQueryTest log.info("row #%d: %s", i, Arrays.toString(results.get(i))); } - Assert.assertEquals(String.format("result count: %s", sql), expectedResults.size(), results.size()); + 
Assert.assertEquals(StringUtils.format("result count: %s", sql), expectedResults.size(), results.size()); for (int i = 0; i < results.size(); i++) { Assert.assertArrayEquals( - String.format("result #%d: %s", i + 1, sql), + StringUtils.format("result #%d: %s", i + 1, sql), expectedResults.get(i), results.get(i) ); @@ -4301,13 +4302,13 @@ public class CalciteQueryTest final List recordedQueries = queryLogHook.getRecordedQueries(); Assert.assertEquals( - String.format("query count: %s", sql), + StringUtils.format("query count: %s", sql), expectedQueries.size(), recordedQueries.size() ); for (int i = 0; i < expectedQueries.size(); i++) { Assert.assertEquals( - String.format("query #%d: %s", i + 1, sql), + StringUtils.format("query #%d: %s", i + 1, sql), expectedQueries.get(i), recordedQueries.get(i) ); diff --git a/sql/src/test/java/io/druid/sql/calcite/util/SpecificSegmentsQuerySegmentWalker.java b/sql/src/test/java/io/druid/sql/calcite/util/SpecificSegmentsQuerySegmentWalker.java index a96596555d6..5c3f8a97ba1 100644 --- a/sql/src/test/java/io/druid/sql/calcite/util/SpecificSegmentsQuerySegmentWalker.java +++ b/sql/src/test/java/io/druid/sql/calcite/util/SpecificSegmentsQuerySegmentWalker.java @@ -27,6 +27,7 @@ import com.google.common.collect.Ordering; import com.google.common.io.Closeables; import com.google.common.util.concurrent.MoreExecutors; import io.druid.java.util.common.ISE; +import io.druid.java.util.common.UOE; import io.druid.java.util.common.guava.FunctionalIterable; import io.druid.java.util.common.guava.Sequence; import io.druid.query.FinalizeResultsQueryRunner; @@ -204,9 +205,7 @@ public class SpecificSegmentsQuerySegmentWalker implements QuerySegmentWalker, C if (query.getDataSource() instanceof TableDataSource) { return timelines.get(((TableDataSource) query.getDataSource()).getName()); } else { - throw new UnsupportedOperationException( - String.format("DataSource type[%s] unsupported", query.getDataSource().getClass().getName()) - ); + throw new UOE("DataSource type[%s] unsupported", query.getDataSource().getClass().getName()); } }
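
The final hunk swaps a hand-formatted UnsupportedOperationException for UOE, one of the format-aware exception shorthands this patch leans on throughout (the PullDependencies change above removed the same kind of redundant inner format around ISE arguments). A minimal sketch of the assumed shape, reusing the StringUtils sketch from earlier (the real io.druid.java.util.common.UOE may differ):

```java
// Assumed shape: the exception formats its arguments internally, so callers
// pass a pattern plus args and never pre-format the message themselves.
public class UOE extends UnsupportedOperationException
{
  public UOE(String formatText, Object... arguments)
  {
    super(StringUtils.nonStrictFormat(formatText, arguments));
  }
}
```

With that constructor, the throw site above collapses to a one-liner while producing the same message, and the formatting stays out of the control flow at every call site.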