com.fasterxml.jackson.databind.ObjectMapper#reader(com.fasterxml.jackson.core.type.TypeReference) @ Use ObjectMapper#readerFor instead
com.fasterxml.jackson.databind.ObjectMapper#reader(com.fasterxml.jackson.databind.JavaType) @ Use ObjectMapper#readerFor instead
com.fasterxml.jackson.databind.ObjectMapper#reader(java.lang.Class) @ Use ObjectMapper#readerFor instead
com.fasterxml.jackson.databind.ObjectMapper#writeValue(com.fasterxml.jackson.core.JsonGenerator, java.lang.Object) @ Use JacksonUtils#writeObjectUsingSerializerProvider to allow SerializerProvider reuse
com.fasterxml.jackson.core.JsonGenerator#writeObject(java.lang.Object) @ Use JacksonUtils#writeObjectUsingSerializerProvider to allow SerializerProvider reuse
com.google.common.base.Charsets @ Use java.nio.charset.StandardCharsets instead
com.google.common.collect.Iterators#emptyIterator() @ Use java.util.Collections#emptyIterator()
com.google.common.collect.Iterators#mergeSorted(java.lang.Iterable,java.util.Comparator) @ Use org.apache.druid.java.util.common.collect.Utils#mergeSorted()
com.google.common.collect.Lists#newArrayList() @ Create java.util.ArrayList directly
com.google.common.collect.Lists#newLinkedList() @ Use ArrayList or ArrayDeque instead
com.google.common.collect.Lists#newLinkedList(java.lang.Iterable) @ Use ArrayList or ArrayDeque instead
com.google.common.collect.MapMaker @ Create java.util.concurrent.ConcurrentHashMap directly
com.google.common.collect.Maps#newConcurrentMap() @ Create java.util.concurrent.ConcurrentHashMap directly
com.google.common.collect.Maps#newHashMap() @ Create java.util.HashMap directly
com.google.common.collect.Maps#newHashMap(java.util.Map) @ Create java.util.HashMap directly
com.google.common.collect.Maps#newTreeMap() @ Create java.util.TreeMap directly
com.google.common.collect.Maps#newTreeMap(java.util.Comparator) @ Create java.util.TreeMap directly
com.google.common.collect.Maps#newTreeMap(java.util.SortedMap) @ Create java.util.TreeMap directly
com.google.common.collect.Queues#newArrayDeque() @ Create java.util.ArrayDeque directly
com.google.common.collect.Queues#newConcurrentLinkedQueue() @ Create java.util.concurrent.ConcurrentLinkedQueue directly
com.google.common.collect.Queues#newLinkedBlockingQueue() @ Create java.util.concurrent.LinkedBlockingQueue directly
com.google.common.collect.Sets#newHashSet() @ Create java.util.HashSet directly
com.google.common.collect.Sets#newLinkedHashSet() @ Create java.util.LinkedHashSet directly
com.google.common.collect.Sets#newTreeSet() @ Create java.util.TreeSet directly
com.google.common.collect.Sets#newTreeSet(java.util.Comparator) @ Create java.util.TreeSet directly
com.google.common.io.Files#createTempDir() @ Use org.apache.druid.java.util.common.FileUtils.createTempDir()
java.io.File#mkdirs() @ Use org.apache.druid.java.util.common.FileUtils.mkdirp instead
java.io.File#toURL() @ Use java.io.File#toURI() and java.net.URI#toURL() instead
java.lang.String#matches(java.lang.String) @ Use startsWith(), endsWith(), contains(), or compile and cache a Pattern explicitly
java.lang.String#replace(java.lang.CharSequence,java.lang.CharSequence) @ Use one of the appropriate methods in StringUtils instead
java.lang.String#replaceAll(java.lang.String,java.lang.String) @ Use one of the appropriate methods in StringUtils instead, or compile and cache a Pattern explicitly
java.lang.String#replaceFirst(java.lang.String,java.lang.String) @ Use String.indexOf() and substring methods, or compile and cache a Pattern explicitly
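# Illustrative sketch only, kept as a comment so this signatures file still parses: the preferred
# alternatives to String#matches / String#replaceAll and ObjectMapper#reader above. Assumes
# java.util.regex.Pattern and com.fasterxml.jackson.databind.ObjectReader imports; "mapper",
# "input", and "MyType" are placeholder names, not names from this file.
#
#   private static final Pattern DIGITS = Pattern.compile("[0-9]+");   // compiled once and cached
#   boolean allDigits = DIGITS.matcher(input).matches();               // instead of input.matches("[0-9]+")
#   String stripped = DIGITS.matcher(input).replaceAll("");            // instead of input.replaceAll("[0-9]+", "")
#   ObjectReader reader = mapper.readerFor(MyType.class);              // instead of mapper.reader(MyType.class)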
java.nio.file.Files#createTempDirectory(java.lang.String,java.nio.file.attribute.FileAttribute[]) @ Use org.apache.druid.java.util.common.FileUtils.createTempDir()
java.nio.file.Files#createTempDirectory(java.nio.file.Path,java.lang.String,java.nio.file.attribute.FileAttribute[]) @ Use org.apache.druid.java.util.common.FileUtils.createTempDir()
java.util.HashMap#<init>(int) @ Use com.google.common.collect.Maps#newHashMapWithExpectedSize(int) instead
java.util.HashMap#<init>(int, float) @ Use com.google.common.collect.Maps#newHashMapWithExpectedSize(int) instead
java.util.LinkedHashMap#<init>(int) @ Use org.apache.druid.utils.CollectionUtils#newLinkedHashMapWithExpectedSize(int) instead
java.util.LinkedHashMap#<init>(int, float) @ Use org.apache.druid.utils.CollectionUtils#newLinkedHashMapWithExpectedSize(int) instead
java.util.HashSet#<init>(int) @ Use com.google.common.collect.Sets#newHashSetWithExpectedSize(int) instead
java.util.HashSet#<init>(int, float) @ Use com.google.common.collect.Sets#newHashSetWithExpectedSize(int) instead
java.util.LinkedHashSet#<init>(int) @ Use com.google.common.collect.Sets#newLinkedHashSetWithExpectedSize(int) instead
java.util.LinkedHashSet#<init>(int, float) @ Use com.google.common.collect.Sets#newLinkedHashSetWithExpectedSize(int) instead
java.util.LinkedList @ Use ArrayList or ArrayDeque instead
java.util.Random#<init>() @ Use ThreadLocalRandom.current() or the constructor with a seed (the latter in tests only!)
java.lang.Math#random() @ Use ThreadLocalRandom.current()
java.util.regex.Pattern#matches(java.lang.String,java.lang.CharSequence) @ Use String.startsWith(), endsWith(), contains(), or compile and cache a Pattern explicitly
org.apache.calcite.sql.type.OperandTypes#LITERAL @ LITERAL type checker throws when literals with CAST are passed. Use org.apache.druid.sql.calcite.expression.DefaultOperandTypeChecker instead.
org.apache.calcite.sql.type.OperandTypes#BOOLEAN_LITERAL @ Create a type checker like org.apache.calcite.sql.type.POSITIVE_INTEGER_LITERAL and use that instead
org.apache.calcite.sql.type.OperandTypes#ARRAY_BOOLEAN_LITERAL @ Create a type checker like org.apache.calcite.sql.type.POSITIVE_INTEGER_LITERAL and use that instead
org.apache.calcite.sql.type.OperandTypes#POSITIVE_INTEGER_LITERAL @ Use org.apache.calcite.sql.type.POSITIVE_INTEGER_LITERAL instead
org.apache.calcite.sql.type.OperandTypes#UNIT_INTERVAL_NUMERIC_LITERAL @ Create a type checker like org.apache.calcite.sql.type.POSITIVE_INTEGER_LITERAL and use that instead
org.apache.calcite.sql.type.OperandTypes#NUMERIC_UNIT_INTERVAL_NUMERIC_LITERAL @ Create a type checker like org.apache.calcite.sql.type.POSITIVE_INTEGER_LITERAL and use that instead
org.apache.calcite.sql.type.OperandTypes#NULLABLE_LITERAL @ Create an instance of org.apache.calcite.sql.type.CastedLiteralOperandTypeChecker that allows nulls and use that instead
org.apache.commons.io.FileUtils#getTempDirectory() @ Use org.junit.rules.TemporaryFolder for tests instead
org.apache.commons.io.FileUtils#deleteDirectory(java.io.File) @ Use org.apache.druid.java.util.common.FileUtils#deleteDirectory()
org.apache.commons.io.FileUtils#forceMkdir(java.io.File) @ Use org.apache.druid.java.util.common.FileUtils.mkdirp instead
org.apache.datasketches.memory.Memory#wrap(byte[], int, int, java.nio.ByteOrder) @ The implementation isn't correct in datasketches-memory-2.2.0. Please refer to https://github.com/apache/datasketches-memory/issues/178. Use wrap(byte[]) and have the callers adjust the offset instead
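# Illustrative sketch only, kept as a comment: presized collection factories and ThreadLocalRandom,
# as recommended above. Assumes Guava's Maps/Sets and java.util.concurrent.ThreadLocalRandom imports;
# "expectedSize" is a placeholder variable, not a name from this file.
#
#   Map<String, Long> counts = Maps.newHashMapWithExpectedSize(expectedSize);   // not new HashMap<>(expectedSize)
#   Set<String> seen = Sets.newHashSetWithExpectedSize(expectedSize);           // not new HashSet<>(expectedSize)
#   double jitter = ThreadLocalRandom.current().nextDouble();                   // not Math.random() or new Random()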
java.lang.Class#getCanonicalName() @ Class.getCanonicalName can return null for anonymous types, use Class.getName instead.
java.util.concurrent.Executors#newFixedThreadPool(int) @ Executor is non-daemon and can prevent JVM shutdown, use org.apache.druid.java.util.common.concurrent.Execs#multiThreaded(int, java.lang.String) instead.

@defaultMessage Use Locale.ENGLISH
com.ibm.icu.text.DateFormatSymbols#<init>()
com.ibm.icu.text.SimpleDateFormat#<init>()
com.ibm.icu.text.SimpleDateFormat#<init>(java.lang.String)

@defaultMessage For performance reasons, use the utf8Base64 / encodeBase64 / encodeBase64String / decodeBase64 / decodeBase64String methods in StringUtils
org.apache.commons.codec.binary.Base64
com.google.common.io.BaseEncoding#base64()

@defaultMessage Use com.google.errorprone.annotations.concurrent.GuardedBy
javax.annotation.concurrent.GuardedBy
com.amazonaws.annotation.GuardedBy

org.powermock.** @ Use Mockito instead of PowerMock for compatibility with newer Java versions
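# Illustrative sketch only, kept as a comment: the Druid-side replacements for Executors and the
# temp-dir helpers listed above. The pool size, thread-name format, and variable names are
# placeholders; Execs#multiThreaded is assumed to take a thread-name format string per the
# signature Execs#multiThreaded(int, java.lang.String) above.
#
#   File tmpDir = FileUtils.createTempDir();                       // org.apache.druid.java.util.common.FileUtils
#   ExecutorService exec = Execs.multiThreaded(4, "my-task-%d");   // daemon threads; not Executors.newFixedThreadPool(4)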