diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/FlattenJSONBenchmark.java b/benchmarks/src/main/java/org/apache/druid/benchmark/FlattenJSONBenchmark.java
index 02f6b880244..14169e21b68 100644
--- a/benchmarks/src/main/java/org/apache/druid/benchmark/FlattenJSONBenchmark.java
+++ b/benchmarks/src/main/java/org/apache/druid/benchmark/FlattenJSONBenchmark.java
@@ -48,7 +48,7 @@ import java.util.concurrent.TimeUnit;
 @Fork(value = 1)
 public class FlattenJSONBenchmark
 {
-  private static final int numEvents = 100000;
+  private static final int NUM_EVENTS = 100000;
 
   List flatInputs;
   List nestedInputs;
@@ -67,15 +67,15 @@ public class FlattenJSONBenchmark
   {
     FlattenJSONBenchmarkUtil gen = new FlattenJSONBenchmarkUtil();
     flatInputs = new ArrayList();
-    for (int i = 0; i < numEvents; i++) {
+    for (int i = 0; i < NUM_EVENTS; i++) {
       flatInputs.add(gen.generateFlatEvent());
     }
     nestedInputs = new ArrayList();
-    for (int i = 0; i < numEvents; i++) {
+    for (int i = 0; i < NUM_EVENTS; i++) {
      nestedInputs.add(gen.generateNestedEvent());
     }
     jqInputs = new ArrayList();
-    for (int i = 0; i < numEvents; i++) {
+    for (int i = 0; i < NUM_EVENTS; i++) {
       jqInputs.add(gen.generateNestedEvent()); // reuse the same event as "nested"
     }
@@ -95,7 +95,7 @@ public class FlattenJSONBenchmark
     for (String s : parsed.keySet()) {
       blackhole.consume(parsed.get(s));
     }
-    flatCounter = (flatCounter + 1) % numEvents;
+    flatCounter = (flatCounter + 1) % NUM_EVENTS;
     return parsed;
   }
@@ -108,7 +108,7 @@ public class FlattenJSONBenchmark
     for (String s : parsed.keySet()) {
       blackhole.consume(parsed.get(s));
     }
-    nestedCounter = (nestedCounter + 1) % numEvents;
+    nestedCounter = (nestedCounter + 1) % NUM_EVENTS;
     return parsed;
   }
@@ -121,7 +121,7 @@ public class FlattenJSONBenchmark
     for (String s : parsed.keySet()) {
       blackhole.consume(parsed.get(s));
     }
-    jqCounter = (jqCounter + 1) % numEvents;
+    jqCounter = (jqCounter + 1) % NUM_EVENTS;
     return parsed;
   }
@@ -134,7 +134,7 @@ public class FlattenJSONBenchmark
     for (String s : parsed.keySet()) {
       blackhole.consume(parsed.get(s));
     }
-    nestedCounter = (nestedCounter + 1) % numEvents;
+    nestedCounter = (nestedCounter + 1) % NUM_EVENTS;
     return parsed;
   }
@@ -147,7 +147,7 @@ public class FlattenJSONBenchmark
     for (String s : parsed.keySet()) {
       blackhole.consume(parsed.get(s));
     }
-    nestedCounter = (nestedCounter + 1) % numEvents;
+    nestedCounter = (nestedCounter + 1) % NUM_EVENTS;
     return parsed;
   }
diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java b/benchmarks/src/main/java/org/apache/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java
index eacbc16b5d9..03c31a2daa5 100644
--- a/benchmarks/src/main/java/org/apache/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java
+++ b/benchmarks/src/main/java/org/apache/druid/benchmark/FloatCompressionBenchmarkFileGenerator.java
@@ -46,7 +46,7 @@ public class FloatCompressionBenchmarkFileGenerator
 {
   private static final Logger log = new Logger(FloatCompressionBenchmarkFileGenerator.class);
   public static final int ROW_NUM = 5000000;
-  public static final List compressions =
+  public static final List COMPRESSIONS =
       ImmutableList.of(
           CompressionStrategy.LZ4,
           CompressionStrategy.NONE
@@ -138,7 +138,7 @@ public class FloatCompressionBenchmarkFileGenerator
     // create compressed files using all combinations of CompressionStrategy and FloatEncoding provided
     for (Map.Entry entry : generators.entrySet()) {
-      for (CompressionStrategy compression : compressions) {
+      for (CompressionStrategy compression : COMPRESSIONS) {
         String name = entry.getKey() + "-" + compression;
         log.info("%s: ", name);
         File compFile = new File(dir, name);
diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/GenericIndexedBenchmark.java b/benchmarks/src/main/java/org/apache/druid/benchmark/GenericIndexedBenchmark.java
index 8998090f790..27a57f9ca9d 100644
--- a/benchmarks/src/main/java/org/apache/druid/benchmark/GenericIndexedBenchmark.java
+++ b/benchmarks/src/main/java/org/apache/druid/benchmark/GenericIndexedBenchmark.java
@@ -62,7 +62,7 @@ public class GenericIndexedBenchmark
 {
   public static final int ITERATIONS = 10000;
 
-  static final ObjectStrategy byteArrayStrategy = new ObjectStrategy()
+  static final ObjectStrategy BYTE_ARRAY_STRATEGY = new ObjectStrategy()
   {
     @Override
     public Class getClazz()
@@ -108,7 +108,7 @@ public class GenericIndexedBenchmark
     GenericIndexedWriter genericIndexedWriter = new GenericIndexedWriter<>(
         new OffHeapMemorySegmentWriteOutMedium(),
         "genericIndexedBenchmark",
-        byteArrayStrategy
+        BYTE_ARRAY_STRATEGY
     );
     genericIndexedWriter.open();
@@ -132,7 +132,7 @@ public class GenericIndexedBenchmark
 
     FileChannel fileChannel = FileChannel.open(file.toPath());
     MappedByteBuffer byteBuffer = fileChannel.map(FileChannel.MapMode.READ_ONLY, 0, file.length());
-    genericIndexed = GenericIndexed.read(byteBuffer, byteArrayStrategy, SmooshedFileMapper.load(smooshDir));
+    genericIndexed = GenericIndexed.read(byteBuffer, BYTE_ARRAY_STRATEGY, SmooshedFileMapper.load(smooshDir));
   }
 
   @Setup(Level.Trial)
diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/IncrementalIndexRowTypeBenchmark.java b/benchmarks/src/main/java/org/apache/druid/benchmark/IncrementalIndexRowTypeBenchmark.java
index e691ea2039d..1c0e841285d 100644
--- a/benchmarks/src/main/java/org/apache/druid/benchmark/IncrementalIndexRowTypeBenchmark.java
+++ b/benchmarks/src/main/java/org/apache/druid/benchmark/IncrementalIndexRowTypeBenchmark.java
@@ -52,8 +52,8 @@ public class IncrementalIndexRowTypeBenchmark
   private IncrementalIndex incFloatIndex;
   private IncrementalIndex incStrIndex;
   private static AggregatorFactory[] aggs;
-  static final int dimensionCount = 8;
-  static final int maxRows = 250000;
+  static final int DIMENSION_COUNT = 8;
+  static final int MAX_ROWS = 250000;
 
   private ArrayList longRows = new ArrayList();
   private ArrayList floatRows = new ArrayList();
@@ -61,9 +61,9 @@ public class IncrementalIndexRowTypeBenchmark
 
   static {
-    final ArrayList ingestAggregatorFactories = new ArrayList<>(dimensionCount + 1);
+    final ArrayList ingestAggregatorFactories = new ArrayList<>(DIMENSION_COUNT + 1);
     ingestAggregatorFactories.add(new CountAggregatorFactory("rows"));
-    for (int i = 0; i < dimensionCount; ++i) {
+    for (int i = 0; i < DIMENSION_COUNT; ++i) {
       ingestAggregatorFactories.add(
           new LongSumAggregatorFactory(
               StringUtils.format("sumResult%s", i),
@@ -125,23 +125,23 @@ public class IncrementalIndexRowTypeBenchmark
         .setSimpleTestingIndexSchema(aggs)
         .setDeserializeComplexMetrics(false)
         .setReportParseExceptions(false)
-        .setMaxRowCount(maxRows)
+        .setMaxRowCount(MAX_ROWS)
         .buildOnheap();
   }
 
   @Setup
   public void setup()
   {
-    for (int i = 0; i < maxRows; i++) {
-      longRows.add(getLongRow(0, dimensionCount));
+    for (int i = 0; i < MAX_ROWS; i++) {
+      longRows.add(getLongRow(0, DIMENSION_COUNT));
     }
 
-    for (int i = 0; i < maxRows; i++) {
-      floatRows.add(getFloatRow(0, dimensionCount));
+    for (int i = 0; i < MAX_ROWS; i++) {
+      floatRows.add(getFloatRow(0, DIMENSION_COUNT));
     }
 
-    for (int i = 0; i < maxRows; i++) {
-      stringRows.add(getStringRow(0, dimensionCount));
+    for (int i = 0; i < MAX_ROWS; i++) {
+      stringRows.add(getStringRow(0, DIMENSION_COUNT));
     }
   }
@@ -156,10 +156,10 @@ public class IncrementalIndexRowTypeBenchmark
   @Benchmark
   @BenchmarkMode(Mode.AverageTime)
   @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  @OperationsPerInvocation(maxRows)
+  @OperationsPerInvocation(MAX_ROWS)
   public void normalLongs(Blackhole blackhole) throws Exception
   {
-    for (int i = 0; i < maxRows; i++) {
+    for (int i = 0; i < MAX_ROWS; i++) {
       InputRow row = longRows.get(i);
       int rv = incIndex.add(row).getRowCount();
       blackhole.consume(rv);
@@ -169,10 +169,10 @@ public class IncrementalIndexRowTypeBenchmark
   @Benchmark
   @BenchmarkMode(Mode.AverageTime)
   @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  @OperationsPerInvocation(maxRows)
+  @OperationsPerInvocation(MAX_ROWS)
   public void normalFloats(Blackhole blackhole) throws Exception
   {
-    for (int i = 0; i < maxRows; i++) {
+    for (int i = 0; i < MAX_ROWS; i++) {
       InputRow row = floatRows.get(i);
       int rv = incFloatIndex.add(row).getRowCount();
       blackhole.consume(rv);
@@ -182,10 +182,10 @@ public class IncrementalIndexRowTypeBenchmark
   @Benchmark
   @BenchmarkMode(Mode.AverageTime)
   @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  @OperationsPerInvocation(maxRows)
+  @OperationsPerInvocation(MAX_ROWS)
   public void normalStrings(Blackhole blackhole) throws Exception
   {
-    for (int i = 0; i < maxRows; i++) {
+    for (int i = 0; i < MAX_ROWS; i++) {
       InputRow row = stringRows.get(i);
       int rv = incStrIndex.add(row).getRowCount();
       blackhole.consume(rv);
diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/LongCompressionBenchmarkFileGenerator.java b/benchmarks/src/main/java/org/apache/druid/benchmark/LongCompressionBenchmarkFileGenerator.java
index 68e3ae7623a..8495d672ead 100644
--- a/benchmarks/src/main/java/org/apache/druid/benchmark/LongCompressionBenchmarkFileGenerator.java
+++ b/benchmarks/src/main/java/org/apache/druid/benchmark/LongCompressionBenchmarkFileGenerator.java
@@ -46,11 +46,11 @@ public class LongCompressionBenchmarkFileGenerator
 {
   private static final Logger log = new Logger(LongCompressionBenchmarkFileGenerator.class);
   public static final int ROW_NUM = 5000000;
-  public static final List compressions =
+  public static final List COMPRESSIONS =
       ImmutableList.of(
           CompressionStrategy.LZ4,
           CompressionStrategy.NONE);
-  public static final List encodings =
+  public static final List ENCODINGS =
       ImmutableList.of(CompressionFactory.LongEncodingStrategy.AUTO, CompressionFactory.LongEncodingStrategy.LONGS);
 
   private static String dirPath = "longCompress/";
@@ -130,8 +130,8 @@ public class LongCompressionBenchmarkFileGenerator
     // create compressed files using all combinations of CompressionStrategy and LongEncoding provided
     for (Map.Entry entry : generators.entrySet()) {
-      for (CompressionStrategy compression : compressions) {
-        for (CompressionFactory.LongEncodingStrategy encoding : encodings) {
+      for (CompressionStrategy compression : COMPRESSIONS) {
+        for (CompressionFactory.LongEncodingStrategy encoding : ENCODINGS) {
          String name = entry.getKey() + "-" + compression + "-" + encoding;
          log.info("%s: ", name);
          File compFile = new File(dir, name);
diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/StupidPoolConcurrencyBenchmark.java b/benchmarks/src/main/java/org/apache/druid/benchmark/StupidPoolConcurrencyBenchmark.java
index d574a30d871..b7ed8eade07 100644
--- a/benchmarks/src/main/java/org/apache/druid/benchmark/StupidPoolConcurrencyBenchmark.java
+++ b/benchmarks/src/main/java/org/apache/druid/benchmark/StupidPoolConcurrencyBenchmark.java
@@ -36,7 +36,7 @@ import java.util.concurrent.atomic.AtomicLong;
 public class StupidPoolConcurrencyBenchmark
 {
-  private static final Object simpleObject = new Object();
+  private static final Object SIMPLE_OBJECT = new Object();
 
   @State(Scope.Benchmark)
   public static class BenchmarkPool
@@ -50,7 +50,7 @@ public class StupidPoolConcurrencyBenchmark
           public Object get()
           {
             numPools.incrementAndGet();
-            return simpleObject;
+            return SIMPLE_OBJECT;
           }
         }
     );
diff --git a/benchmarks/src/main/java/org/apache/druid/benchmark/query/timecompare/TimeCompareBenchmark.java b/benchmarks/src/main/java/org/apache/druid/benchmark/query/timecompare/TimeCompareBenchmark.java
index 6348d18af31..83dfe0ab8f3 100644
--- a/benchmarks/src/main/java/org/apache/druid/benchmark/query/timecompare/TimeCompareBenchmark.java
+++ b/benchmarks/src/main/java/org/apache/druid/benchmark/query/timecompare/TimeCompareBenchmark.java
@@ -116,12 +116,12 @@ public class TimeCompareBenchmark
   @Param({"100"})
   private int threshold;
 
-  protected static final Map scriptDoubleSum = new HashMap<>();
+  protected static final Map SCRIPT_DOUBLE_SUM = new HashMap<>();
   static {
-    scriptDoubleSum.put("fnAggregate", "function aggregate(current, a) { return current + a }");
-    scriptDoubleSum.put("fnReset", "function reset() { return 0 }");
-    scriptDoubleSum.put("fnCombine", "function combine(a,b) { return a + b }");
+    SCRIPT_DOUBLE_SUM.put("fnAggregate", "function aggregate(current, a) { return current + a }");
+    SCRIPT_DOUBLE_SUM.put("fnReset", "function reset() { return 0 }");
+    SCRIPT_DOUBLE_SUM.put("fnCombine", "function combine(a,b) { return a + b }");
   }
 
   private static final Logger log = new Logger(TimeCompareBenchmark.class);
diff --git a/benchmarks/src/main/java/org/apache/druid/server/coordinator/CostBalancerStrategyBenchmark.java b/benchmarks/src/main/java/org/apache/druid/server/coordinator/CostBalancerStrategyBenchmark.java
index f66367b0844..8b40d30f782 100644
--- a/benchmarks/src/main/java/org/apache/druid/server/coordinator/CostBalancerStrategyBenchmark.java
+++ b/benchmarks/src/main/java/org/apache/druid/server/coordinator/CostBalancerStrategyBenchmark.java
@@ -41,7 +41,7 @@
 @State(Scope.Benchmark)
 public class CostBalancerStrategyBenchmark
 {
-  private static final DateTime t0 = DateTimes.of("2016-01-01T01:00:00Z");
+  private static final DateTime T0 = DateTimes.of("2016-01-01T01:00:00Z");
 
   private List segments;
   private DataSegment segment;
@@ -55,12 +55,12 @@ public class CostBalancerStrategyBenchmark
   @Setup
   public void setupDummyCluster()
   {
-    segment = createSegment(t0);
+    segment = createSegment(T0);
 
     Random r = ThreadLocalRandom.current();
     segments = new ArrayList<>(n);
     for (int i = 0; i < n; ++i) {
-      final DateTime t = t0.minusHours(r.nextInt(365 * 24) - 365 * 12);
+      final DateTime t = T0.minusHours(r.nextInt(365 * 24) - 365 * 12);
       segments.add(createSegment(t));
     }
   }
diff --git a/codestyle/checkstyle-suppressions.xml b/codestyle/checkstyle-suppressions.xml
index 15604f37a47..fc3b63f1bd3 100644
--- a/codestyle/checkstyle-suppressions.xml
+++ b/codestyle/checkstyle-suppressions.xml
@@ -48,6 +48,13 @@
+
+
+
+
+
+
+
diff --git a/codestyle/checkstyle.xml b/codestyle/checkstyle.xml
index 424f2a33e7d..8635b70955e 100644
--- a/codestyle/checkstyle.xml
+++ b/codestyle/checkstyle.xml
@@ -306,7 +306,7 @@ codestyle/checkstyle.xml.
             "/>
-
+
@@ -319,5 +319,8 @@ codestyle/checkstyle.xml.
"/> + + + diff --git a/core/src/main/java/org/apache/druid/collections/ReferenceCountingResourceHolder.java b/core/src/main/java/org/apache/druid/collections/ReferenceCountingResourceHolder.java index 715921be373..808100dfb23 100644 --- a/core/src/main/java/org/apache/druid/collections/ReferenceCountingResourceHolder.java +++ b/core/src/main/java/org/apache/druid/collections/ReferenceCountingResourceHolder.java @@ -33,11 +33,11 @@ public class ReferenceCountingResourceHolder implements ResourceHolder { private static final Logger log = new Logger(ReferenceCountingResourceHolder.class); - private static final AtomicLong leakedResources = new AtomicLong(); + private static final AtomicLong LEAKED_RESOURCES = new AtomicLong(); public static long leakedResources() { - return leakedResources.get(); + return LEAKED_RESOURCES.get(); } private final T object; @@ -164,7 +164,7 @@ public class ReferenceCountingResourceHolder implements ResourceHolder } if (refCount.compareAndSet(count, 0)) { try { - leakedResources.incrementAndGet(); + LEAKED_RESOURCES.incrementAndGet(); closer.close(); return; } diff --git a/core/src/main/java/org/apache/druid/data/input/impl/TimestampSpec.java b/core/src/main/java/org/apache/druid/data/input/impl/TimestampSpec.java index 7883ae02ce4..29334f9306c 100644 --- a/core/src/main/java/org/apache/druid/data/input/impl/TimestampSpec.java +++ b/core/src/main/java/org/apache/druid/data/input/impl/TimestampSpec.java @@ -53,7 +53,7 @@ public class TimestampSpec private final Function timestampConverter; // remember last value parsed - private static final ThreadLocal parseCtx = ThreadLocal.withInitial(ParseCtx::new); + private static final ThreadLocal PARSE_CTX = ThreadLocal.withInitial(ParseCtx::new); @JsonCreator public TimestampSpec( @@ -98,7 +98,7 @@ public class TimestampSpec { DateTime extracted = missingValue; if (input != null) { - ParseCtx ctx = parseCtx.get(); + ParseCtx ctx = PARSE_CTX.get(); // Check if the input is equal to the last input, so we don't need to parse it again if (input.equals(ctx.lastTimeObject)) { extracted = ctx.lastDateTime; @@ -107,7 +107,7 @@ public class TimestampSpec ParseCtx newCtx = new ParseCtx(); newCtx.lastTimeObject = input; newCtx.lastDateTime = extracted; - parseCtx.set(newCtx); + PARSE_CTX.set(newCtx); } } return extracted; diff --git a/core/src/main/java/org/apache/druid/jackson/CommaListJoinSerializer.java b/core/src/main/java/org/apache/druid/jackson/CommaListJoinSerializer.java index 05c733ceb56..2ef3835419d 100644 --- a/core/src/main/java/org/apache/druid/jackson/CommaListJoinSerializer.java +++ b/core/src/main/java/org/apache/druid/jackson/CommaListJoinSerializer.java @@ -31,7 +31,7 @@ import java.util.List; */ public class CommaListJoinSerializer extends StdScalarSerializer> { - private static final Joiner joiner = Joiner.on(","); + private static final Joiner JOINER = Joiner.on(","); protected CommaListJoinSerializer() { @@ -41,6 +41,6 @@ public class CommaListJoinSerializer extends StdScalarSerializer> @Override public void serialize(List value, JsonGenerator jgen, SerializerProvider provider) throws IOException { - jgen.writeString(joiner.join(value)); + jgen.writeString(JOINER.join(value)); } } diff --git a/core/src/main/java/org/apache/druid/java/util/common/granularity/Granularity.java b/core/src/main/java/org/apache/druid/java/util/common/granularity/Granularity.java index 3a9cdac17b7..5d130b0b038 100644 --- a/core/src/main/java/org/apache/druid/java/util/common/granularity/Granularity.java +++ 
b/core/src/main/java/org/apache/druid/java/util/common/granularity/Granularity.java @@ -43,11 +43,11 @@ public abstract class Granularity implements Cacheable /** * Default patterns for parsing paths. */ - private static final Pattern defaultPathPattern = + private static final Pattern DEFAULT_PATH_PATTERN = Pattern.compile( "^.*[Yy]=(\\d{4})/(?:[Mm]=(\\d{2})/(?:[Dd]=(\\d{2})/(?:[Hh]=(\\d{2})/(?:[Mm]=(\\d{2})/(?:[Ss]=(\\d{2})/)?)?)?)?)?.*$" ); - private static final Pattern hivePathPattern = + private static final Pattern HIVE_PATH_PATTERN = Pattern.compile("^.*dt=(\\d{4})(?:-(\\d{2})(?:-(\\d{2})(?:-(\\d{2})(?:-(\\d{2})(?:-(\\d{2})?)?)?)?)?)?/.*$"); @JsonCreator @@ -150,13 +150,13 @@ public abstract class Granularity implements Cacheable // Used by the toDate implementations. final Integer[] getDateValues(String filePath, Formatter formatter) { - Pattern pattern = defaultPathPattern; + Pattern pattern = DEFAULT_PATH_PATTERN; switch (formatter) { case DEFAULT: case LOWER_DEFAULT: break; case HIVE: - pattern = hivePathPattern; + pattern = HIVE_PATH_PATTERN; break; default: throw new IAE("Format %s not supported", formatter); diff --git a/core/src/main/java/org/apache/druid/java/util/common/io/smoosh/FileSmoosher.java b/core/src/main/java/org/apache/druid/java/util/common/io/smoosh/FileSmoosher.java index a81fc0a9725..1139b5cec73 100644 --- a/core/src/main/java/org/apache/druid/java/util/common/io/smoosh/FileSmoosher.java +++ b/core/src/main/java/org/apache/druid/java/util/common/io/smoosh/FileSmoosher.java @@ -70,7 +70,7 @@ import java.util.TreeMap; public class FileSmoosher implements Closeable { private static final String FILE_EXTENSION = "smoosh"; - private static final Joiner joiner = Joiner.on(","); + private static final Joiner JOINER = Joiner.on(","); private static final Logger LOG = new Logger(FileSmoosher.class); private final File baseDir; @@ -376,7 +376,7 @@ public class FileSmoosher implements Closeable for (Map.Entry entry : internalFiles.entrySet()) { final Metadata metadata = entry.getValue(); out.write( - joiner.join( + JOINER.join( entry.getKey(), metadata.getFileNum(), metadata.getStartOffset(), diff --git a/core/src/main/java/org/apache/druid/java/util/common/parsers/JSONToLowerParser.java b/core/src/main/java/org/apache/druid/java/util/common/parsers/JSONToLowerParser.java index 70a81cb20c8..11d72e84ef5 100644 --- a/core/src/main/java/org/apache/druid/java/util/common/parsers/JSONToLowerParser.java +++ b/core/src/main/java/org/apache/druid/java/util/common/parsers/JSONToLowerParser.java @@ -47,7 +47,7 @@ import java.util.Set; @Deprecated public class JSONToLowerParser implements Parser { - private static final Function valueFunction = new Function() + private static final Function VALUE_FUNCTION = new Function() { @Override public Object apply(JsonNode node) @@ -128,14 +128,14 @@ public class JSONToLowerParser implements Parser if (node.isArray()) { final List nodeValue = Lists.newArrayListWithExpectedSize(node.size()); for (final JsonNode subnode : node) { - final Object subnodeValue = valueFunction.apply(subnode); + final Object subnodeValue = VALUE_FUNCTION.apply(subnode); if (subnodeValue != null) { nodeValue.add(subnodeValue); } } map.put(StringUtils.toLowerCase(key), nodeValue); // difference from JSONParser parse() } else { - final Object nodeValue = valueFunction.apply(node); + final Object nodeValue = VALUE_FUNCTION.apply(node); if (nodeValue != null) { map.put(StringUtils.toLowerCase(key), nodeValue); // difference from JSONParser parse() } diff --git 
a/core/src/main/java/org/apache/druid/java/util/emitter/core/HttpPostEmitter.java b/core/src/main/java/org/apache/druid/java/util/emitter/core/HttpPostEmitter.java index 1036fb96532..3f1756c97b3 100644 --- a/core/src/main/java/org/apache/druid/java/util/emitter/core/HttpPostEmitter.java +++ b/core/src/main/java/org/apache/druid/java/util/emitter/core/HttpPostEmitter.java @@ -90,7 +90,7 @@ public class HttpPostEmitter implements Flushable, Closeable, Emitter private static final byte[] LARGE_EVENTS_STOP = new byte[]{}; private static final Logger log = new Logger(HttpPostEmitter.class); - private static final AtomicInteger instanceCounter = new AtomicInteger(); + private static final AtomicInteger INSTANCE_COUNTER = new AtomicInteger(); final BatchingStrategy batchingStrategy; final HttpEmitterConfig config; @@ -484,7 +484,7 @@ public class HttpPostEmitter implements Flushable, Closeable, Emitter EmittingThread(HttpEmitterConfig config) { - super("HttpPostEmitter-" + instanceCounter.incrementAndGet()); + super("HttpPostEmitter-" + INSTANCE_COUNTER.incrementAndGet()); setDaemon(true); timeoutLessThanMinimumException = new TimeoutException( "Timeout less than minimum [" + config.getMinHttpTimeoutMillis() + "] ms." diff --git a/core/src/main/java/org/apache/druid/java/util/http/client/Request.java b/core/src/main/java/org/apache/druid/java/util/http/client/Request.java index f9f73f23c79..8fe5d0f1798 100644 --- a/core/src/main/java/org/apache/druid/java/util/http/client/Request.java +++ b/core/src/main/java/org/apache/druid/java/util/http/client/Request.java @@ -44,7 +44,7 @@ import java.util.Map; */ public class Request { - private static final ChannelBufferFactory factory = HeapChannelBufferFactory.getInstance(); + private static final ChannelBufferFactory FACTORY = HeapChannelBufferFactory.getInstance(); private final HttpMethod method; private final URL url; @@ -147,7 +147,7 @@ public class Request public Request setContent(String contentType, byte[] bytes, int offset, int length) { - return setContent(contentType, factory.getBuffer(bytes, offset, length)); + return setContent(contentType, FACTORY.getBuffer(bytes, offset, length)); } public Request setContent(String contentType, ChannelBuffer content) diff --git a/core/src/main/java/org/apache/druid/java/util/metrics/SigarUtil.java b/core/src/main/java/org/apache/druid/java/util/metrics/SigarUtil.java index 26ff73f2bde..e4299fc9dda 100644 --- a/core/src/main/java/org/apache/druid/java/util/metrics/SigarUtil.java +++ b/core/src/main/java/org/apache/druid/java/util/metrics/SigarUtil.java @@ -73,12 +73,12 @@ public class SigarUtil */ private static class CurrentProcessIdHolder { - private static final long currentProcessId = new Sigar().getPid(); + private static final long CURRENT_PROCESS_ID = new Sigar().getPid(); } public static long getCurrentProcessId() { - return CurrentProcessIdHolder.currentProcessId; + return CurrentProcessIdHolder.CURRENT_PROCESS_ID; } } diff --git a/core/src/main/java/org/apache/druid/timeline/partition/HashBasedNumberedShardSpec.java b/core/src/main/java/org/apache/druid/timeline/partition/HashBasedNumberedShardSpec.java index 88dfc8397d2..a03fddceaf3 100644 --- a/core/src/main/java/org/apache/druid/timeline/partition/HashBasedNumberedShardSpec.java +++ b/core/src/main/java/org/apache/druid/timeline/partition/HashBasedNumberedShardSpec.java @@ -38,7 +38,7 @@ import java.util.List; public class HashBasedNumberedShardSpec extends NumberedShardSpec { - private static final HashFunction hashFunction = 
Hashing.murmur3_32(); + private static final HashFunction HASH_FUNCTION = Hashing.murmur3_32(); private static final List DEFAULT_PARTITION_DIMENSIONS = ImmutableList.of(); private final ObjectMapper jsonMapper; @@ -100,7 +100,7 @@ public class HashBasedNumberedShardSpec extends NumberedShardSpec @VisibleForTesting public static int hash(ObjectMapper jsonMapper, List objects) throws JsonProcessingException { - return hashFunction.hashBytes(jsonMapper.writeValueAsBytes(objects)).asInt(); + return HASH_FUNCTION.hashBytes(jsonMapper.writeValueAsBytes(objects)).asInt(); } @Override diff --git a/core/src/test/java/org/apache/druid/collections/SerializablePairTest.java b/core/src/test/java/org/apache/druid/collections/SerializablePairTest.java index f46ed424748..3ce84429b46 100644 --- a/core/src/test/java/org/apache/druid/collections/SerializablePairTest.java +++ b/core/src/test/java/org/apache/druid/collections/SerializablePairTest.java @@ -27,14 +27,14 @@ import java.io.IOException; public class SerializablePairTest { - private static final ObjectMapper jsonMapper = new ObjectMapper(); + private static final ObjectMapper JSON_MAPPER = new ObjectMapper(); @Test public void testBytesSerde() throws IOException { SerializablePair pair = new SerializablePair<>(5L, 9L); - byte[] bytes = jsonMapper.writeValueAsBytes(pair); - SerializablePair deserializedPair = jsonMapper.readValue(bytes, SerializablePair.class); + byte[] bytes = JSON_MAPPER.writeValueAsBytes(pair); + SerializablePair deserializedPair = JSON_MAPPER.readValue(bytes, SerializablePair.class); Assert.assertEquals(pair.lhs, deserializedPair.lhs.longValue()); Assert.assertEquals(pair.rhs, deserializedPair.rhs.longValue()); } @@ -43,8 +43,8 @@ public class SerializablePairTest public void testStringSerde() throws IOException { SerializablePair pair = new SerializablePair<>(5L, 9L); - String str = jsonMapper.writeValueAsString(pair); - SerializablePair deserializedPair = jsonMapper.readValue(str, SerializablePair.class); + String str = JSON_MAPPER.writeValueAsString(pair); + SerializablePair deserializedPair = JSON_MAPPER.readValue(str, SerializablePair.class); Assert.assertEquals(pair.lhs, deserializedPair.lhs.longValue()); Assert.assertEquals(pair.rhs, deserializedPair.rhs.longValue()); } diff --git a/core/src/test/java/org/apache/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactoryTest.java b/core/src/test/java/org/apache/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactoryTest.java index c1d1b318d00..1564daca535 100644 --- a/core/src/test/java/org/apache/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactoryTest.java +++ b/core/src/test/java/org/apache/druid/data/input/impl/prefetch/PrefetchableTextFilesFirehoseFactoryTest.java @@ -64,7 +64,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest { private static long FILE_SIZE = -1; - private static final StringInputRowParser parser = new StringInputRowParser( + private static final StringInputRowParser PARSER = new StringInputRowParser( new CSVParseSpec( new TimestampSpec( "timestamp", @@ -163,7 +163,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest final List rows = new ArrayList<>(); final File firehoseTmpDir = createFirehoseTmpDir("testWithoutCacheAndFetch"); - try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) { + try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) { while (firehose.hasMore()) { rows.add(firehose.nextRow()); } @@ -182,7 +182,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest 
     final List rows = new ArrayList<>();
     final File firehoseTmpDir = createFirehoseTmpDir("testWithoutCacheAndFetch");
-    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+    try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
       while (firehose.hasMore()) {
         rows.add(firehose.nextRow());
       }
@@ -201,7 +201,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
     final List rows = new ArrayList<>();
     final File firehoseTmpDir = createFirehoseTmpDir("testWithoutCache");
-    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+    try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
       while (firehose.hasMore()) {
         rows.add(firehose.nextRow());
       }
@@ -220,7 +220,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
     final List rows = new ArrayList<>();
     final File firehoseTmpDir = createFirehoseTmpDir("testWithZeroFetchCapacity");
-    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+    try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
       while (firehose.hasMore()) {
         rows.add(firehose.nextRow());
       }
@@ -238,7 +238,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
     final List rows = new ArrayList<>();
     final File firehoseTmpDir = createFirehoseTmpDir("testWithCacheAndFetch");
-    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+    try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
       while (firehose.hasMore()) {
         rows.add(firehose.nextRow());
       }
@@ -256,7 +256,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
     final List rows = new ArrayList<>();
     final File firehoseTmpDir = createFirehoseTmpDir("testWithLargeCacheAndSmallFetch");
-    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+    try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
       while (firehose.hasMore()) {
         rows.add(firehose.nextRow());
       }
@@ -274,7 +274,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
     final List rows = new ArrayList<>();
     final File firehoseTmpDir = createFirehoseTmpDir("testWithSmallCacheAndLargeFetch");
-    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+    try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
       while (firehose.hasMore()) {
         rows.add(firehose.nextRow());
       }
@@ -292,7 +292,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
     final List rows = new ArrayList<>();
     final File firehoseTmpDir = createFirehoseTmpDir("testRetry");
-    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+    try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
       while (firehose.hasMore()) {
         rows.add(firehose.nextRow());
       }
@@ -312,7 +312,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
     final TestPrefetchableTextFilesFirehoseFactory factory =
         TestPrefetchableTextFilesFirehoseFactory.withOpenExceptions(TEST_DIR, 5);
 
-    try (Firehose firehose = factory.connect(parser, createFirehoseTmpDir("testMaxRetry"))) {
+    try (Firehose firehose = factory.connect(PARSER, createFirehoseTmpDir("testMaxRetry"))) {
       while (firehose.hasMore()) {
         firehose.nextRow();
       }
@@ -328,7 +328,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
     final TestPrefetchableTextFilesFirehoseFactory factory =
         TestPrefetchableTextFilesFirehoseFactory.withSleepMillis(TEST_DIR, 1000);
 
-    try (Firehose firehose = factory.connect(parser, createFirehoseTmpDir("testTimeout"))) {
+    try (Firehose firehose = factory.connect(PARSER, createFirehoseTmpDir("testTimeout"))) {
       while (firehose.hasMore()) {
         firehose.nextRow();
       }
@@ -344,7 +344,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
     for (int i = 0; i < 5; i++) {
       final List rows = new ArrayList<>();
-      try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+      try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
        if (i > 0) {
          Assert.assertEquals(FILE_SIZE * 2, factory.getCacheManager().getTotalCachedBytes());
        }
@@ -367,7 +367,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
     for (int i = 0; i < 5; i++) {
       final List rows = new ArrayList<>();
-      try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+      try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
        if (i > 0) {
          Assert.assertEquals(FILE_SIZE * 2, factory.getCacheManager().getTotalCachedBytes());
        }
diff --git a/core/src/test/java/org/apache/druid/java/util/common/CompressionUtilsTest.java b/core/src/test/java/org/apache/druid/java/util/common/CompressionUtilsTest.java
index 56d07ad4623..63fa47c7781 100644
--- a/core/src/test/java/org/apache/druid/java/util/common/CompressionUtilsTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/common/CompressionUtilsTest.java
@@ -62,9 +62,9 @@ import java.util.zip.ZipOutputStream;
 
 public class CompressionUtilsTest
 {
-  private static final String content;
-  private static final byte[] expected;
-  private static final byte[] gzBytes;
+  private static final String CONTENT;
+  private static final byte[] EXPECTED;
+  private static final byte[] GZ_BYTES;
 
   static {
     final StringBuilder builder = new StringBuilder();
@@ -79,19 +79,19 @@ public class CompressionUtilsTest
     catch (IOException e) {
       throw new RuntimeException(e);
     }
-    content = builder.toString();
-    expected = StringUtils.toUtf8(content);
+    CONTENT = builder.toString();
+    EXPECTED = StringUtils.toUtf8(CONTENT);
 
-    final ByteArrayOutputStream gzByteStream = new ByteArrayOutputStream(expected.length);
+    final ByteArrayOutputStream gzByteStream = new ByteArrayOutputStream(EXPECTED.length);
     try (GZIPOutputStream outputStream = new GZIPOutputStream(gzByteStream)) {
-      try (ByteArrayInputStream in = new ByteArrayInputStream(expected)) {
+      try (ByteArrayInputStream in = new ByteArrayInputStream(EXPECTED)) {
         ByteStreams.copy(in, outputStream);
       }
     }
     catch (IOException e) {
       throw new RuntimeException(e);
     }
-    gzBytes = gzByteStream.toByteArray();
+    GZ_BYTES = gzByteStream.toByteArray();
   }
 
   @Rule
@@ -101,9 +101,9 @@ public class CompressionUtilsTest
 
   public static void assertGoodDataStream(InputStream stream) throws IOException
   {
-    try (final ByteArrayOutputStream bos = new ByteArrayOutputStream(expected.length)) {
+    try (final ByteArrayOutputStream bos = new ByteArrayOutputStream(EXPECTED.length)) {
       ByteStreams.copy(stream, bos);
-      Assert.assertArrayEquals(expected, bos.toByteArray());
+      Assert.assertArrayEquals(EXPECTED, bos.toByteArray());
     }
   }
@@ -113,7 +113,7 @@ public class CompressionUtilsTest
     testDir = temporaryFolder.newFolder("testDir");
     testFile = new File(testDir, "test.dat");
     try (OutputStream outputStream = new FileOutputStream(testFile)) {
-      outputStream.write(StringUtils.toUtf8(content));
+      outputStream.write(StringUtils.toUtf8(CONTENT));
     }
     Assert.assertTrue(testFile.getParentFile().equals(testDir));
   }
@@ -395,35 +395,35 @@ public class CompressionUtilsTest
   {
     try (OutputStream outputStream = new FileOutputStream(testFile)) {
       Assert.assertEquals(
-          gzBytes.length,
+          GZ_BYTES.length,
           ByteStreams.copy(
-              new ZeroRemainingInputStream(new ByteArrayInputStream(gzBytes)),
+              new ZeroRemainingInputStream(new ByteArrayInputStream(GZ_BYTES)),
              outputStream
          )
      );
      Assert.assertEquals(
-          gzBytes.length,
+          GZ_BYTES.length,
          ByteStreams.copy(
-              new ZeroRemainingInputStream(new ByteArrayInputStream(gzBytes)),
+              new ZeroRemainingInputStream(new ByteArrayInputStream(GZ_BYTES)),
              outputStream
          )
      );
      Assert.assertEquals(
-          gzBytes.length,
+          GZ_BYTES.length,
          ByteStreams.copy(
-              new ZeroRemainingInputStream(new ByteArrayInputStream(gzBytes)),
+              new ZeroRemainingInputStream(new ByteArrayInputStream(GZ_BYTES)),
              outputStream
          )
      );
    }
-    Assert.assertEquals(gzBytes.length * 3, testFile.length());
+    Assert.assertEquals(GZ_BYTES.length * 3, testFile.length());
    try (InputStream inputStream = new ZeroRemainingInputStream(new FileInputStream(testFile))) {
      for (int i = 0; i < 3; ++i) {
-        final byte[] bytes = new byte[gzBytes.length];
+        final byte[] bytes = new byte[GZ_BYTES.length];
        Assert.assertEquals(bytes.length, inputStream.read(bytes));
        Assert.assertArrayEquals(
            StringUtils.format("Failed on range %d", i),
-            gzBytes,
+            GZ_BYTES,
            bytes
        );
      }
@@ -435,10 +435,10 @@ public class CompressionUtilsTest
  // http://bugs.java.com/bugdatabase/view_bug.do?bug_id=7036144
  public void testGunzipBug() throws IOException
  {
-    final ByteArrayOutputStream tripleGzByteStream = new ByteArrayOutputStream(gzBytes.length * 3);
-    tripleGzByteStream.write(gzBytes);
-    tripleGzByteStream.write(gzBytes);
-    tripleGzByteStream.write(gzBytes);
+    final ByteArrayOutputStream tripleGzByteStream = new ByteArrayOutputStream(GZ_BYTES.length * 3);
+    tripleGzByteStream.write(GZ_BYTES);
+    tripleGzByteStream.write(GZ_BYTES);
+    tripleGzByteStream.write(GZ_BYTES);
    try (final InputStream inputStream = new GZIPInputStream(
        new ZeroRemainingInputStream(
            new ByteArrayInputStream(
                tripleGzByteStream.toByteArray()
            )
        )
    )) {
-      try (final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(expected.length * 3)) {
+      try (final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(EXPECTED.length * 3)) {
        Assert.assertEquals(
            "Read terminated too soon (bug 7036144)",
-            expected.length * 3,
+            EXPECTED.length * 3,
            ByteStreams.copy(inputStream, outputStream)
        );
        final byte[] found = outputStream.toByteArray();
-        Assert.assertEquals(expected.length * 3, found.length);
-        Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 0, expected.length * 1));
-        Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 1, expected.length * 2));
-        Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 2, expected.length * 3));
+        Assert.assertEquals(EXPECTED.length * 3, found.length);
+        Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 0, EXPECTED.length * 1));
+        Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 1, EXPECTED.length * 2));
+        Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 2, EXPECTED.length * 3));
      }
    }
  }
@@ -468,10 +468,10 @@ public class CompressionUtilsTest
    testFile.delete();
    Assert.assertFalse(testFile.exists());
 
-    final ByteArrayOutputStream tripleGzByteStream = new ByteArrayOutputStream(gzBytes.length * 3);
-    tripleGzByteStream.write(gzBytes);
-    tripleGzByteStream.write(gzBytes);
-    tripleGzByteStream.write(gzBytes);
+    final ByteArrayOutputStream tripleGzByteStream = new ByteArrayOutputStream(GZ_BYTES.length * 3);
+    tripleGzByteStream.write(GZ_BYTES);
+    tripleGzByteStream.write(GZ_BYTES);
+    tripleGzByteStream.write(GZ_BYTES);
 
    final ByteSource inputStreamFactory = new ByteSource()
    {
@@ -482,20 +482,20 @@ public class CompressionUtilsTest
      }
    };
 
-    Assert.assertEquals((long) (expected.length * 3), CompressionUtils.gunzip(inputStreamFactory, testFile).size());
+    Assert.assertEquals((long) (EXPECTED.length * 3), CompressionUtils.gunzip(inputStreamFactory, testFile).size());
 
    try (final InputStream inputStream = new FileInputStream(testFile)) {
-      try (final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(expected.length * 3)) {
+      try (final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(EXPECTED.length * 3)) {
        Assert.assertEquals(
            "Read terminated too soon (7036144)",
-            expected.length * 3,
+            EXPECTED.length * 3,
            ByteStreams.copy(inputStream, outputStream)
        );
        final byte[] found = outputStream.toByteArray();
-        Assert.assertEquals(expected.length * 3, found.length);
-        Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 0, expected.length * 1));
-        Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 1, expected.length * 2));
-        Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 2, expected.length * 3));
+        Assert.assertEquals(EXPECTED.length * 3, found.length);
+        Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 0, EXPECTED.length * 1));
+        Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 1, EXPECTED.length * 2));
+        Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 2, EXPECTED.length * 3));
      }
    }
  }
@@ -505,14 +505,14 @@ public class CompressionUtilsTest
  public void testGunzipBugStreamWorkarround() throws IOException
  {
-    final ByteArrayOutputStream tripleGzByteStream = new ByteArrayOutputStream(gzBytes.length * 3);
-    tripleGzByteStream.write(gzBytes);
-    tripleGzByteStream.write(gzBytes);
-    tripleGzByteStream.write(gzBytes);
+    final ByteArrayOutputStream tripleGzByteStream = new ByteArrayOutputStream(GZ_BYTES.length * 3);
+    tripleGzByteStream.write(GZ_BYTES);
+    tripleGzByteStream.write(GZ_BYTES);
+    tripleGzByteStream.write(GZ_BYTES);
 
-    try (ByteArrayOutputStream bos = new ByteArrayOutputStream(expected.length * 3)) {
+    try (ByteArrayOutputStream bos = new ByteArrayOutputStream(EXPECTED.length * 3)) {
      Assert.assertEquals(
-          expected.length * 3,
+          EXPECTED.length * 3,
          CompressionUtils.gunzip(
              new ZeroRemainingInputStream(
                  new ByteArrayInputStream(tripleGzByteStream.toByteArray())
@@ -520,10 +520,10 @@ public class CompressionUtilsTest
              )
          )
      );
      final byte[] found = bos.toByteArray();
-      Assert.assertEquals(expected.length * 3, found.length);
-      Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 0, expected.length * 1));
-      Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 1, expected.length * 2));
-      Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 2, expected.length * 3));
+      Assert.assertEquals(EXPECTED.length * 3, found.length);
+      Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 0, EXPECTED.length * 1));
+      Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 1, EXPECTED.length * 2));
+      Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 2, EXPECTED.length * 3));
    }
  }
@@ -704,7 +704,7 @@ public class CompressionUtilsTest
    @Override
    public int read(byte b[]) throws IOException
    {
-      final int len = Math.min(b.length, gzBytes.length - pos.get() % gzBytes.length);
+      final int len = Math.min(b.length, GZ_BYTES.length - pos.get() % GZ_BYTES.length);
      pos.addAndGet(len);
      return read(b, 0, len);
    }
@@ -719,7 +719,7 @@ public class CompressionUtilsTest
    @Override
    public int read(byte b[], int off, int len) throws IOException
    {
-      final int l = Math.min(len, gzBytes.length - pos.get() % gzBytes.length);
+      final int l = Math.min(len, GZ_BYTES.length - pos.get() % GZ_BYTES.length);
      pos.addAndGet(l);
      return super.read(b, off, l);
    }
diff --git a/core/src/test/java/org/apache/druid/java/util/common/RetryUtilsTest.java b/core/src/test/java/org/apache/druid/java/util/common/RetryUtilsTest.java
index a9114d87ca8..b82d6bb1ae0 100644
--- a/core/src/test/java/org/apache/druid/java/util/common/RetryUtilsTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/common/RetryUtilsTest.java
@@ -28,7 +28,7 @@ import java.util.concurrent.atomic.AtomicInteger;
 public class RetryUtilsTest
 {
-  private static final Predicate isTransient = new Predicate()
+  private static final Predicate IS_TRANSIENT = new Predicate()
  {
    @Override
    public boolean apply(Throwable e)
@@ -46,7 +46,7 @@ public class RetryUtilsTest
          count.incrementAndGet();
          return "hey";
        },
-        isTransient,
+        IS_TRANSIENT,
        2
    );
    Assert.assertEquals("result", "hey", result);
@@ -64,7 +64,7 @@ public class RetryUtilsTest
          count.incrementAndGet();
          throw new IOException("what");
        },
-        isTransient,
+        IS_TRANSIENT,
        2
    );
  }
@@ -87,7 +87,7 @@ public class RetryUtilsTest
            throw new IOException("what");
          }
        },
-        isTransient,
+        IS_TRANSIENT,
        3
    );
    Assert.assertEquals("result", "hey", result);
@@ -108,7 +108,7 @@ public class RetryUtilsTest
            throw new IOException("uhh");
          }
        },
-        isTransient,
+        IS_TRANSIENT,
        3
    );
  }
diff --git a/core/src/test/java/org/apache/druid/java/util/common/lifecycle/LifecycleTest.java b/core/src/test/java/org/apache/druid/java/util/common/lifecycle/LifecycleTest.java
index 26b19afcf52..db6aee1f0e0 100644
--- a/core/src/test/java/org/apache/druid/java/util/common/lifecycle/LifecycleTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/common/lifecycle/LifecycleTest.java
@@ -42,7 +42,7 @@ import java.util.concurrent.atomic.AtomicLong;
 */
 public class LifecycleTest
 {
-  private static final Lifecycle.Handler dummyHandler = new Lifecycle.Handler()
+  private static final Lifecycle.Handler DUMMY_HANDLER = new Lifecycle.Handler()
  {
    @Override
    public void start()
@@ -319,7 +319,7 @@ public class LifecycleTest
    reachedStop.await();
 
    try {
-      lifecycle.addHandler(dummyHandler);
+      lifecycle.addHandler(DUMMY_HANDLER);
      Assert.fail("Expected exception");
    }
    catch (IllegalStateException e) {
@@ -327,7 +327,7 @@ public class LifecycleTest
    }
 
    try {
-      lifecycle.addMaybeStartHandler(dummyHandler);
+      lifecycle.addMaybeStartHandler(DUMMY_HANDLER);
      Assert.fail("Expected exception");
    }
    catch (IllegalStateException e) {
diff --git a/core/src/test/java/org/apache/druid/java/util/common/parsers/FlatTextFormatParserTest.java b/core/src/test/java/org/apache/druid/java/util/common/parsers/FlatTextFormatParserTest.java
index 503bac79cdf..571a0d359b8 100644
--- a/core/src/test/java/org/apache/druid/java/util/common/parsers/FlatTextFormatParserTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/common/parsers/FlatTextFormatParserTest.java
@@ -49,7 +49,7 @@ public class FlatTextFormatParserTest
    );
  }
 
-  private static final FlatTextFormatParserFactory parserFactory = new FlatTextFormatParserFactory();
+  private static final FlatTextFormatParserFactory PARSER_FACTORY = new FlatTextFormatParserFactory();
 
  @Rule
  public ExpectedException expectedException = ExpectedException.none();
@@ -65,7 +65,7 @@
  public void testValidHeader()
  {
    final String header = concat(format, "time", "value1", "value2");
-    final Parser parser = parserFactory.get(format, header);
+    final Parser parser = PARSER_FACTORY.get(format, header);
    Assert.assertEquals(ImmutableList.of("time", "value1", "value2"), parser.getFieldNames());
  }
@@ -77,14 +77,14 @@
    expectedException.expect(ParseException.class);
    expectedException.expectMessage(StringUtils.format("Unable to parse header [%s]", header));
 
-    parserFactory.get(format, header);
+    PARSER_FACTORY.get(format, header);
  }
 
  @Test
  public void testWithHeader()
  {
    final String header = concat(format, "time", "value1", "value2");
-    final Parser parser = parserFactory.get(format, header);
+    final Parser parser = PARSER_FACTORY.get(format, header);
    final String body = concat(format, "hello", "world", "foo");
    final Map jsonMap = parser.parseToMap(body);
    Assert.assertEquals(
@@ -97,7 +97,7 @@
  @Test
  public void testWithoutHeader()
  {
-    final Parser parser = parserFactory.get(format);
+    final Parser parser = PARSER_FACTORY.get(format);
    final String body = concat(format, "hello", "world", "foo");
    final Map jsonMap = parser.parseToMap(body);
    Assert.assertEquals(
@@ -111,7 +111,7 @@
  public void testWithSkipHeaderRows()
  {
    final int skipHeaderRows = 2;
-    final Parser parser = parserFactory.get(format, false, skipHeaderRows);
+    final Parser parser = PARSER_FACTORY.get(format, false, skipHeaderRows);
    parser.startFileFromBeginning();
    final String[] body = new String[]{
        concat(format, "header", "line", "1"),
@@ -133,7 +133,7 @@
  @Test
  public void testWithHeaderRow()
  {
-    final Parser parser = parserFactory.get(format, true, 0);
+    final Parser parser = PARSER_FACTORY.get(format, true, 0);
    parser.startFileFromBeginning();
    final String[] body = new String[]{
        concat(format, "time", "value1", "value2"),
@@ -151,7 +151,7 @@
  @Test
  public void testWithHeaderRowOfEmptyColumns()
  {
-    final Parser parser = parserFactory.get(format, true, 0);
+    final Parser parser = PARSER_FACTORY.get(format, true, 0);
    parser.startFileFromBeginning();
    final String[] body = new String[]{
        concat(format, "time", "", "value2", ""),
@@ -169,7 +169,7 @@
  @Test
  public void testWithDifferentHeaderRows()
  {
-    final Parser parser = parserFactory.get(format, true, 0);
+    final Parser parser = PARSER_FACTORY.get(format, true, 0);
    parser.startFileFromBeginning();
    final String[] body = new String[]{
        concat(format, "time", "value1", "value2"),
@@ -206,7 +206,7 @@
    );
 
    final int skipHeaderRows = 2;
-    final Parser parser = parserFactory.get(format, false, skipHeaderRows);
+    final Parser parser = PARSER_FACTORY.get(format, false, skipHeaderRows);
    final String[] body = new String[]{
        concat(format, "header", "line", "1"),
        concat(format, "header", "line", "2"),
diff --git a/core/src/test/java/org/apache/druid/java/util/common/parsers/JSONPathParserTest.java b/core/src/test/java/org/apache/druid/java/util/common/parsers/JSONPathParserTest.java
index b4ee2849daa..0238e6b1173 100644
--- a/core/src/test/java/org/apache/druid/java/util/common/parsers/JSONPathParserTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/common/parsers/JSONPathParserTest.java
@@ -32,13 +32,13 @@ import java.util.Map;
 public class JSONPathParserTest
 {
-  private static final String json =
+  private static final String JSON =
      "{\"one\": \"foo\", \"two\" : [\"bar\", \"baz\"], \"three\" : \"qux\", \"four\" : null}";
-  private static final String numbersJson =
+  private static final String NUMBERS_JSON =
      "{\"five\" : 5.0, \"six\" : 6, \"many\" : 1234567878900, \"toomany\" : 1234567890000000000000}";
-  private static final String whackyCharacterJson =
+  private static final String WHACKY_CHARACTER_JSON =
      "{\"one\": \"foo\\uD900\"}";
-  private static final String nestedJson =
+  private static final String NESTED_JSON =
      "{\"simpleVal\":\"text\", \"ignore_me\":[1, {\"x\":2}], \"blah\":[4,5,6], \"newmet\":5, " +
      "\"foo\":{\"bar1\":\"aaa\", \"bar2\":\"bbb\"}, " +
      "\"baz\":[1,2,3], \"timestamp\":\"2999\", \"foo.bar1\":\"Hello world!\", " +
@@ -47,7 +47,7 @@ public class JSONPathParserTest
      "\"testMapConvert\":{\"big\": 1234567890000000000000, \"big2\":{\"big2\":1234567890000000000000}}, " +
      "\"testEmptyList\": [], " +
      "\"hey\":[{\"barx\":\"asdf\"}], \"met\":{\"a\":[7,8,9]}}";
-  private static final String notJson = "***@#%R#*(TG@(*H(#@(#@((H#(@TH@(#TH(@SDHGKJDSKJFBSBJK";
+  private static final String NOT_JSON = "***@#%R#*(TG@(*H(#@(#@((H#(@TH@(#TH(@SDHGKJDSKJFBSBJK";
 
  @Rule
  public ExpectedException thrown = ExpectedException.none();
@@ -57,7 +57,7 @@
  {
    List fields = new ArrayList<>();
    final Parser jsonParser = new JSONPathParser(new JSONPathSpec(true, fields), null);
-    final Map jsonMap = jsonParser.parseToMap(json);
+    final Map jsonMap = jsonParser.parseToMap(JSON);
    Assert.assertEquals(
        "jsonMap",
        ImmutableMap.of("one", "foo", "two", ImmutableList.of("bar", "baz"), "three", "qux"),
@@ -70,7 +70,7 @@
  {
    List fields = new ArrayList<>();
    final Parser jsonParser = new JSONPathParser(new JSONPathSpec(true, fields), null);
-    final Map jsonMap = jsonParser.parseToMap(numbersJson);
+    final Map jsonMap = jsonParser.parseToMap(NUMBERS_JSON);
    Assert.assertEquals(
        "jsonMap",
        ImmutableMap.of("five", 5.0, "six", 6L, "many", 1234567878900L, "toomany", 1.23456789E21),
@@ -83,7 +83,7 @@
  {
    List fields = new ArrayList<>();
    final Parser jsonParser = new JSONPathParser(new JSONPathSpec(true, fields), null);
-    final Map jsonMap = jsonParser.parseToMap(whackyCharacterJson);
+    final Map jsonMap = jsonParser.parseToMap(WHACKY_CHARACTER_JSON);
    Assert.assertEquals(
        "jsonMap",
        ImmutableMap.of("one", "foo?"),
@@ -113,7 +113,7 @@
    final Parser jsonParser = new JSONPathParser(new JSONPathSpec(true, fields), null);
 
-    final Map jsonMap = jsonParser.parseToMap(nestedJson);
+    final Map jsonMap = jsonParser.parseToMap(NESTED_JSON);
 
    // Root fields
    Assert.assertEquals(ImmutableList.of(1L, 2L, 3L), jsonMap.get("baz"));
@@ -174,7 +174,7 @@
    fields.add(new JSONPathFieldSpec(JSONPathFieldType.JQ, "jq-met-array", ".met.a"));
 
    final Parser jsonParser = new JSONPathParser(new JSONPathSpec(false, fields), null);
-    final Map jsonMap = jsonParser.parseToMap(nestedJson);
+    final Map jsonMap = jsonParser.parseToMap(NESTED_JSON);
 
    // Root fields
    Assert.assertEquals("text", jsonMap.get("simpleVal"));
@@ -211,7 +211,7 @@
    thrown.expectMessage("Cannot have duplicate field definition: met-array");
 
    final Parser jsonParser = new JSONPathParser(new JSONPathSpec(false, fields), null);
-    jsonParser.parseToMap(nestedJson);
+    jsonParser.parseToMap(NESTED_JSON);
  }
 
  @Test
@@ -225,7 +225,7 @@
    thrown.expectMessage("Cannot have duplicate field definition: met-array");
    final Parser jsonParser = new JSONPathParser(new JSONPathSpec(false, fields), null);
-    jsonParser.parseToMap(nestedJson);
+    jsonParser.parseToMap(NESTED_JSON);
  }
 
  @Test
@@ -234,9 +234,9 @@
  {
    List fields = new ArrayList<>();
 
    thrown.expect(ParseException.class);
-    thrown.expectMessage("Unable to parse row [" + notJson + "]");
+    thrown.expectMessage("Unable to parse row [" + NOT_JSON + "]");
 
    final Parser jsonParser = new JSONPathParser(new JSONPathSpec(true, fields), null);
-    jsonParser.parseToMap(notJson);
+    jsonParser.parseToMap(NOT_JSON);
  }
 }
diff --git a/core/src/test/java/org/apache/druid/java/util/emitter/core/EmitterTest.java b/core/src/test/java/org/apache/druid/java/util/emitter/core/EmitterTest.java
index fd4235127de..591fceaee0c 100644
--- a/core/src/test/java/org/apache/druid/java/util/emitter/core/EmitterTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/emitter/core/EmitterTest.java
@@ -57,7 +57,7 @@ import java.util.stream.Stream;
 */
 public class EmitterTest
 {
-  private static final ObjectMapper jsonMapper = new ObjectMapper();
+  private static final ObjectMapper JSON_MAPPER = new ObjectMapper();
  public static String TARGET_URL = "http://metrics.foo.bar/";
  public static final Response OK_RESPONSE = Stream
      .of(responseBuilder(HttpVersion.HTTP_1_1, HttpResponseStatus.CREATED))
@@ -120,7 +120,7 @@ public class EmitterTest
    HttpPostEmitter emitter = new HttpPostEmitter(
        config,
        httpClient,
-        jsonMapper
+        JSON_MAPPER
    );
    emitter.start();
    return emitter;
@@ -135,7 +135,7 @@ public class EmitterTest
    HttpPostEmitter emitter = new HttpPostEmitter(
        config,
        httpClient,
-        jsonMapper
+        JSON_MAPPER
    );
    emitter.start();
    return emitter;
@@ -150,7 +150,7 @@ public class EmitterTest
    props.setProperty("org.apache.druid.java.util.emitter.flushCount", String.valueOf(size));
 
    Lifecycle lifecycle = new Lifecycle();
-    Emitter emitter = Emitters.create(props, httpClient, jsonMapper, lifecycle);
+    Emitter emitter = Emitters.create(props, httpClient, JSON_MAPPER, lifecycle);
    Assert.assertTrue(StringUtils.format(
        "HttpPostEmitter emitter should be created, but found %s",
        emitter.getClass().getName()
@@ -169,7 +169,7 @@ public class EmitterTest
    HttpPostEmitter emitter = new HttpPostEmitter(
        config,
        httpClient,
-        jsonMapper
+        JSON_MAPPER
    );
    emitter.start();
    return emitter;
@@ -187,7 +187,7 @@ public class EmitterTest
    HttpPostEmitter emitter = new HttpPostEmitter(
        config,
        httpClient,
-        jsonMapper
+        JSON_MAPPER
    );
    emitter.start();
    return emitter;
@@ -203,7 +203,7 @@ public class EmitterTest
    HttpPostEmitter emitter = new HttpPostEmitter(
        config,
        httpClient,
-        jsonMapper
+        JSON_MAPPER
    );
    emitter.start();
    return emitter;
@@ -232,8 +232,8 @@ public class EmitterTest
    Assert.assertEquals(
        StringUtils.format(
            "[%s,%s]\n",
-            jsonMapper.writeValueAsString(events.get(0)),
-            jsonMapper.writeValueAsString(events.get(1))
+            JSON_MAPPER.writeValueAsString(events.get(0)),
+            JSON_MAPPER.writeValueAsString(events.get(1))
        ),
        StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString()
    );
@@ -274,8 +274,8 @@ public class EmitterTest
    Assert.assertEquals(
        StringUtils.format(
            "[%s,%s]\n",
-            jsonMapper.writeValueAsString(events.get(0)),
-            jsonMapper.writeValueAsString(events.get(1))
+            JSON_MAPPER.writeValueAsString(events.get(0)),
+            JSON_MAPPER.writeValueAsString(events.get(1))
        ),
        StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString()
    );
@@ -459,8 +459,8 @@ public class EmitterTest
    Assert.assertEquals(
        StringUtils.format(
            "%s\n%s\n",
-            jsonMapper.writeValueAsString(events.get(0)),
-            jsonMapper.writeValueAsString(events.get(1))
+            JSON_MAPPER.writeValueAsString(events.get(0)),
+            JSON_MAPPER.writeValueAsString(events.get(1))
        ),
        StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString()
    );
@@ -513,8 +513,8 @@ public class EmitterTest
    Assert.assertEquals(
        StringUtils.format(
            "[%s,%s]\n",
-            jsonMapper.writeValueAsString(events.get(counter.getAndIncrement())),
-            jsonMapper.writeValueAsString(events.get(counter.getAndIncrement()))
+            JSON_MAPPER.writeValueAsString(events.get(counter.getAndIncrement())),
+            JSON_MAPPER.writeValueAsString(events.get(counter.getAndIncrement()))
        ),
        StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString()
    );
@@ -576,8 +576,8 @@ public class EmitterTest
    Assert.assertEquals(
        StringUtils.format(
            "[%s,%s]\n",
-            jsonMapper.writeValueAsString(events.get(0)),
-            jsonMapper.writeValueAsString(events.get(1))
+            JSON_MAPPER.writeValueAsString(events.get(0)),
+            JSON_MAPPER.writeValueAsString(events.get(1))
        ),
        baos.toString(StandardCharsets.UTF_8.name())
    );
diff --git a/core/src/test/java/org/apache/druid/java/util/emitter/core/HttpEmitterTest.java b/core/src/test/java/org/apache/druid/java/util/emitter/core/HttpEmitterTest.java
index 07acd4a88ba..df726657c6a 100644
--- a/core/src/test/java/org/apache/druid/java/util/emitter/core/HttpEmitterTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/emitter/core/HttpEmitterTest.java
@@ -35,7 +35,7 @@ import java.util.concurrent.atomic.AtomicLong;
 public class HttpEmitterTest
 {
  private final MockHttpClient httpClient = new MockHttpClient();
-  private static final ObjectMapper objectMapper = new ObjectMapper()
+  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper()
  {
    @Override
    public byte[] writeValueAsBytes(Object value)
@@ -71,7 +71,7 @@ public class HttpEmitterTest
        .setBatchingStrategy(BatchingStrategy.ONLY_EVENTS)
        .setHttpTimeoutAllowanceFactor(timeoutAllowanceFactor)
        .build();
-    final HttpPostEmitter emitter = new HttpPostEmitter(config, httpClient, objectMapper);
+    final HttpPostEmitter emitter = new HttpPostEmitter(config, httpClient, OBJECT_MAPPER);
 
    long startMs = System.currentTimeMillis();
    emitter.start();
diff --git a/core/src/test/java/org/apache/druid/java/util/emitter/core/HttpPostEmitterStressTest.java b/core/src/test/java/org/apache/druid/java/util/emitter/core/HttpPostEmitterStressTest.java
index 94c9d5bc414..f2d2135ae0d 100644
--- a/core/src/test/java/org/apache/druid/java/util/emitter/core/HttpPostEmitterStressTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/emitter/core/HttpPostEmitterStressTest.java
@@ -42,7 +42,7 @@ import java.util.concurrent.ThreadLocalRandom;
 public class HttpPostEmitterStressTest
 {
  private static final int N = 10_000;
-  private static final ObjectMapper objectMapper = new ObjectMapper()
+  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper()
  {
    @Override
    public byte[] writeValueAsBytes(Object value)
@@ -64,7 +64,7 @@ public class HttpPostEmitterStressTest
        // For this test, we don't need any batches to be dropped, i. e. "gaps" in data
        .setBatchQueueSizeLimit(1000)
        .build();
-    final HttpPostEmitter emitter = new HttpPostEmitter(config, httpClient, objectMapper);
+    final HttpPostEmitter emitter = new HttpPostEmitter(config, httpClient, OBJECT_MAPPER);
    int nThreads = Runtime.getRuntime().availableProcessors() * 2;
    final List eventsPerThread = new ArrayList<>(nThreads);
    final List<List<Batch>> eventBatchesPerThread = new ArrayList<>(nThreads);
diff --git a/core/src/test/java/org/apache/druid/java/util/emitter/core/HttpPostEmitterTest.java b/core/src/test/java/org/apache/druid/java/util/emitter/core/HttpPostEmitterTest.java
index a1a6a3f732f..4283d124a4f 100644
--- a/core/src/test/java/org/apache/druid/java/util/emitter/core/HttpPostEmitterTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/emitter/core/HttpPostEmitterTest.java
@@ -35,7 +35,7 @@ import java.util.concurrent.atomic.AtomicReference;
 public class HttpPostEmitterTest
 {
-  private static final ObjectMapper objectMapper = new ObjectMapper()
+  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper()
  {
    @Override
    public byte[] writeValueAsBytes(Object value)
@@ -72,7 +72,7 @@ public class HttpPostEmitterTest
        .setMaxBatchSize(1024 * 1024)
        .setBatchQueueSizeLimit(1000)
        .build();
-    final HttpPostEmitter emitter = new HttpPostEmitter(config, httpClient, objectMapper);
+    final HttpPostEmitter emitter = new HttpPostEmitter(config, httpClient, OBJECT_MAPPER);
    emitter.start();
 
    // emit first event
diff --git a/core/src/test/java/org/apache/druid/java/util/emitter/core/ParametrizedUriEmitterTest.java b/core/src/test/java/org/apache/druid/java/util/emitter/core/ParametrizedUriEmitterTest.java
index db6660fb8f9..4710d5817d4 100644
--- a/core/src/test/java/org/apache/druid/java/util/emitter/core/ParametrizedUriEmitterTest.java
+++ b/core/src/test/java/org/apache/druid/java/util/emitter/core/ParametrizedUriEmitterTest.java
@@ -42,7 +42,7 @@ import java.util.Properties;
 public class ParametrizedUriEmitterTest
 {
-  private static final ObjectMapper jsonMapper = new ObjectMapper();
+  private static final ObjectMapper JSON_MAPPER = new ObjectMapper();
 
  private MockHttpClient httpClient;
  private Lifecycle lifecycle;
@@ -98,8 +98,8 @@ public class ParametrizedUriEmitterTest
    Assert.assertEquals(
        StringUtils.format(
            "[%s,%s]\n",
-            jsonMapper.writeValueAsString(events.get(0)),
-            jsonMapper.writeValueAsString(events.get(1))
+            JSON_MAPPER.writeValueAsString(events.get(0)),
+            JSON_MAPPER.writeValueAsString(events.get(1))
        ),
StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString() ); diff --git a/core/src/test/java/org/apache/druid/metadata/DefaultPasswordProviderTest.java b/core/src/test/java/org/apache/druid/metadata/DefaultPasswordProviderTest.java index 20278f026fa..83ab9473b2e 100644 --- a/core/src/test/java/org/apache/druid/metadata/DefaultPasswordProviderTest.java +++ b/core/src/test/java/org/apache/druid/metadata/DefaultPasswordProviderTest.java @@ -25,46 +25,46 @@ import org.junit.Test; public class DefaultPasswordProviderTest { - private static final String pwd = "nothing"; - private static final ObjectMapper jsonMapper = new ObjectMapper(); + private static final String PWD = "nothing"; + private static final ObjectMapper JSON_MAPPER = new ObjectMapper(); @Test public void testExplicitConstruction() { - DefaultPasswordProvider pp = new DefaultPasswordProvider(pwd); - Assert.assertEquals(pwd, pp.getPassword()); + DefaultPasswordProvider pp = new DefaultPasswordProvider(PWD); + Assert.assertEquals(PWD, pp.getPassword()); } @Test public void testFromStringConstruction() { - DefaultPasswordProvider pp = DefaultPasswordProvider.fromString(pwd); - Assert.assertEquals(pwd, pp.getPassword()); + DefaultPasswordProvider pp = DefaultPasswordProvider.fromString(PWD); + Assert.assertEquals(PWD, pp.getPassword()); } @Test public void testDeserializationFromJsonString() throws Exception { - PasswordProvider pp = jsonMapper.readValue("\"" + pwd + "\"", + PasswordProvider pp = JSON_MAPPER.readValue("\"" + PWD + "\"", PasswordProvider.class); - Assert.assertEquals(pwd, pp.getPassword()); + Assert.assertEquals(PWD, pp.getPassword()); } @Test public void testDeserializationFromJson() throws Exception { - PasswordProvider pp = jsonMapper.readValue( - "{\"type\": \"default\", \"password\": \"" + pwd + "\"}", + PasswordProvider pp = JSON_MAPPER.readValue( + "{\"type\": \"default\", \"password\": \"" + PWD + "\"}", PasswordProvider.class); - Assert.assertEquals(pwd, pp.getPassword()); + Assert.assertEquals(PWD, pp.getPassword()); } @Test public void testSerializationWithMixIn() throws Exception { - DefaultPasswordProvider pp = new DefaultPasswordProvider(pwd); - jsonMapper.addMixIn(PasswordProvider.class, PasswordProviderRedactionMixIn.class); - String valueAsString = jsonMapper.writeValueAsString(pp); + DefaultPasswordProvider pp = new DefaultPasswordProvider(PWD); + JSON_MAPPER.addMixIn(PasswordProvider.class, PasswordProviderRedactionMixIn.class); + String valueAsString = JSON_MAPPER.writeValueAsString(pp); Assert.assertEquals("{\"type\":\"default\"}", valueAsString); } } diff --git a/core/src/test/java/org/apache/druid/metadata/EnvironmentVariablePasswordProviderTest.java b/core/src/test/java/org/apache/druid/metadata/EnvironmentVariablePasswordProviderTest.java index 36a6789a8c7..fd0b6030048 100644 --- a/core/src/test/java/org/apache/druid/metadata/EnvironmentVariablePasswordProviderTest.java +++ b/core/src/test/java/org/apache/druid/metadata/EnvironmentVariablePasswordProviderTest.java @@ -27,16 +27,16 @@ import java.io.IOException; public class EnvironmentVariablePasswordProviderTest { - private static final ObjectMapper jsonMapper = new ObjectMapper(); + private static final ObjectMapper JSON_MAPPER = new ObjectMapper(); @Test public void testSerde() throws IOException { String providerString = "{\"type\": \"environment\", \"variable\" : \"test\"}"; - PasswordProvider provider = jsonMapper.readValue(providerString, PasswordProvider.class); + PasswordProvider provider = 
JSON_MAPPER.readValue(providerString, PasswordProvider.class); Assert.assertTrue(provider instanceof EnvironmentVariablePasswordProvider); Assert.assertEquals("test", ((EnvironmentVariablePasswordProvider) provider).getVariable()); - PasswordProvider serde = jsonMapper.readValue(jsonMapper.writeValueAsString(provider), PasswordProvider.class); + PasswordProvider serde = JSON_MAPPER.readValue(JSON_MAPPER.writeValueAsString(provider), PasswordProvider.class); Assert.assertEquals(provider, serde); } } diff --git a/core/src/test/java/org/apache/druid/metadata/MetadataStorageConnectorConfigTest.java b/core/src/test/java/org/apache/druid/metadata/MetadataStorageConnectorConfigTest.java index 2999671500e..c5e401ebb74 100644 --- a/core/src/test/java/org/apache/druid/metadata/MetadataStorageConnectorConfigTest.java +++ b/core/src/test/java/org/apache/druid/metadata/MetadataStorageConnectorConfigTest.java @@ -39,7 +39,7 @@ public class MetadataStorageConnectorConfigTest ) throws IOException { - return jsonMapper.readValue( + return JSON_MAPPER.readValue( "{" + "\"createTables\": \"" + createTables + "\"," + "\"host\": \"" + host + "\"," + @@ -79,7 +79,7 @@ public class MetadataStorageConnectorConfigTest Assert.assertTrue(metadataStorageConnectorConfig.hashCode() == metadataStorageConnectorConfig2.hashCode()); } - private static final ObjectMapper jsonMapper = new ObjectMapper(); + private static final ObjectMapper JSON_MAPPER = new ObjectMapper(); @Test public void testMetadataStorageConnectionConfigSimplePassword() throws Exception @@ -119,7 +119,7 @@ public class MetadataStorageConnectorConfigTest String pwd ) throws Exception { - MetadataStorageConnectorConfig config = jsonMapper.readValue( + MetadataStorageConnectorConfig config = JSON_MAPPER.readValue( "{" + "\"createTables\": \"" + createTables + "\"," + "\"host\": \"" + host + "\"," + @@ -162,7 +162,7 @@ public class MetadataStorageConnectorConfigTest String pwd ) throws Exception { - MetadataStorageConnectorConfig config = jsonMapper.readValue( + MetadataStorageConnectorConfig config = JSON_MAPPER.readValue( "{" + "\"createTables\": \"" + createTables + "\"," + "\"host\": \"" + host + "\"," + diff --git a/core/src/test/java/org/apache/druid/timeline/DataSegmentTest.java b/core/src/test/java/org/apache/druid/timeline/DataSegmentTest.java index bc3cf902b9e..46958b3d05a 100644 --- a/core/src/test/java/org/apache/druid/timeline/DataSegmentTest.java +++ b/core/src/test/java/org/apache/druid/timeline/DataSegmentTest.java @@ -51,7 +51,7 @@ import java.util.TreeSet; */ public class DataSegmentTest { - private static final ObjectMapper mapper = new TestObjectMapper(); + private static final ObjectMapper MAPPER = new TestObjectMapper(); private static final int TEST_VERSION = 0x9; private static ShardSpec getShardSpec(final int partitionNum) @@ -107,7 +107,7 @@ public class DataSegmentTest { InjectableValues.Std injectableValues = new InjectableValues.Std(); injectableValues.addValue(DataSegment.PruneLoadSpecHolder.class, DataSegment.PruneLoadSpecHolder.DEFAULT); - mapper.setInjectableValues(injectableValues); + MAPPER.setInjectableValues(injectableValues); } @Test @@ -129,8 +129,8 @@ public class DataSegmentTest 1 ); - final Map objectMap = mapper.readValue( - mapper.writeValueAsString(segment), + final Map objectMap = MAPPER.readValue( + MAPPER.writeValueAsString(segment), JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT ); @@ -145,7 +145,7 @@ public class DataSegmentTest Assert.assertEquals(TEST_VERSION, objectMap.get("binaryVersion")); 
Assert.assertEquals(1, objectMap.get("size")); - DataSegment deserializedSegment = mapper.readValue(mapper.writeValueAsString(segment), DataSegment.class); + DataSegment deserializedSegment = MAPPER.readValue(MAPPER.writeValueAsString(segment), DataSegment.class); Assert.assertEquals(segment.getDataSource(), deserializedSegment.getDataSource()); Assert.assertEquals(segment.getInterval(), deserializedSegment.getInterval()); @@ -157,13 +157,13 @@ public class DataSegmentTest Assert.assertEquals(segment.getSize(), deserializedSegment.getSize()); Assert.assertEquals(segment.getId(), deserializedSegment.getId()); - deserializedSegment = mapper.readValue(mapper.writeValueAsString(segment), DataSegment.class); + deserializedSegment = MAPPER.readValue(MAPPER.writeValueAsString(segment), DataSegment.class); Assert.assertEquals(0, segment.compareTo(deserializedSegment)); - deserializedSegment = mapper.readValue(mapper.writeValueAsString(segment), DataSegment.class); + deserializedSegment = MAPPER.readValue(MAPPER.writeValueAsString(segment), DataSegment.class); Assert.assertEquals(0, deserializedSegment.compareTo(segment)); - deserializedSegment = mapper.readValue(mapper.writeValueAsString(segment), DataSegment.class); + deserializedSegment = MAPPER.readValue(MAPPER.writeValueAsString(segment), DataSegment.class); Assert.assertEquals(segment.hashCode(), deserializedSegment.hashCode()); } @@ -224,7 +224,7 @@ public class DataSegmentTest .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString()) .build(); - final DataSegment segment2 = mapper.readValue(mapper.writeValueAsString(segment), DataSegment.class); + final DataSegment segment2 = MAPPER.readValue(MAPPER.writeValueAsString(segment), DataSegment.class); Assert.assertEquals("empty dimensions", ImmutableList.of(), segment2.getDimensions()); Assert.assertEquals("empty metrics", ImmutableList.of(), segment2.getMetrics()); } diff --git a/core/src/test/java/org/apache/druid/timeline/SegmentWithOvershadowedStatusTest.java b/core/src/test/java/org/apache/druid/timeline/SegmentWithOvershadowedStatusTest.java index 050f9e04934..4e69b4e8fe2 100644 --- a/core/src/test/java/org/apache/druid/timeline/SegmentWithOvershadowedStatusTest.java +++ b/core/src/test/java/org/apache/druid/timeline/SegmentWithOvershadowedStatusTest.java @@ -43,7 +43,7 @@ import java.util.Map; public class SegmentWithOvershadowedStatusTest { - private static final ObjectMapper mapper = new TestObjectMapper(); + private static final ObjectMapper MAPPER = new TestObjectMapper(); private static final int TEST_VERSION = 0x9; @Before @@ -51,7 +51,7 @@ public class SegmentWithOvershadowedStatusTest { InjectableValues.Std injectableValues = new InjectableValues.Std(); injectableValues.addValue(DataSegment.PruneLoadSpecHolder.class, DataSegment.PruneLoadSpecHolder.DEFAULT); - mapper.setInjectableValues(injectableValues); + MAPPER.setInjectableValues(injectableValues); } @Test @@ -74,8 +74,8 @@ public class SegmentWithOvershadowedStatusTest final SegmentWithOvershadowedStatus segment = new SegmentWithOvershadowedStatus(dataSegment, false); - final Map objectMap = mapper.readValue( - mapper.writeValueAsString(segment), + final Map objectMap = MAPPER.readValue( + MAPPER.writeValueAsString(segment), JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT ); @@ -91,9 +91,9 @@ public class SegmentWithOvershadowedStatusTest Assert.assertEquals(1, objectMap.get("size")); Assert.assertEquals(false, objectMap.get("overshadowed")); - final String json = mapper.writeValueAsString(segment); + final String json = 
MAPPER.writeValueAsString(segment); - final TestSegmentWithOvershadowedStatus deserializedSegment = mapper.readValue( + final TestSegmentWithOvershadowedStatus deserializedSegment = MAPPER.readValue( json, TestSegmentWithOvershadowedStatus.class ); diff --git a/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPullerTest.java b/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPullerTest.java index 8884754a425..96f8d5e2016 100644 --- a/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPullerTest.java +++ b/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPullerTest.java @@ -39,8 +39,8 @@ public class AzureDataSegmentPullerTest extends EasyMockSupport { private static final String SEGMENT_FILE_NAME = "segment"; - private static final String containerName = "container"; - private static final String blobPath = "/path/to/storage/index.zip"; + private static final String CONTAINER_NAME = "container"; + private static final String BLOB_PATH = "/path/to/storage/index.zip"; private AzureStorage azureStorage; @Before @@ -58,13 +58,13 @@ public class AzureDataSegmentPullerTest extends EasyMockSupport try { final InputStream zipStream = new FileInputStream(pulledFile); - EasyMock.expect(azureStorage.getBlobInputStream(containerName, blobPath)).andReturn(zipStream); + EasyMock.expect(azureStorage.getBlobInputStream(CONTAINER_NAME, BLOB_PATH)).andReturn(zipStream); replayAll(); AzureDataSegmentPuller puller = new AzureDataSegmentPuller(azureStorage); - FileUtils.FileCopyResult result = puller.getSegmentFiles(containerName, blobPath, toDir); + FileUtils.FileCopyResult result = puller.getSegmentFiles(CONTAINER_NAME, BLOB_PATH, toDir); File expected = new File(toDir, SEGMENT_FILE_NAME); Assert.assertEquals(value.length(), result.size()); @@ -86,7 +86,7 @@ public class AzureDataSegmentPullerTest extends EasyMockSupport final File outDir = Files.createTempDirectory("druid").toFile(); try { - EasyMock.expect(azureStorage.getBlobInputStream(containerName, blobPath)).andThrow( + EasyMock.expect(azureStorage.getBlobInputStream(CONTAINER_NAME, BLOB_PATH)).andThrow( new URISyntaxException( "error", "error", @@ -98,7 +98,7 @@ public class AzureDataSegmentPullerTest extends EasyMockSupport AzureDataSegmentPuller puller = new AzureDataSegmentPuller(azureStorage); - puller.getSegmentFiles(containerName, blobPath, outDir); + puller.getSegmentFiles(CONTAINER_NAME, BLOB_PATH, outDir); Assert.assertFalse(outDir.exists()); diff --git a/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPusherTest.java b/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPusherTest.java index caa01e41ae1..2f1867df7d6 100644 --- a/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPusherTest.java +++ b/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPusherTest.java @@ -50,13 +50,13 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport @Rule public final TemporaryFolder tempFolder = new TemporaryFolder(); - private static final String containerName = "container"; - private static final String blobPath = "test/2015-04-12T00:00:00.000Z_2015-04-13T00:00:00.000Z/1/0/index.zip"; - private static final DataSegment dataSegment = new DataSegment( + private static 
final String CONTAINER_NAME = "container"; + private static final String BLOB_PATH = "test/2015-04-12T00:00:00.000Z_2015-04-13T00:00:00.000Z/1/0/index.zip"; + private static final DataSegment DATA_SEGMENT = new DataSegment( "test", Intervals.of("2015-04-12/2015-04-13"), "1", - ImmutableMap.of("containerName", containerName, "blobPath", blobPath), + ImmutableMap.of("containerName", CONTAINER_NAME, "blobPath", BLOB_PATH), null, null, NoneShardSpec.instance(), @@ -129,8 +129,8 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport { AzureDataSegmentPusher pusher = new AzureDataSegmentPusher(azureStorage, azureAccountConfig); - final String storageDir = pusher.getStorageDir(dataSegment, false); - final String azurePath = pusher.getAzurePath(dataSegment, false); + final String storageDir = pusher.getStorageDir(DATA_SEGMENT, false); + final String azurePath = pusher.getAzurePath(DATA_SEGMENT, false); Assert.assertEquals( StringUtils.format("%s/%s", storageDir, AzureStorageDruidModule.INDEX_ZIP_FILE_NAME), @@ -144,15 +144,15 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport AzureDataSegmentPusher pusher = new AzureDataSegmentPusher(azureStorage, azureAccountConfig); final int binaryVersion = 9; final File compressedSegmentData = new File("index.zip"); - final String azurePath = pusher.getAzurePath(dataSegment, false); + final String azurePath = pusher.getAzurePath(DATA_SEGMENT, false); - azureStorage.uploadBlob(compressedSegmentData, containerName, azurePath); + azureStorage.uploadBlob(compressedSegmentData, CONTAINER_NAME, azurePath); EasyMock.expectLastCall(); replayAll(); DataSegment pushedDataSegment = pusher.uploadDataSegment( - dataSegment, + DATA_SEGMENT, binaryVersion, 0, // empty file compressedSegmentData, @@ -180,7 +180,7 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport public void storageDirContainsNoColonsTest() { AzureDataSegmentPusher pusher = new AzureDataSegmentPusher(azureStorage, azureAccountConfig); - DataSegment withColons = dataSegment.withVersion("2018-01-05T14:54:09.295Z"); + DataSegment withColons = DATA_SEGMENT.withVersion("2018-01-05T14:54:09.295Z"); String segmentPath = pusher.getStorageDir(withColons, false); Assert.assertFalse("Path should not contain any colons", segmentPath.contains(":")); } diff --git a/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureTaskLogsTest.java b/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureTaskLogsTest.java index 0255923c7e6..d985901832c 100644 --- a/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureTaskLogsTest.java +++ b/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureTaskLogsTest.java @@ -39,10 +39,10 @@ import java.nio.charset.StandardCharsets; public class AzureTaskLogsTest extends EasyMockSupport { - private static final String container = "test"; - private static final String prefix = "test/log"; - private static final String taskid = "taskid"; - private static final AzureTaskLogsConfig azureTaskLogsConfig = new AzureTaskLogsConfig(container, prefix, 3); + private static final String CONTAINER = "test"; + private static final String PREFIX = "test/log"; + private static final String TASK_ID = "taskid"; + private static final AzureTaskLogsConfig AZURE_TASK_LOGS_CONFIG = new AzureTaskLogsConfig(CONTAINER, PREFIX, 3); private AzureStorage azureStorage; private AzureTaskLogs azureTaskLogs; @@ -51,7 +51,7 @@ public class AzureTaskLogsTest
extends EasyMockSupport public void before() { azureStorage = createMock(AzureStorage.class); - azureTaskLogs = new AzureTaskLogs(azureTaskLogsConfig, azureStorage); + azureTaskLogs = new AzureTaskLogs(AZURE_TASK_LOGS_CONFIG, azureStorage); } @@ -63,12 +63,12 @@ public class AzureTaskLogsTest extends EasyMockSupport try { final File logFile = new File(tmpDir, "log"); - azureStorage.uploadBlob(logFile, container, prefix + "/" + taskid + "/log"); + azureStorage.uploadBlob(logFile, CONTAINER, PREFIX + "/" + TASK_ID + "/log"); EasyMock.expectLastCall(); replayAll(); - azureTaskLogs.pushTaskLog(taskid, logFile); + azureTaskLogs.pushTaskLog(TASK_ID, logFile); verifyAll(); } @@ -82,16 +82,16 @@ public class AzureTaskLogsTest extends EasyMockSupport { final String testLog = "hello this is a log"; - final String blobPath = prefix + "/" + taskid + "/log"; - EasyMock.expect(azureStorage.getBlobExists(container, blobPath)).andReturn(true); - EasyMock.expect(azureStorage.getBlobLength(container, blobPath)).andReturn((long) testLog.length()); - EasyMock.expect(azureStorage.getBlobInputStream(container, blobPath)).andReturn( + final String blobPath = PREFIX + "/" + TASK_ID + "/log"; + EasyMock.expect(azureStorage.getBlobExists(CONTAINER, blobPath)).andReturn(true); + EasyMock.expect(azureStorage.getBlobLength(CONTAINER, blobPath)).andReturn((long) testLog.length()); + EasyMock.expect(azureStorage.getBlobInputStream(CONTAINER, blobPath)).andReturn( new ByteArrayInputStream(testLog.getBytes(StandardCharsets.UTF_8))); replayAll(); - final Optional byteSource = azureTaskLogs.streamTaskLog(taskid, 0); + final Optional byteSource = azureTaskLogs.streamTaskLog(TASK_ID, 0); final StringWriter writer = new StringWriter(); IOUtils.copy(byteSource.get().openStream(), writer, "UTF-8"); @@ -105,16 +105,16 @@ public class AzureTaskLogsTest extends EasyMockSupport { final String testLog = "hello this is a log"; - final String blobPath = prefix + "/" + taskid + "/log"; - EasyMock.expect(azureStorage.getBlobExists(container, blobPath)).andReturn(true); - EasyMock.expect(azureStorage.getBlobLength(container, blobPath)).andReturn((long) testLog.length()); - EasyMock.expect(azureStorage.getBlobInputStream(container, blobPath)).andReturn( + final String blobPath = PREFIX + "/" + TASK_ID + "/log"; + EasyMock.expect(azureStorage.getBlobExists(CONTAINER, blobPath)).andReturn(true); + EasyMock.expect(azureStorage.getBlobLength(CONTAINER, blobPath)).andReturn((long) testLog.length()); + EasyMock.expect(azureStorage.getBlobInputStream(CONTAINER, blobPath)).andReturn( new ByteArrayInputStream(testLog.getBytes(StandardCharsets.UTF_8))); replayAll(); - final Optional byteSource = azureTaskLogs.streamTaskLog(taskid, 5); + final Optional byteSource = azureTaskLogs.streamTaskLog(TASK_ID, 5); final StringWriter writer = new StringWriter(); IOUtils.copy(byteSource.get().openStream(), writer, "UTF-8"); @@ -128,16 +128,16 @@ public class AzureTaskLogsTest extends EasyMockSupport { final String testLog = "hello this is a log"; - final String blobPath = prefix + "/" + taskid + "/log"; - EasyMock.expect(azureStorage.getBlobExists(container, blobPath)).andReturn(true); - EasyMock.expect(azureStorage.getBlobLength(container, blobPath)).andReturn((long) testLog.length()); - EasyMock.expect(azureStorage.getBlobInputStream(container, blobPath)).andReturn( + final String blobPath = PREFIX + "/" + TASK_ID + "/log"; + EasyMock.expect(azureStorage.getBlobExists(CONTAINER, blobPath)).andReturn(true); + 
EasyMock.expect(azureStorage.getBlobLength(CONTAINER, blobPath)).andReturn((long) testLog.length()); + EasyMock.expect(azureStorage.getBlobInputStream(CONTAINER, blobPath)).andReturn( new ByteArrayInputStream(StringUtils.toUtf8(testLog))); replayAll(); - final Optional byteSource = azureTaskLogs.streamTaskLog(taskid, -3); + final Optional byteSource = azureTaskLogs.streamTaskLog(TASK_ID, -3); final StringWriter writer = new StringWriter(); IOUtils.copy(byteSource.get().openStream(), writer, "UTF-8"); diff --git a/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/ConciseBitMapFactory.java b/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/ConciseBitMapFactory.java index 540e5b234c9..aa0833ce9f2 100644 --- a/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/ConciseBitMapFactory.java +++ b/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/ConciseBitMapFactory.java @@ -25,7 +25,7 @@ import org.apache.druid.collections.bitmap.MutableBitmap; public class ConciseBitMapFactory implements BitMapFactory { - private static final BitmapFactory bitmapFactory = new ConciseBitmapFactory(); + private static final BitmapFactory BITMAP_FACTORY = new ConciseBitmapFactory(); public ConciseBitMapFactory() { @@ -34,7 +34,7 @@ public class ConciseBitMapFactory implements BitMapFactory @Override public MutableBitmap makeEmptyMutableBitmap() { - return bitmapFactory.makeEmptyMutableBitmap(); + return BITMAP_FACTORY.makeEmptyMutableBitmap(); } @Override diff --git a/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/JavaBitMapFactory.java b/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/JavaBitMapFactory.java index 0d17755be8f..2b4992c8180 100644 --- a/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/JavaBitMapFactory.java +++ b/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/JavaBitMapFactory.java @@ -25,7 +25,7 @@ import org.apache.druid.collections.bitmap.MutableBitmap; public class JavaBitMapFactory implements BitMapFactory { - private static final BitmapFactory bitmapFactory = new BitSetBitmapFactory(); + private static final BitmapFactory BITMAP_FACTORY = new BitSetBitmapFactory(); public JavaBitMapFactory() { @@ -34,7 +34,7 @@ public class JavaBitMapFactory implements BitMapFactory @Override public MutableBitmap makeEmptyMutableBitmap() { - return bitmapFactory.makeEmptyMutableBitmap(); + return BITMAP_FACTORY.makeEmptyMutableBitmap(); } @Override diff --git a/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/RoaringBitMapFactory.java b/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/RoaringBitMapFactory.java index f837b3ef209..48b9db19531 100644 --- a/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/RoaringBitMapFactory.java +++ b/extensions-contrib/distinctcount/src/main/java/org/apache/druid/query/aggregation/distinctcount/RoaringBitMapFactory.java @@ -25,7 +25,7 @@ import org.apache.druid.collections.bitmap.RoaringBitmapFactory; public class RoaringBitMapFactory implements BitMapFactory { - private static final BitmapFactory bitmapFactory = new RoaringBitmapFactory(); + private static final 
BitmapFactory BITMAP_FACTORY = new RoaringBitmapFactory(); public RoaringBitMapFactory() { @@ -34,7 +34,7 @@ public class RoaringBitMapFactory implements BitMapFactory @Override public MutableBitmap makeEmptyMutableBitmap() { - return bitmapFactory.makeEmptyMutableBitmap(); + return BITMAP_FACTORY.makeEmptyMutableBitmap(); } @Override diff --git a/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountGroupByQueryTest.java b/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountGroupByQueryTest.java index 96e7148d551..4ea3062c7f6 100644 --- a/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountGroupByQueryTest.java +++ b/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountGroupByQueryTest.java @@ -113,20 +113,20 @@ public class DistinctCountGroupByQueryTest ); GroupByQuery query = new GroupByQuery.Builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setGranularity(QueryRunnerTestHelper.allGran) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .setDimensions(new DefaultDimensionSpec( client_type, client_type )) - .setInterval(QueryRunnerTestHelper.fullOnIntervalSpec) + .setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setLimitSpec( new DefaultLimitSpec( Collections.singletonList(new OrderByColumnSpec(client_type, OrderByColumnSpec.Direction.DESCENDING)), 10 ) ) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new DistinctCountAggregatorFactory("UV", visitor_id, null)) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new DistinctCountAggregatorFactory("UV", visitor_id, null)) .build(); final Segment incrementalIndexSegment = new IncrementalIndexSegment(index, null); diff --git a/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java b/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java index 127fa9be493..bb911e818b9 100644 --- a/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java +++ b/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTimeseriesQueryTest.java @@ -86,12 +86,12 @@ public class DistinctCountTimeseriesQueryTest ); TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new DistinctCountAggregatorFactory("UV", visitor_id, null) ) ) diff --git a/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java b/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java index 9d3d32689f4..40280bcf2f3 100644 --- a/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java +++ 
b/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java @@ -115,14 +115,14 @@ public class DistinctCountTopNQueryTest ) ); - TopNQuery query = new TopNQueryBuilder().dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + TopNQuery query = new TopNQueryBuilder().dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .dimension(client_type) .metric("UV") .threshold(10) .aggregators( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new DistinctCountAggregatorFactory("UV", visitor_id, null) ) .build(); diff --git a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DerivativeDataSourceManager.java b/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DerivativeDataSourceManager.java index 345f4bfd7c6..5ba45af10b9 100644 --- a/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DerivativeDataSourceManager.java +++ b/extensions-contrib/materialized-view-selection/src/main/java/org/apache/druid/query/materializedview/DerivativeDataSourceManager.java @@ -69,7 +69,7 @@ import java.util.stream.Collectors; public class DerivativeDataSourceManager { private static final EmittingLogger log = new EmittingLogger(DerivativeDataSourceManager.class); - private static final AtomicReference>> derivativesRef = + private static final AtomicReference>> DERIVATIVES_REF = new AtomicReference<>(new ConcurrentHashMap<>()); private final MaterializedViewConfig config; private final Supplier dbTables; @@ -137,7 +137,7 @@ public class DerivativeDataSourceManager started = false; future.cancel(true); future = null; - derivativesRef.set(new ConcurrentHashMap<>()); + DERIVATIVES_REF.set(new ConcurrentHashMap<>()); exec.shutdownNow(); exec = null; } @@ -145,12 +145,12 @@ public class DerivativeDataSourceManager public static ImmutableSet getDerivatives(String datasource) { - return ImmutableSet.copyOf(derivativesRef.get().getOrDefault(datasource, new TreeSet<>())); + return ImmutableSet.copyOf(DERIVATIVES_REF.get().getOrDefault(datasource, new TreeSet<>())); } public static ImmutableMap> getAllDerivatives() { - return ImmutableMap.copyOf(derivativesRef.get()); + return ImmutableMap.copyOf(DERIVATIVES_REF.get()); } private void updateDerivatives() @@ -205,8 +205,8 @@ public class DerivativeDataSourceManager } ConcurrentHashMap> current; do { - current = derivativesRef.get(); - } while (!derivativesRef.compareAndSet(current, newDerivatives)); + current = DERIVATIVES_REF.get(); + } while (!DERIVATIVES_REF.compareAndSet(current, newDerivatives)); } /** diff --git a/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/DatasourceOptimizerTest.java b/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/DatasourceOptimizerTest.java index ee136c25ca2..bd070e3b24d 100644 --- a/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/DatasourceOptimizerTest.java +++ b/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/DatasourceOptimizerTest.java @@ -203,7 +203,7 @@ public class DatasourceOptimizerTest extends 
CuratorTestBase // build user query TopNQuery userQuery = new TopNQueryBuilder() .dataSource("base") - .granularity(QueryRunnerTestHelper.allGran) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension("dim1") .metric("cost") .threshold(4) @@ -214,7 +214,7 @@ public class DatasourceOptimizerTest extends CuratorTestBase List expectedQueryAfterOptimizing = Lists.newArrayList( new TopNQueryBuilder() .dataSource("derivative") - .granularity(QueryRunnerTestHelper.allGran) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension("dim1") .metric("cost") .threshold(4) @@ -223,7 +223,7 @@ public class DatasourceOptimizerTest extends CuratorTestBase .build(), new TopNQueryBuilder() .dataSource("base") - .granularity(QueryRunnerTestHelper.allGran) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension("dim1") .metric("cost") .threshold(4) diff --git a/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/MaterializedViewQueryQueryToolChestTest.java b/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/MaterializedViewQueryQueryToolChestTest.java index e3974eecb63..cff9ddcb8dd 100644 --- a/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/MaterializedViewQueryQueryToolChestTest.java +++ b/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/MaterializedViewQueryQueryToolChestTest.java @@ -44,10 +44,10 @@ public class MaterializedViewQueryQueryToolChestTest public void testMakePostComputeManipulatorFn() { TimeseriesQuery realQuery = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) - .aggregators(QueryRunnerTestHelper.rowsCount) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) + .aggregators(QueryRunnerTestHelper.ROWS_COUNT) .descending(true) .build(); MaterializedViewQuery materializedViewQuery = new MaterializedViewQuery(realQuery, null); @@ -87,7 +87,7 @@ public class MaterializedViewQueryQueryToolChestTest Assert.assertEquals(postResult.getTimestamp(), result.getTimestamp()); Assert.assertEquals(postResultMap.size(), 2); - Assert.assertEquals(postResultMap.get(QueryRunnerTestHelper.rowsCount.getName()), "metricvalue1"); + Assert.assertEquals(postResultMap.get(QueryRunnerTestHelper.ROWS_COUNT.getName()), "metricvalue1"); Assert.assertEquals(postResultMap.get("dim1"), "dimvalue1"); } } diff --git a/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/MaterializedViewQueryTest.java b/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/MaterializedViewQueryTest.java index 9936d2abab1..1432f519ef7 100644 --- a/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/MaterializedViewQueryTest.java +++ b/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/MaterializedViewQueryTest.java @@ -43,15 +43,15 @@ import java.io.IOException; public class MaterializedViewQueryTest { - private static final ObjectMapper jsonMapper = TestHelper.makeJsonMapper(); + private static final ObjectMapper JSON_MAPPER = TestHelper.makeJsonMapper(); private DataSourceOptimizer optimizer; @Before public void 
setUp() { - jsonMapper.registerSubtypes(new NamedType(MaterializedViewQuery.class, MaterializedViewQuery.TYPE)); + JSON_MAPPER.registerSubtypes(new NamedType(MaterializedViewQuery.class, MaterializedViewQuery.TYPE)); optimizer = EasyMock.createMock(DataSourceOptimizer.class); - jsonMapper.setInjectableValues( + JSON_MAPPER.setInjectableValues( new InjectableValues.Std() .addValue(ExprMacroTable.class.getName(), LookupEnabledTestExprMacroTable.INSTANCE) .addValue(DataSourceOptimizer.class, optimizer) @@ -62,16 +62,16 @@ public class MaterializedViewQueryTest public void testQuerySerialization() throws IOException { TopNQuery topNQuery = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(QueryRunnerTestHelper.indexMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( - QueryRunnerTestHelper.commonDoubleAggregators, + QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS, Lists.newArrayList( new DoubleMaxAggregatorFactory("maxIndex", "index"), new DoubleMinAggregatorFactory("minIndex", "index") @@ -79,14 +79,14 @@ public class MaterializedViewQueryTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); MaterializedViewQuery query = new MaterializedViewQuery(topNQuery, optimizer); - String json = jsonMapper.writeValueAsString(query); - Query serdeQuery = jsonMapper.readValue(json, Query.class); + String json = JSON_MAPPER.writeValueAsString(query); + Query serdeQuery = JSON_MAPPER.readValue(json, Query.class); Assert.assertEquals(query, serdeQuery); - Assert.assertEquals(new TableDataSource(QueryRunnerTestHelper.dataSource), query.getDataSource()); - Assert.assertEquals(QueryRunnerTestHelper.allGran, query.getGranularity()); - Assert.assertEquals(QueryRunnerTestHelper.fullOnIntervalSpec.getIntervals(), query.getIntervals()); + Assert.assertEquals(new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE), query.getDataSource()); + Assert.assertEquals(QueryRunnerTestHelper.ALL_GRAN, query.getGranularity()); + Assert.assertEquals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC.getIntervals(), query.getIntervals()); } } diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/MovingAverageIterableTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/MovingAverageIterableTest.java index e6de033e671..f7134c39bee 100644 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/MovingAverageIterableTest.java +++ b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/MovingAverageIterableTest.java @@ -60,22 +60,22 @@ public class MovingAverageIterableTest private static final String AGE = "age"; private static final String COUNTRY = "country"; - private static final Map dims1 = new HashMap<>(); - private static final Map dims2 = new HashMap<>(); - private static final Map dims3 = new HashMap<>(); + private static final Map DIMS1 = new HashMap<>(); + private static final Map DIMS2 = new HashMap<>(); + 
private static final Map DIMS3 = new HashMap<>(); static { - dims1.put(GENDER, "m"); - dims1.put(AGE, "10"); - dims1.put(COUNTRY, "US"); + DIMS1.put(GENDER, "m"); + DIMS1.put(AGE, "10"); + DIMS1.put(COUNTRY, "US"); - dims2.put(GENDER, "f"); - dims2.put(AGE, "8"); - dims2.put(COUNTRY, "US"); + DIMS2.put(GENDER, "f"); + DIMS2.put(AGE, "8"); + DIMS2.put(COUNTRY, "US"); - dims3.put(GENDER, "u"); - dims3.put(AGE, "5"); - dims3.put(COUNTRY, "UK"); + DIMS3.put(GENDER, "u"); + DIMS3.put(AGE, "5"); + DIMS3.put(COUNTRY, "UK"); } @Test @@ -90,16 +90,16 @@ public class MovingAverageIterableTest Sequence dayBuckets = Sequences.simple(Arrays.asList( new RowBucket(JAN_1, Arrays.asList( - new MapBasedRow(JAN_1, dims1), - new MapBasedRow(JAN_1, dims2) + new MapBasedRow(JAN_1, DIMS1), + new MapBasedRow(JAN_1, DIMS2) )), new RowBucket(JAN_2, Collections.singletonList( - new MapBasedRow(JAN_2, dims1) + new MapBasedRow(JAN_2, DIMS1) )), new RowBucket(JAN_3, Collections.emptyList()), new RowBucket(JAN_4, Arrays.asList( - new MapBasedRow(JAN_4, dims2), - new MapBasedRow(JAN_4, dims3) + new MapBasedRow(JAN_4, DIMS2), + new MapBasedRow(JAN_4, DIMS3) )) )); diff --git a/extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnGroupByTest.java b/extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnGroupByTest.java index 01631e3639c..5bd21f6ed6a 100644 --- a/extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnGroupByTest.java +++ b/extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnGroupByTest.java @@ -125,7 +125,7 @@ public class MapVirtualColumnGroupByTest public void testWithMapColumn() { final GroupByQuery query = new GroupByQuery( - new TableDataSource(QueryRunnerTestHelper.dataSource), + new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE), new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2011/2012"))), VirtualColumns.create(ImmutableList.of(new MapVirtualColumn("keys", "values", "params"))), null, @@ -148,7 +148,7 @@ public class MapVirtualColumnGroupByTest public void testWithSubColumn() { final GroupByQuery query = new GroupByQuery( - new TableDataSource(QueryRunnerTestHelper.dataSource), + new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE), new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2011/2012"))), VirtualColumns.create(ImmutableList.of(new MapVirtualColumn("keys", "values", "params"))), null, diff --git a/extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnSelectTest.java b/extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnSelectTest.java index 9139dfaf091..1f6155a254b 100644 --- a/extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnSelectTest.java +++ b/extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnSelectTest.java @@ -133,9 +133,9 @@ public class MapVirtualColumnSelectTest private Druids.SelectQueryBuilder testBuilder() { return Druids.newSelectQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .pagingSpec(new PagingSpec(null, 3)); } @@ -197,7 +197,7 @@ public class MapVirtualColumnSelectTest 
Assert.assertEquals(expected.size(), events.size()); for (int i = 0; i < events.size(); i++) { Map event = events.get(i).getEvent(); - event.remove(EventHolder.timestampKey); + event.remove(EventHolder.TIMESTAMP_KEY); Assert.assertEquals(expected.get(i), event); } } diff --git a/extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnTopNTest.java b/extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnTopNTest.java index af2368bdea5..38df988e92c 100644 --- a/extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnTopNTest.java +++ b/extensions-contrib/virtual-columns/src/test/java/org/apache/druid/segment/MapVirtualColumnTopNTest.java @@ -85,7 +85,7 @@ public class MapVirtualColumnTopNTest public void testWithMapColumn() { final TopNQuery query = new TopNQuery( - new TableDataSource(QueryRunnerTestHelper.dataSource), + new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE), VirtualColumns.create( ImmutableList.of( new MapVirtualColumn("keys", "values", "params") @@ -111,7 +111,7 @@ public class MapVirtualColumnTopNTest public void testWithSubColumn() { final TopNQuery query = new TopNQuery( - new TableDataSource(QueryRunnerTestHelper.dataSource), + new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE), VirtualColumns.create( ImmutableList.of( new MapVirtualColumn("keys", "values", "params") diff --git a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/quantiles/DoublesSketchComplexMetricSerde.java b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/quantiles/DoublesSketchComplexMetricSerde.java index 997b32461dc..8eb9dfd0d76 100644 --- a/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/quantiles/DoublesSketchComplexMetricSerde.java +++ b/extensions-core/datasketches/src/main/java/org/apache/druid/query/aggregation/datasketches/quantiles/DoublesSketchComplexMetricSerde.java @@ -39,7 +39,7 @@ import java.nio.ByteBuffer; public class DoublesSketchComplexMetricSerde extends ComplexMetricSerde { - private static final DoublesSketchObjectStrategy strategy = new DoublesSketchObjectStrategy(); + private static final DoublesSketchObjectStrategy STRATEGY = new DoublesSketchObjectStrategy(); @Override public String getTypeName() @@ -50,7 +50,7 @@ public class DoublesSketchComplexMetricSerde extends ComplexMetricSerde @Override public ObjectStrategy getObjectStrategy() { - return strategy; + return STRATEGY; } @Override @@ -105,7 +105,7 @@ public class DoublesSketchComplexMetricSerde extends ComplexMetricSerde @Override public void deserializeColumn(final ByteBuffer buffer, final ColumnBuilder builder) { - final GenericIndexed column = GenericIndexed.read(buffer, strategy, builder.getFileMapper()); + final GenericIndexed column = GenericIndexed.read(buffer, STRATEGY, builder.getFileMapper()); builder.setComplexColumnSupplier(new ComplexColumnPartSupplier(getTypeName(), column)); } diff --git a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/BloomFilterAggregatorTest.java b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/BloomFilterAggregatorTest.java index da4479beca9..5fedc844166 100644 --- a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/BloomFilterAggregatorTest.java +++ 
b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/BloomFilterAggregatorTest.java @@ -57,14 +57,14 @@ import java.util.stream.IntStream; public class BloomFilterAggregatorTest { - private static final String nullish = NullHandling.replaceWithDefault() ? "" : null; - private static final List values1 = dimensionValues( + private static final String NULLISH = NullHandling.replaceWithDefault() ? "" : null; + private static final List VALUES1 = dimensionValues( "a", "b", "c", "a", "a", - nullish, + NULLISH, "b", "b", "b", @@ -72,7 +72,7 @@ public class BloomFilterAggregatorTest "a", "a" ); - private static final List values2 = dimensionValues( + private static final List VALUES2 = dimensionValues( "a", "b", "c", @@ -80,17 +80,17 @@ public class BloomFilterAggregatorTest "a", "e", "b", - new String[]{nullish, "x"}, - new String[]{"x", nullish}, + new String[]{NULLISH, "x"}, + new String[]{"x", NULLISH}, new String[]{"y", "x"}, new String[]{"x", "y"}, new String[]{"x", "y", "a"} ); - private static final Double[] doubleValues1 = new Double[]{0.1, 1.5, 18.3, 0.1}; - private static final Float[] floatValues1 = new Float[]{0.4f, 0.8f, 23.2f}; - private static final Long[] longValues1 = new Long[]{10241L, 12312355L, 0L, 81L}; + private static final Double[] DOUBLE_VALUES1 = new Double[]{0.1, 1.5, 18.3, 0.1}; + private static final Float[] FLOAT_VALUES1 = new Float[]{0.4f, 0.8f, 23.2f}; + private static final Long[] LONG_VALUES1 = new Long[]{10241L, 12312355L, 0L, 81L}; - private static final int maxNumValues = 15; + private static final int MAX_NUM_VALUES = 15; private static BloomKFilter filter1; private static BloomKFilter filter2; @@ -104,31 +104,31 @@ public class BloomFilterAggregatorTest static { try { - filter1 = new BloomKFilter(maxNumValues); - filter2 = new BloomKFilter(maxNumValues); - BloomKFilter combinedValuesFilter = new BloomKFilter(maxNumValues); + filter1 = new BloomKFilter(MAX_NUM_VALUES); + filter2 = new BloomKFilter(MAX_NUM_VALUES); + BloomKFilter combinedValuesFilter = new BloomKFilter(MAX_NUM_VALUES); - createStringFilter(values1, filter1, combinedValuesFilter); - createStringFilter(values2, filter2, combinedValuesFilter); + createStringFilter(VALUES1, filter1, combinedValuesFilter); + createStringFilter(VALUES2, filter2, combinedValuesFilter); serializedFilter1 = filterToString(filter1); serializedFilter2 = filterToString(filter2); serializedCombinedFilter = filterToString(combinedValuesFilter); - BloomKFilter longFilter = new BloomKFilter(maxNumValues); - for (long val : longValues1) { + BloomKFilter longFilter = new BloomKFilter(MAX_NUM_VALUES); + for (long val : LONG_VALUES1) { longFilter.addLong(val); } serializedLongFilter = filterToString(longFilter); - BloomKFilter floatFilter = new BloomKFilter(maxNumValues); - for (float val : floatValues1) { + BloomKFilter floatFilter = new BloomKFilter(MAX_NUM_VALUES); + for (float val : FLOAT_VALUES1) { floatFilter.addFloat(val); } serializedFloatFilter = filterToString(floatFilter); - BloomKFilter doubleFilter = new BloomKFilter(maxNumValues); - for (double val : doubleValues1) { + BloomKFilter doubleFilter = new BloomKFilter(MAX_NUM_VALUES); + for (double val : DOUBLE_VALUES1) { doubleFilter.addDouble(val); } serializedDoubleFilter = filterToString(doubleFilter); @@ -232,7 +232,7 @@ public class BloomFilterAggregatorTest valueAggregatorFactory = new BloomFilterAggregatorFactory( "billy", dimSpec, - maxNumValues + MAX_NUM_VALUES ); } @@ -240,10 +240,10 @@ public class 
BloomFilterAggregatorTest @Test public void testAggregateValues() throws IOException { - DimensionSelector dimSelector = new CardinalityAggregatorTest.TestDimensionSelector(values1, null); - StringBloomFilterAggregator agg = new StringBloomFilterAggregator(dimSelector, maxNumValues, true); + DimensionSelector dimSelector = new CardinalityAggregatorTest.TestDimensionSelector(VALUES1, null); + StringBloomFilterAggregator agg = new StringBloomFilterAggregator(dimSelector, MAX_NUM_VALUES, true); - for (int i = 0; i < values1.size(); ++i) { + for (int i = 0; i < VALUES1.size(); ++i) { aggregateDimension(Collections.singletonList(dimSelector), agg); } @@ -257,10 +257,10 @@ public class BloomFilterAggregatorTest @Test public void testAggregateLongValues() throws IOException { - TestLongColumnSelector selector = new TestLongColumnSelector(Arrays.asList(longValues1)); - LongBloomFilterAggregator agg = new LongBloomFilterAggregator(selector, maxNumValues, true); + TestLongColumnSelector selector = new TestLongColumnSelector(Arrays.asList(LONG_VALUES1)); + LongBloomFilterAggregator agg = new LongBloomFilterAggregator(selector, MAX_NUM_VALUES, true); - for (Long ignored : longValues1) { + for (Long ignored : LONG_VALUES1) { aggregateColumn(Collections.singletonList(selector), agg); } @@ -274,10 +274,10 @@ public class BloomFilterAggregatorTest @Test public void testAggregateFloatValues() throws IOException { - TestFloatColumnSelector selector = new TestFloatColumnSelector(Arrays.asList(floatValues1)); - FloatBloomFilterAggregator agg = new FloatBloomFilterAggregator(selector, maxNumValues, true); + TestFloatColumnSelector selector = new TestFloatColumnSelector(Arrays.asList(FLOAT_VALUES1)); + FloatBloomFilterAggregator agg = new FloatBloomFilterAggregator(selector, MAX_NUM_VALUES, true); - for (Float ignored : floatValues1) { + for (Float ignored : FLOAT_VALUES1) { aggregateColumn(Collections.singletonList(selector), agg); } @@ -291,10 +291,10 @@ public class BloomFilterAggregatorTest @Test public void testAggregateDoubleValues() throws IOException { - TestDoubleColumnSelector selector = new TestDoubleColumnSelector(Arrays.asList(doubleValues1)); - DoubleBloomFilterAggregator agg = new DoubleBloomFilterAggregator(selector, maxNumValues, true); + TestDoubleColumnSelector selector = new TestDoubleColumnSelector(Arrays.asList(DOUBLE_VALUES1)); + DoubleBloomFilterAggregator agg = new DoubleBloomFilterAggregator(selector, MAX_NUM_VALUES, true); - for (Double ignored : doubleValues1) { + for (Double ignored : DOUBLE_VALUES1) { aggregateColumn(Collections.singletonList(selector), agg); } @@ -308,8 +308,8 @@ public class BloomFilterAggregatorTest @Test public void testBufferAggregateStringValues() throws IOException { - DimensionSelector dimSelector = new CardinalityAggregatorTest.TestDimensionSelector(values2, null); - StringBloomFilterAggregator agg = new StringBloomFilterAggregator(dimSelector, maxNumValues, true); + DimensionSelector dimSelector = new CardinalityAggregatorTest.TestDimensionSelector(VALUES2, null); + StringBloomFilterAggregator agg = new StringBloomFilterAggregator(dimSelector, MAX_NUM_VALUES, true); int maxSize = valueAggregatorFactory.getMaxIntermediateSizeWithNulls(); ByteBuffer buf = ByteBuffer.allocate(maxSize + 64); @@ -318,7 +318,7 @@ public class BloomFilterAggregatorTest agg.init(buf, pos); - for (int i = 0; i < values2.size(); ++i) { + for (int i = 0; i < VALUES2.size(); ++i) { bufferAggregateDimension(Collections.singletonList(dimSelector), agg, buf, pos); } BloomKFilter 
bloomKFilter = BloomKFilter.deserialize( @@ -331,8 +331,8 @@ public class BloomFilterAggregatorTest @Test public void testBufferAggregateLongValues() throws IOException { - TestLongColumnSelector selector = new TestLongColumnSelector(Arrays.asList(longValues1)); - LongBloomFilterAggregator agg = new LongBloomFilterAggregator(selector, maxNumValues, true); + TestLongColumnSelector selector = new TestLongColumnSelector(Arrays.asList(LONG_VALUES1)); + LongBloomFilterAggregator agg = new LongBloomFilterAggregator(selector, MAX_NUM_VALUES, true); int maxSize = valueAggregatorFactory.getMaxIntermediateSizeWithNulls(); ByteBuffer buf = ByteBuffer.allocate(maxSize + 64); @@ -341,7 +341,7 @@ public class BloomFilterAggregatorTest agg.init(buf, pos); - IntStream.range(0, longValues1.length) + IntStream.range(0, LONG_VALUES1.length) .forEach(i -> bufferAggregateColumn(Collections.singletonList(selector), agg, buf, pos)); BloomKFilter bloomKFilter = BloomKFilter.deserialize( (ByteBuffer) valueAggregatorFactory.finalizeComputation(agg.get(buf, pos)) @@ -353,8 +353,8 @@ public class BloomFilterAggregatorTest @Test public void testBufferAggregateFloatValues() throws IOException { - TestFloatColumnSelector selector = new TestFloatColumnSelector(Arrays.asList(floatValues1)); - FloatBloomFilterAggregator agg = new FloatBloomFilterAggregator(selector, maxNumValues, true); + TestFloatColumnSelector selector = new TestFloatColumnSelector(Arrays.asList(FLOAT_VALUES1)); + FloatBloomFilterAggregator agg = new FloatBloomFilterAggregator(selector, MAX_NUM_VALUES, true); int maxSize = valueAggregatorFactory.getMaxIntermediateSizeWithNulls(); ByteBuffer buf = ByteBuffer.allocate(maxSize + 64); @@ -363,7 +363,7 @@ public class BloomFilterAggregatorTest agg.init(buf, pos); - IntStream.range(0, floatValues1.length) + IntStream.range(0, FLOAT_VALUES1.length) .forEach(i -> bufferAggregateColumn(Collections.singletonList(selector), agg, buf, pos)); BloomKFilter bloomKFilter = BloomKFilter.deserialize( (ByteBuffer) valueAggregatorFactory.finalizeComputation(agg.get(buf, pos)) @@ -375,8 +375,8 @@ public class BloomFilterAggregatorTest @Test public void testBufferAggregateDoubleValues() throws IOException { - TestDoubleColumnSelector selector = new TestDoubleColumnSelector(Arrays.asList(doubleValues1)); - DoubleBloomFilterAggregator agg = new DoubleBloomFilterAggregator(selector, maxNumValues, true); + TestDoubleColumnSelector selector = new TestDoubleColumnSelector(Arrays.asList(DOUBLE_VALUES1)); + DoubleBloomFilterAggregator agg = new DoubleBloomFilterAggregator(selector, MAX_NUM_VALUES, true); int maxSize = valueAggregatorFactory.getMaxIntermediateSizeWithNulls(); ByteBuffer buf = ByteBuffer.allocate(maxSize + 64); @@ -385,7 +385,7 @@ public class BloomFilterAggregatorTest agg.init(buf, pos); - IntStream.range(0, doubleValues1.length) + IntStream.range(0, DOUBLE_VALUES1.length) .forEach(i -> bufferAggregateColumn(Collections.singletonList(selector), agg, buf, pos)); BloomKFilter bloomKFilter = BloomKFilter.deserialize( (ByteBuffer) valueAggregatorFactory.finalizeComputation(agg.get(buf, pos)) @@ -397,16 +397,16 @@ public class BloomFilterAggregatorTest @Test public void testCombineValues() throws IOException { - DimensionSelector dimSelector1 = new CardinalityAggregatorTest.TestDimensionSelector(values1, null); - DimensionSelector dimSelector2 = new CardinalityAggregatorTest.TestDimensionSelector(values2, null); + DimensionSelector dimSelector1 = new CardinalityAggregatorTest.TestDimensionSelector(VALUES1, null); + 
DimensionSelector dimSelector2 = new CardinalityAggregatorTest.TestDimensionSelector(VALUES2, null); - StringBloomFilterAggregator agg1 = new StringBloomFilterAggregator(dimSelector1, maxNumValues, true); - StringBloomFilterAggregator agg2 = new StringBloomFilterAggregator(dimSelector2, maxNumValues, true); + StringBloomFilterAggregator agg1 = new StringBloomFilterAggregator(dimSelector1, MAX_NUM_VALUES, true); + StringBloomFilterAggregator agg2 = new StringBloomFilterAggregator(dimSelector2, MAX_NUM_VALUES, true); - for (int i = 0; i < values1.size(); ++i) { + for (int i = 0; i < VALUES1.size(); ++i) { aggregateDimension(Collections.singletonList(dimSelector1), agg1); } - for (int i = 0; i < values2.size(); ++i) { + for (int i = 0; i < VALUES2.size(); ++i) { aggregateDimension(Collections.singletonList(dimSelector2), agg2); } @@ -435,7 +435,7 @@ public class BloomFilterAggregatorTest ); BloomFilterMergeAggregator mergeAggregator = - new BloomFilterMergeAggregator(mergeDim, maxNumValues, true); + new BloomFilterMergeAggregator(mergeDim, MAX_NUM_VALUES, true); for (int i = 0; i < 2; ++i) { aggregateColumn(Collections.singletonList(mergeDim), mergeAggregator); @@ -461,7 +461,7 @@ public class BloomFilterAggregatorTest ); BloomFilterMergeAggregator mergeAggregator = - new BloomFilterMergeAggregator(mergeDim, maxNumValues, true); + new BloomFilterMergeAggregator(mergeDim, MAX_NUM_VALUES, true); for (int i = 0; i < 2; ++i) { aggregateColumn(Collections.singletonList(mergeDim), mergeAggregator); @@ -486,7 +486,7 @@ public class BloomFilterAggregatorTest ) ); - BloomFilterMergeAggregator mergeAggregator = new BloomFilterMergeAggregator(mergeDim, maxNumValues, false); + BloomFilterMergeAggregator mergeAggregator = new BloomFilterMergeAggregator(mergeDim, MAX_NUM_VALUES, false); int maxSize = valueAggregatorFactory.getCombiningFactory().getMaxIntermediateSizeWithNulls(); ByteBuffer buf = ByteBuffer.allocate(maxSize + 64); @@ -513,7 +513,7 @@ public class BloomFilterAggregatorTest BloomFilterAggregatorFactory factory = new BloomFilterAggregatorFactory( "billy", new DefaultDimensionSpec("b", "b"), - maxNumValues + MAX_NUM_VALUES ); ObjectMapper objectMapper = new DefaultObjectMapper(); new BloomFilterExtensionModule().getJacksonModules().forEach(objectMapper::registerModule); @@ -536,7 +536,7 @@ public class BloomFilterAggregatorTest BloomFilterAggregatorFactory factory2 = new BloomFilterAggregatorFactory( "billy", new ExtractionDimensionSpec("b", "b", new RegexDimExtractionFn(".*", false, null)), - maxNumValues + MAX_NUM_VALUES ); Assert.assertEquals( @@ -547,7 +547,7 @@ public class BloomFilterAggregatorTest BloomFilterAggregatorFactory factory3 = new BloomFilterAggregatorFactory( "billy", new RegexFilteredDimensionSpec(new DefaultDimensionSpec("a", "a"), ".*"), - maxNumValues + MAX_NUM_VALUES ); Assert.assertEquals( factory3, diff --git a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/BloomFilterGroupByQueryTest.java b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/BloomFilterGroupByQueryTest.java index 003dc27ca58..2a2de448fc0 100644 --- a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/BloomFilterGroupByQueryTest.java +++ b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/BloomFilterGroupByQueryTest.java @@ -53,13 +53,13 @@ import java.util.List; @RunWith(Parameterized.class) public class BloomFilterGroupByQueryTest { - private 
static final BloomFilterExtensionModule module = new BloomFilterExtensionModule(); + private static final BloomFilterExtensionModule MODULE = new BloomFilterExtensionModule(); static { // throwaway, just using to properly initialize jackson modules Guice.createInjector( binder -> binder.bind(Key.get(ObjectMapper.class, Json.class)).toInstance(TestHelper.makeJsonMapper()), - module + MODULE ); } @@ -72,7 +72,7 @@ public class BloomFilterGroupByQueryTest public BloomFilterGroupByQueryTest(final GroupByQueryConfig config) { helper = AggregationTestHelper.createGroupByQueryAggregationTestHelper( - Lists.newArrayList(module.getJacksonModules()), + Lists.newArrayList(MODULE.getJacksonModules()), config, tempFolder ); diff --git a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/sql/BloomFilterSqlAggregatorTest.java b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/sql/BloomFilterSqlAggregatorTest.java index f8f648952d5..9f5c624bde1 100644 --- a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/sql/BloomFilterSqlAggregatorTest.java +++ b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/aggregation/bloom/sql/BloomFilterSqlAggregatorTest.java @@ -96,7 +96,7 @@ public class BloomFilterSqlAggregatorTest { private static final int TEST_NUM_ENTRIES = 1000; private static AuthenticationResult authenticationResult = CalciteTests.REGULAR_USER_AUTH_RESULT; - private static final Injector injector = Guice.createInjector( + private static final Injector INJECTOR = Guice.createInjector( binder -> { binder.bind(Key.get(ObjectMapper.class, Json.class)).toInstance(TestHelper.makeJsonMapper()); binder.bind(LookupExtractorFactoryContainerProvider.class).toInstance( @@ -111,7 +111,7 @@ public class BloomFilterSqlAggregatorTest ); private static ObjectMapper jsonMapper = - injector + INJECTOR .getInstance(Key.get(ObjectMapper.class, Json.class)) .registerModules(Collections.singletonList(new BloomFilterSerializersModule())); diff --git a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java index fd9f5f5b667..aa717a07e2e 100644 --- a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java +++ b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/sql/BloomDimFilterSqlTest.java @@ -64,7 +64,7 @@ import java.util.Map; public class BloomDimFilterSqlTest extends BaseCalciteQueryTest { - private static final Injector injector = Guice.createInjector( + private static final Injector INJECTOR = Guice.createInjector( binder -> { binder.bind(Key.get(ObjectMapper.class, Json.class)).toInstance(TestHelper.makeJsonMapper()); binder.bind(LookupExtractorFactoryContainerProvider.class).toInstance( @@ -80,7 +80,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest ); private static ObjectMapper jsonMapper = - injector + INJECTOR .getInstance(Key.get(ObjectMapper.class, Json.class)) .registerModules(Collections.singletonList(new BloomFilterSerializersModule())); @@ -88,10 +88,10 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest { final List exprMacros = new ArrayList<>(); for (Class clazz : ExpressionModule.EXPR_MACROS) { - exprMacros.add(injector.getInstance(clazz)); + exprMacros.add(INJECTOR.getInstance(clazz)); } 
- exprMacros.add(injector.getInstance(BloomFilterExprMacro.class)); - exprMacros.add(injector.getInstance(LookupExprMacro.class)); + exprMacros.add(INJECTOR.getInstance(BloomFilterExprMacro.class)); + exprMacros.add(INJECTOR.getInstance(LookupExprMacro.class)); return new ExprMacroTable(exprMacros); } @@ -278,7 +278,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest { final DruidOperatorTable operatorTable = new DruidOperatorTable( ImmutableSet.of(), - ImmutableSet.of(injector.getInstance(BloomFilterOperatorConversion.class)) + ImmutableSet.of(INJECTOR.getInstance(BloomFilterOperatorConversion.class)) ); return getResults( plannerConfig, diff --git a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogramGroupByQueryTest.java b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogramGroupByQueryTest.java index 245fde24a27..d25b7ed7227 100644 --- a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogramGroupByQueryTest.java +++ b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogramGroupByQueryTest.java @@ -52,7 +52,7 @@ import java.util.List; @RunWith(Parameterized.class) public class ApproximateHistogramGroupByQueryTest { - private static final Closer resourceCloser = Closer.create(); + private static final Closer RESOURCE_CLOSER = Closer.create(); private final QueryRunner runner; private final GroupByQueryRunnerFactory factory; @@ -124,7 +124,7 @@ public class ApproximateHistogramGroupByQueryTest config ); final GroupByQueryRunnerFactory factory = factoryAndCloser.lhs; - resourceCloser.register(factoryAndCloser.rhs); + RESOURCE_CLOSER.register(factoryAndCloser.rhs); for (QueryRunner runner : QueryRunnerTestHelper.makeQueryRunners(factory)) { final String testName = StringUtils.format( "config=%s, runner=%s", @@ -152,7 +152,7 @@ public class ApproximateHistogramGroupByQueryTest @After public void teardown() throws IOException { - resourceCloser.close(); + RESOURCE_CLOSER.close(); } @Test @@ -169,18 +169,18 @@ public class ApproximateHistogramGroupByQueryTest ); GroupByQuery query = new GroupByQuery.Builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setGranularity(QueryRunnerTestHelper.allGran).setDimensions(new DefaultDimensionSpec( - QueryRunnerTestHelper.marketDimension, + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN).setDimensions(new DefaultDimensionSpec( + QueryRunnerTestHelper.MARKET_DIMENSION, "marketalias" )) - .setInterval(QueryRunnerTestHelper.fullOnIntervalSpec) + .setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setLimitSpec( new DefaultLimitSpec( Collections.singletonList(new OrderByColumnSpec("marketalias", OrderByColumnSpec.Direction.DESCENDING)), 1 ) - ).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, aggFactory) + ).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, aggFactory) .setPostAggregatorSpecs( Collections.singletonList( new QuantilePostAggregator("quantile", "apphisto", 0.5f) @@ -230,18 +230,18 @@ public class ApproximateHistogramGroupByQueryTest ); GroupByQuery query = new GroupByQuery.Builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setGranularity(QueryRunnerTestHelper.allGran).setDimensions(new DefaultDimensionSpec( - QueryRunnerTestHelper.marketDimension, + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + 
.setGranularity(QueryRunnerTestHelper.ALL_GRAN).setDimensions(new DefaultDimensionSpec( + QueryRunnerTestHelper.MARKET_DIMENSION, "marketalias" )) - .setInterval(QueryRunnerTestHelper.fullOnIntervalSpec) + .setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setLimitSpec( new DefaultLimitSpec( Collections.singletonList(new OrderByColumnSpec("marketalias", OrderByColumnSpec.Direction.DESCENDING)), 1 ) - ).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, aggFactory) + ).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, aggFactory) .setPostAggregatorSpecs( Collections.singletonList( new QuantilePostAggregator("quantile", "quantile", 0.5f) diff --git a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogramTopNQueryTest.java b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogramTopNQueryTest.java index 28806afbdd1..fc07a29f6c8 100644 --- a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogramTopNQueryTest.java +++ b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/ApproximateHistogramTopNQueryTest.java @@ -54,12 +54,12 @@ import java.util.Map; @RunWith(Parameterized.class) public class ApproximateHistogramTopNQueryTest { - private static final Closer resourceCloser = Closer.create(); + private static final Closer RESOURCE_CLOSER = Closer.create(); @AfterClass public static void teardown() throws IOException { - resourceCloser.close(); + RESOURCE_CLOSER.close(); } @Parameterized.Parameters(name = "{0}") @@ -70,8 +70,8 @@ public class ApproximateHistogramTopNQueryTest "TopNQueryRunnerFactory-bufferPool", () -> ByteBuffer.allocate(2000) ); - resourceCloser.register(defaultPool); - resourceCloser.register(customPool); + RESOURCE_CLOSER.register(defaultPool); + RESOURCE_CLOSER.register(customPool); return QueryRunnerTestHelper.transformToConstructionFeeder( Iterables.concat( @@ -122,16 +122,16 @@ public class ApproximateHistogramTopNQueryTest ); TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) .metric(QueryRunnerTestHelper.dependentPostAggMetric) .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( - QueryRunnerTestHelper.commonDoubleAggregators, + QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS, Lists.newArrayList( new DoubleMaxAggregatorFactory("maxIndex", "index"), new DoubleMinAggregatorFactory("minIndex", "index"), @@ -141,8 +141,8 @@ public class ApproximateHistogramTopNQueryTest ) ) .postAggregators( - QueryRunnerTestHelper.addRowsIndexConstant, - QueryRunnerTestHelper.dependentPostAgg, + QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT, + QueryRunnerTestHelper.DEPENDENT_POST_AGG, new QuantilePostAggregator("quantile", "apphisto", 0.5f) ) .build(); @@ -153,7 +153,7 @@ public class ApproximateHistogramTopNQueryTest new TopNResultValue( Arrays.<Map<String, Object>>asList( ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "total_market") + .put(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market") .put("rows", 186L) .put("index", 215679.82879638672D) .put("addRowsIndexConstant", 
215866.82879638672D) @@ -184,7 +184,7 @@ public class ApproximateHistogramTopNQueryTest ) .build(), ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "upfront") + .put(QueryRunnerTestHelper.MARKET_DIMENSION, "upfront") .put("rows", 186L) .put("index", 192046.1060180664D) .put("addRowsIndexConstant", 192233.1060180664D) @@ -215,7 +215,7 @@ public class ApproximateHistogramTopNQueryTest ) .build(), ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "spot") + .put(QueryRunnerTestHelper.MARKET_DIMENSION, "spot") .put("rows", 837L) .put("index", 95606.57232284546D) .put("addRowsIndexConstant", 96444.57232284546D) diff --git a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/FixedBucketsHistogramGroupByQueryTest.java b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/FixedBucketsHistogramGroupByQueryTest.java index 929a3469fc0..03301f5f8a6 100644 --- a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/FixedBucketsHistogramGroupByQueryTest.java +++ b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/FixedBucketsHistogramGroupByQueryTest.java @@ -52,7 +52,7 @@ import java.util.List; @RunWith(Parameterized.class) public class FixedBucketsHistogramGroupByQueryTest { - private static final Closer resourceCloser = Closer.create(); + private static final Closer RESOURCE_CLOSER = Closer.create(); private final QueryRunner runner; private final GroupByQueryRunnerFactory factory; @@ -124,7 +124,7 @@ public class FixedBucketsHistogramGroupByQueryTest config ); final GroupByQueryRunnerFactory factory = factoryAndCloser.lhs; - resourceCloser.register(factoryAndCloser.rhs); + RESOURCE_CLOSER.register(factoryAndCloser.rhs); for (QueryRunner runner : QueryRunnerTestHelper.makeQueryRunners(factory)) { final String testName = StringUtils.format( "config=%s, runner=%s", @@ -153,7 +153,7 @@ public class FixedBucketsHistogramGroupByQueryTest @After public void teardown() throws IOException { - resourceCloser.close(); + RESOURCE_CLOSER.close(); } @Test @@ -170,18 +170,18 @@ public class FixedBucketsHistogramGroupByQueryTest ); GroupByQuery query = new GroupByQuery.Builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setGranularity(QueryRunnerTestHelper.allGran).setDimensions(new DefaultDimensionSpec( - QueryRunnerTestHelper.marketDimension, + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN).setDimensions(new DefaultDimensionSpec( + QueryRunnerTestHelper.MARKET_DIMENSION, "marketalias" )) - .setInterval(QueryRunnerTestHelper.fullOnInterval) + .setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL) .setLimitSpec( new DefaultLimitSpec( Collections.singletonList(new OrderByColumnSpec("marketalias", OrderByColumnSpec.Direction.DESCENDING)), 1 ) - ).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, aggFactory) + ).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, aggFactory) .setPostAggregatorSpecs( Collections.singletonList( new QuantilePostAggregator("quantile", "histo", 0.5f) @@ -231,18 +231,18 @@ public class FixedBucketsHistogramGroupByQueryTest ); GroupByQuery query = new GroupByQuery.Builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setGranularity(QueryRunnerTestHelper.allGran).setDimensions(new DefaultDimensionSpec( - QueryRunnerTestHelper.marketDimension, + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + 
.setGranularity(QueryRunnerTestHelper.ALL_GRAN).setDimensions(new DefaultDimensionSpec( + QueryRunnerTestHelper.MARKET_DIMENSION, "marketalias" )) - .setInterval(QueryRunnerTestHelper.fullOnInterval) + .setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL) .setLimitSpec( new DefaultLimitSpec( Collections.singletonList(new OrderByColumnSpec("marketalias", OrderByColumnSpec.Direction.DESCENDING)), 1 ) - ).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, aggFactory) + ).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, aggFactory) .setPostAggregatorSpecs( Collections.singletonList( new QuantilePostAggregator("quantile", "quantile", 0.5f) diff --git a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/FixedBucketsHistogramTopNQueryTest.java b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/FixedBucketsHistogramTopNQueryTest.java index 7e12ee697ac..bab48fbbaed 100644 --- a/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/FixedBucketsHistogramTopNQueryTest.java +++ b/extensions-core/histogram/src/test/java/org/apache/druid/query/aggregation/histogram/FixedBucketsHistogramTopNQueryTest.java @@ -54,12 +54,12 @@ import java.util.Map; @RunWith(Parameterized.class) public class FixedBucketsHistogramTopNQueryTest { - private static final Closer resourceCloser = Closer.create(); + private static final Closer RESOURCE_CLOSER = Closer.create(); @AfterClass public static void teardown() throws IOException { - resourceCloser.close(); + RESOURCE_CLOSER.close(); } @Parameterized.Parameters(name = "{0}") @@ -70,8 +70,8 @@ public class FixedBucketsHistogramTopNQueryTest "TopNQueryRunnerFactory-bufferPool", () -> ByteBuffer.allocate(2000) ); - resourceCloser.register(defaultPool); - resourceCloser.register(customPool); + RESOURCE_CLOSER.register(defaultPool); + RESOURCE_CLOSER.register(customPool); return QueryRunnerTestHelper.transformToConstructionFeeder( Iterables.concat( @@ -122,16 +122,16 @@ public class FixedBucketsHistogramTopNQueryTest ); TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) .metric(QueryRunnerTestHelper.dependentPostAggMetric) .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( - QueryRunnerTestHelper.commonDoubleAggregators, + QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS, Lists.newArrayList( new DoubleMaxAggregatorFactory("maxIndex", "index"), new DoubleMinAggregatorFactory("minIndex", "index"), @@ -141,8 +141,8 @@ public class FixedBucketsHistogramTopNQueryTest ) ) .postAggregators( - QueryRunnerTestHelper.addRowsIndexConstant, - QueryRunnerTestHelper.dependentPostAgg, + QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT, + QueryRunnerTestHelper.DEPENDENT_POST_AGG, new QuantilePostAggregator("quantile", "histo", 0.5f) ) .build(); @@ -153,7 +153,7 @@ public class FixedBucketsHistogramTopNQueryTest new TopNResultValue( Arrays.<Map<String, Object>>asList( ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "total_market") + .put(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market") .put("rows", 186L) .put("index", 215679.82879638672D) .put("addRowsIndexConstant", 
215866.82879638672D) @@ -180,7 +180,7 @@ public class FixedBucketsHistogramTopNQueryTest ) .build(), ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "upfront") + .put(QueryRunnerTestHelper.MARKET_DIMENSION, "upfront") .put("rows", 186L) .put("index", 192046.1060180664D) .put("addRowsIndexConstant", 192233.1060180664D) @@ -207,7 +207,7 @@ public class FixedBucketsHistogramTopNQueryTest ) .build(), ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "spot") + .put(QueryRunnerTestHelper.MARKET_DIMENSION, "spot") .put("rows", 837L) .put("index", 95606.57232284546D) .put("addRowsIndexConstant", 96444.57232284546D) diff --git a/extensions-core/kafka-extraction-namespace/src/test/java/org/apache/druid/query/lookup/TestKafkaExtractionCluster.java b/extensions-core/kafka-extraction-namespace/src/test/java/org/apache/druid/query/lookup/TestKafkaExtractionCluster.java index ac2ad60a743..f521654acc6 100644 --- a/extensions-core/kafka-extraction-namespace/src/test/java/org/apache/druid/query/lookup/TestKafkaExtractionCluster.java +++ b/extensions-core/kafka-extraction-namespace/src/test/java/org/apache/druid/query/lookup/TestKafkaExtractionCluster.java @@ -64,8 +64,8 @@ import java.util.concurrent.ThreadLocalRandom; public class TestKafkaExtractionCluster { private static final Logger log = new Logger(TestKafkaExtractionCluster.class); - private static final String topicName = "testTopic"; - private static final Map kafkaProperties = new HashMap<>(); + private static final String TOPIC_NAME = "testTopic"; + private static final Map KAFKA_PROPERTIES = new HashMap<>(); @Rule public TemporaryFolder temporaryFolder = new TemporaryFolder(); @@ -81,7 +81,7 @@ public class TestKafkaExtractionCluster private static List> generateRecords() { return ImmutableList.of( - new ProducerRecord<>(topicName, 0, + new ProducerRecord<>(TOPIC_NAME, 0, StringUtils.toUtf8("abcdefg"), StringUtils.toUtf8("abcdefg"))); } @@ -131,7 +131,7 @@ public class TestKafkaExtractionCluster final KafkaLookupExtractorFactory kafkaLookupExtractorFactory = new KafkaLookupExtractorFactory( null, - topicName, + TOPIC_NAME, consumerProperties ); @@ -149,7 +149,7 @@ public class TestKafkaExtractionCluster @Nonnull private Map getConsumerProperties() { - final Map props = new HashMap<>(kafkaProperties); + final Map props = new HashMap<>(KAFKA_PROPERTIES); int port = kafkaServer.socketServer().config().port(); props.put("bootstrap.servers", StringUtils.format("127.0.0.1:%d", port)); return props; @@ -168,7 +168,7 @@ public class TestKafkaExtractionCluster private KafkaConfig getBrokerProperties() throws IOException { final Properties serverProperties = new Properties(); - serverProperties.putAll(kafkaProperties); + serverProperties.putAll(KAFKA_PROPERTIES); serverProperties.put("broker.id", "0"); serverProperties.put("zookeeper.connect", zkServer.getConnectString()); serverProperties.put("port", String.valueOf(ThreadLocalRandom.current().nextInt(9999) + 10000)); @@ -193,13 +193,13 @@ public class TestKafkaExtractionCluster private Properties makeProducerProperties() { final Properties kafkaProducerProperties = new Properties(); - kafkaProducerProperties.putAll(kafkaProperties); + kafkaProducerProperties.putAll(KAFKA_PROPERTIES); int port = kafkaServer.socketServer().config().port(); kafkaProducerProperties.put("bootstrap.servers", StringUtils.format("127.0.0.1:%d", port)); kafkaProducerProperties.put("key.serializer", ByteArraySerializer.class.getName()); kafkaProducerProperties.put("value.serializer", 
ByteArraySerializer.class.getName()); kafkaProducerProperties.put("acks", "all"); - kafkaProperties.put("request.required.acks", "1"); + KAFKA_PROPERTIES.put("request.required.acks", "1"); return kafkaProducerProperties; } @@ -222,7 +222,7 @@ public class TestKafkaExtractionCluster long events = factory.getCompletedEventCount(); log.info("------------------------- Sending foo bar -------------------------------"); - producer.send(new ProducerRecord<>(topicName, StringUtils.toUtf8("foo"), StringUtils.toUtf8("bar"))); + producer.send(new ProducerRecord<>(TOPIC_NAME, StringUtils.toUtf8("foo"), StringUtils.toUtf8("bar"))); long start = System.currentTimeMillis(); while (events == factory.getCompletedEventCount()) { @@ -241,7 +241,7 @@ public class TestKafkaExtractionCluster events = factory.getCompletedEventCount(); log.info("------------------------- Sending baz bat -------------------------------"); - producer.send(new ProducerRecord<>(topicName, StringUtils.toUtf8("baz"), StringUtils.toUtf8("bat"))); + producer.send(new ProducerRecord<>(TOPIC_NAME, StringUtils.toUtf8("baz"), StringUtils.toUtf8("bat"))); while (events == factory.getCompletedEventCount()) { Thread.sleep(10); if (System.currentTimeMillis() > start + 60_000) { diff --git a/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaIndexTaskClientTest.java b/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaIndexTaskClientTest.java index 9a835f51c29..5090fb3ddd6 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaIndexTaskClientTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaIndexTaskClientTest.java @@ -72,7 +72,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport @Rule public ExpectedException expectedException = ExpectedException.none(); - private static final ObjectMapper objectMapper = new DefaultObjectMapper(); + private static final ObjectMapper OBJECT_MAPPER = new DefaultObjectMapper(); private static final String TEST_ID = "test-id"; private static final List TEST_IDS = Arrays.asList("test-id1", "test-id2", "test-id3", "test-id4"); private static final String TEST_HOST = "test-host"; @@ -111,7 +111,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport response = createMock(HttpResponse.class); headers = createMock(HttpHeaders.class); - client = new TestableKafkaIndexTaskClient(httpClient, objectMapper, taskInfoProvider); + client = new TestableKafkaIndexTaskClient(httpClient, OBJECT_MAPPER, taskInfoProvider); EasyMock.expect(taskInfoProvider.getTaskLocation(TEST_ID)) .andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT)) .anyTimes(); @@ -285,7 +285,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport @Test public void testGetCurrentOffsetsWithRetry() throws Exception { - client = new TestableKafkaIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 3); + client = new TestableKafkaIndexTaskClient(httpClient, OBJECT_MAPPER, taskInfoProvider, 3); Capture captured = Capture.newInstance(CaptureType.ALL); EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(6) @@ -330,7 +330,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport expectedException.expect(RuntimeException.class); expectedException.expectMessage("org.apache.druid.java.util.common.IOE: Received status [404]"); - client = new TestableKafkaIndexTaskClient(httpClient, objectMapper, 
taskInfoProvider, 2); + client = new TestableKafkaIndexTaskClient(httpClient, OBJECT_MAPPER, taskInfoProvider, 2); EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).anyTimes(); EasyMock.expect(responseHolder.getContent()).andReturn("").anyTimes(); @@ -385,7 +385,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport @Test public void testGetStartTime() throws Exception { - client = new TestableKafkaIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 2); + client = new TestableKafkaIndexTaskClient(httpClient, OBJECT_MAPPER, taskInfoProvider, 2); DateTime now = DateTimes.nowUtc(); Capture captured = Capture.newInstance(); diff --git a/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaRecordSupplierTest.java b/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaRecordSupplierTest.java index 476a1939c4b..c4739abc4de 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaRecordSupplierTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaRecordSupplierTest.java @@ -54,8 +54,8 @@ public class KafkaRecordSupplierTest private static long poll_timeout_millis = 1000; private static int pollRetry = 5; private static int topicPosFix = 0; - private static final ObjectMapper objectMapper = TestHelper.makeJsonMapper(); - + private static final ObjectMapper OBJECT_MAPPER = TestHelper.makeJsonMapper(); + private static TestingCluster zkServer; private static TestBroker kafkaServer; @@ -126,28 +126,28 @@ public class KafkaRecordSupplierTest ); }).collect(Collectors.toList()); } - + public static class TestKafkaDeserializer implements Deserializer { @Override public void configure(Map map, boolean b) { - + } - + @Override public void close() { - + } - + @Override public byte[] deserialize(String topic, byte[] data) { return data; } } - + @BeforeClass public static void setupClass() throws Exception { @@ -194,7 +194,7 @@ public class KafkaRecordSupplierTest ); KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier( - kafkaServer.consumerProperties(), objectMapper); + kafkaServer.consumerProperties(), OBJECT_MAPPER); Assert.assertTrue(recordSupplier.getAssignment().isEmpty()); @@ -205,77 +205,77 @@ public class KafkaRecordSupplierTest recordSupplier.close(); } - + @Test public void testSupplierSetupCustomDeserializer() throws ExecutionException, InterruptedException { - + // Insert data insertData(); - + Set<StreamPartition<Integer>> partitions = ImmutableSet.of( StreamPartition.of(topic, 0), StreamPartition.of(topic, 1) ); - + Map properties = kafkaServer.consumerProperties(); properties.put("key.deserializer", KafkaRecordSupplierTest.TestKafkaDeserializer.class.getName()); properties.put("value.deserializer", KafkaRecordSupplierTest.TestKafkaDeserializer.class.getName()); - + KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier( properties, - objectMapper + OBJECT_MAPPER ); - + Assert.assertTrue(recordSupplier.getAssignment().isEmpty()); - + recordSupplier.assign(partitions); - + Assert.assertEquals(partitions, recordSupplier.getAssignment()); Assert.assertEquals(ImmutableSet.of(0, 1), recordSupplier.getPartitionIds(topic)); - + recordSupplier.close(); } - + @Test public void testPollCustomDeserializer() throws InterruptedException, ExecutionException { - + // Insert data insertData(); - + Set<StreamPartition<Integer>> partitions = ImmutableSet.of( StreamPartition.of(topic, 0), StreamPartition.of(topic, 1) ); - + 
Map properties = kafkaServer.consumerProperties(); properties.put("key.deserializer", KafkaRecordSupplierTest.TestKafkaDeserializer.class.getName()); properties.put("value.deserializer", KafkaRecordSupplierTest.TestKafkaDeserializer.class.getName()); - + KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier( properties, - objectMapper + OBJECT_MAPPER ); - + recordSupplier.assign(partitions); recordSupplier.seekToEarliest(partitions); - + List<OrderedPartitionableRecord<Integer, Long>> initialRecords = new ArrayList<>(createOrderedPartitionableRecords()); - + List<OrderedPartitionableRecord<Integer, Long>> polledRecords = recordSupplier.poll(poll_timeout_millis); for (int i = 0; polledRecords.size() != initialRecords.size() && i < pollRetry; i++) { polledRecords.addAll(recordSupplier.poll(poll_timeout_millis)); Thread.sleep(200); } - + Assert.assertEquals(partitions, recordSupplier.getAssignment()); Assert.assertEquals(initialRecords.size(), polledRecords.size()); Assert.assertTrue(initialRecords.containsAll(polledRecords)); - + recordSupplier.close(); } - + @Test public void testPoll() throws InterruptedException, ExecutionException { @@ -289,7 +289,7 @@ public class KafkaRecordSupplierTest ); KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier( - kafkaServer.consumerProperties(), objectMapper); + kafkaServer.consumerProperties(), OBJECT_MAPPER); recordSupplier.assign(partitions); recordSupplier.seekToEarliest(partitions); @@ -330,7 +330,7 @@ public class KafkaRecordSupplierTest KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier( - kafkaServer.consumerProperties(), objectMapper); + kafkaServer.consumerProperties(), OBJECT_MAPPER); recordSupplier.assign(partitions); recordSupplier.seekToEarliest(partitions); @@ -401,7 +401,7 @@ public class KafkaRecordSupplierTest ); KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier( - kafkaServer.consumerProperties(), objectMapper); + kafkaServer.consumerProperties(), OBJECT_MAPPER); recordSupplier.assign(partitions); recordSupplier.seekToEarliest(partitions); @@ -444,7 +444,7 @@ public class KafkaRecordSupplierTest ); KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier( - kafkaServer.consumerProperties(), objectMapper); + kafkaServer.consumerProperties(), OBJECT_MAPPER); recordSupplier.assign(partitions); recordSupplier.seekToEarliest(partitions); @@ -477,7 +477,7 @@ public class KafkaRecordSupplierTest ); KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier( - kafkaServer.consumerProperties(), objectMapper); + kafkaServer.consumerProperties(), OBJECT_MAPPER); recordSupplier.assign(partitions); @@ -503,7 +503,7 @@ public class KafkaRecordSupplierTest ); KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier( - kafkaServer.consumerProperties(), objectMapper); + kafkaServer.consumerProperties(), OBJECT_MAPPER); recordSupplier.assign(partitions); recordSupplier.seekToEarliest(partitions); diff --git a/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaSamplerSpecTest.java b/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaSamplerSpecTest.java index dce37662364..26c36c2b9be 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaSamplerSpecTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaSamplerSpecTest.java @@ -62,11 +62,11 @@ import java.util.Map; public class KafkaSamplerSpecTest { - private static final ObjectMapper objectMapper = TestHelper.makeJsonMapper(); + private static final ObjectMapper 
OBJECT_MAPPER = TestHelper.makeJsonMapper(); private static final String TOPIC = "sampling"; private static final DataSchema DATA_SCHEMA = new DataSchema( "test_ds", - objectMapper.convertValue( + OBJECT_MAPPER.convertValue( new StringInputRowParser( new JSONParseSpec( new TimestampSpec("timestamp", "iso", null), @@ -94,7 +94,7 @@ public class KafkaSamplerSpecTest }, new UniformGranularitySpec(Granularities.DAY, Granularities.NONE, null), null, - objectMapper + OBJECT_MAPPER ); private static TestingCluster zkServer; @@ -167,8 +167,8 @@ public class KafkaSamplerSpecTest KafkaSamplerSpec samplerSpec = new KafkaSamplerSpec( supervisorSpec, new SamplerConfig(5, null, null, null), - new FirehoseSampler(objectMapper, new SamplerCache(MapCache.create(100000))), - objectMapper + new FirehoseSampler(OBJECT_MAPPER, new SamplerCache(MapCache.create(100000))), + OBJECT_MAPPER ); SamplerResponse response = samplerSpec.sample(); diff --git a/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java b/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java index 0d255b66d24..4dfa4722719 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java @@ -122,7 +122,7 @@ import java.util.concurrent.Executor; @RunWith(Parameterized.class) public class KafkaSupervisorTest extends EasyMockSupport { - private static final ObjectMapper objectMapper = TestHelper.makeJsonMapper(); + private static final ObjectMapper OBJECT_MAPPER = TestHelper.makeJsonMapper(); private static final String TOPIC_PREFIX = "testTopic"; private static final String DATASOURCE = "testDS"; private static final int NUM_PARTITIONS = 3; @@ -237,7 +237,7 @@ public class KafkaSupervisorTest extends EasyMockSupport final Map contexts = supervisor.createIndexTasks( 1, "seq", - objectMapper, + OBJECT_MAPPER, new TreeMap<>(), new KafkaIndexTaskIOConfig( 0, @@ -3393,7 +3393,7 @@ public class KafkaSupervisorTest extends EasyMockSupport taskMaster, indexerMetadataStorageCoordinator, taskClientFactory, - objectMapper, + OBJECT_MAPPER, new KafkaSupervisorSpec( dataSchema, tuningConfig, @@ -3404,7 +3404,7 @@ public class KafkaSupervisorTest extends EasyMockSupport taskMaster, indexerMetadataStorageCoordinator, taskClientFactory, - objectMapper, + OBJECT_MAPPER, new NoopServiceEmitter(), new DruidMonitorSchedulerConfig(), rowIngestionMetersFactory, @@ -3500,7 +3500,7 @@ public class KafkaSupervisorTest extends EasyMockSupport taskMaster, indexerMetadataStorageCoordinator, taskClientFactory, - objectMapper, + OBJECT_MAPPER, new KafkaSupervisorSpec( dataSchema, tuningConfig, @@ -3511,7 +3511,7 @@ public class KafkaSupervisorTest extends EasyMockSupport taskMaster, indexerMetadataStorageCoordinator, taskClientFactory, - objectMapper, + OBJECT_MAPPER, new NoopServiceEmitter(), new DruidMonitorSchedulerConfig(), rowIngestionMetersFactory, @@ -3584,7 +3584,7 @@ public class KafkaSupervisorTest extends EasyMockSupport taskMaster, indexerMetadataStorageCoordinator, taskClientFactory, - objectMapper, + OBJECT_MAPPER, new KafkaSupervisorSpec( dataSchema, tuningConfig, @@ -3595,7 +3595,7 @@ public class KafkaSupervisorTest extends EasyMockSupport taskMaster, indexerMetadataStorageCoordinator, taskClientFactory, - objectMapper, + OBJECT_MAPPER, 
new NoopServiceEmitter(), new DruidMonitorSchedulerConfig(), rowIngestionMetersFactory, @@ -3613,7 +3613,7 @@ public class KafkaSupervisorTest extends EasyMockSupport return new DataSchema( dataSource, - objectMapper.convertValue( + OBJECT_MAPPER.convertValue( new StringInputRowParser( new JSONParseSpec( new TimestampSpec("timestamp", "iso", null), @@ -3636,7 +3636,7 @@ public class KafkaSupervisorTest extends EasyMockSupport ImmutableList.of() ), null, - objectMapper + OBJECT_MAPPER ); } @@ -3717,7 +3717,7 @@ public class KafkaSupervisorTest extends EasyMockSupport null, null, rowIngestionMetersFactory, - objectMapper, + OBJECT_MAPPER, new DummyForInjectionAppenderatorsManager() ); } diff --git a/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisIndexTaskClientTest.java b/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisIndexTaskClientTest.java index cc488f40104..10f19c5fd32 100644 --- a/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisIndexTaskClientTest.java +++ b/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisIndexTaskClientTest.java @@ -73,7 +73,7 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport @Rule public ExpectedException expectedException = ExpectedException.none(); - private static final ObjectMapper objectMapper = new DefaultObjectMapper(); + private static final ObjectMapper OBJECT_MAPPER = new DefaultObjectMapper(); private static final String TEST_ID = "test-id"; private static final List TEST_IDS = Arrays.asList("test-id1", "test-id2", "test-id3", "test-id4"); private static final String TEST_HOST = "test-host"; @@ -112,7 +112,7 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport response = createMock(HttpResponse.class); headers = createMock(HttpHeaders.class); - client = new TestableKinesisIndexTaskClient(httpClient, objectMapper, taskInfoProvider); + client = new TestableKinesisIndexTaskClient(httpClient, OBJECT_MAPPER, taskInfoProvider); EasyMock.expect(taskInfoProvider.getTaskLocation(TEST_ID)) .andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT)) .anyTimes(); @@ -286,7 +286,7 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport @Test public void testGetCurrentOffsetsWithRetry() throws Exception { - client = new TestableKinesisIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 3); + client = new TestableKinesisIndexTaskClient(httpClient, OBJECT_MAPPER, taskInfoProvider, 3); Capture captured = Capture.newInstance(CaptureType.ALL); EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(6) @@ -331,7 +331,7 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport expectedException.expect(RuntimeException.class); expectedException.expectMessage("org.apache.druid.java.util.common.IOE: Received status [404]"); - client = new TestableKinesisIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 2); + client = new TestableKinesisIndexTaskClient(httpClient, OBJECT_MAPPER, taskInfoProvider, 2); EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).anyTimes(); EasyMock.expect(responseHolder.getContent()).andReturn("").anyTimes(); @@ -386,7 +386,7 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport @Test public void testGetStartTime() throws Exception { - client = new TestableKinesisIndexTaskClient(httpClient, objectMapper, 
taskInfoProvider, 2); + client = new TestableKinesisIndexTaskClient(httpClient, OBJECT_MAPPER, taskInfoProvider, 2); DateTime now = DateTimes.nowUtc(); Capture captured = Capture.newInstance(); diff --git a/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisRecordSupplierTest.java b/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisRecordSupplierTest.java index 230432a58eb..3bb675e464e 100644 --- a/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisRecordSupplierTest.java +++ b/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisRecordSupplierTest.java @@ -53,7 +53,7 @@ import java.util.stream.Collectors; public class KinesisRecordSupplierTest extends EasyMockSupport { - private static final String stream = "stream"; + private static final String STREAM = "stream"; private static final long POLL_TIMEOUT_MILLIS = 2000; private static final String SHARD_ID1 = "1"; private static final String SHARD_ID0 = "0"; @@ -78,7 +78,7 @@ public class KinesisRecordSupplierTest extends EasyMockSupport private static final List ALL_RECORDS = ImmutableList.builder() .addAll(SHARD0_RECORDS.stream() .map(x -> new OrderedPartitionableRecord<>( - stream, + STREAM, SHARD_ID0, x.getSequenceNumber(), Collections @@ -91,7 +91,7 @@ public class KinesisRecordSupplierTest extends EasyMockSupport .toList())) .addAll(SHARD1_RECORDS.stream() .map(x -> new OrderedPartitionableRecord<>( - stream, + STREAM, SHARD_ID1, x.getSequenceNumber(), Collections @@ -182,8 +182,8 @@ public class KinesisRecordSupplierTest extends EasyMockSupport replayAll(); Set<StreamPartition<String>> partitions = ImmutableSet.of( - StreamPartition.of(stream, SHARD_ID0), - StreamPartition.of(stream, SHARD_ID1) + StreamPartition.of(STREAM, SHARD_ID0), + StreamPartition.of(STREAM, SHARD_ID1) ); recordSupplier = new KinesisRecordSupplier( @@ -204,13 +204,13 @@ public class KinesisRecordSupplierTest extends EasyMockSupport recordSupplier.assign(partitions); Assert.assertEquals(partitions, recordSupplier.getAssignment()); - Assert.assertEquals(ImmutableSet.of(SHARD_ID1, SHARD_ID0), recordSupplier.getPartitionIds(stream)); + Assert.assertEquals(ImmutableSet.of(SHARD_ID1, SHARD_ID0), recordSupplier.getPartitionIds(STREAM)); Assert.assertEquals(Collections.emptyList(), recordSupplier.poll(100)); verifyAll(); final DescribeStreamRequest expectedRequest = new DescribeStreamRequest(); - expectedRequest.setStreamName(stream); + expectedRequest.setStreamName(STREAM); expectedRequest.setExclusiveStartShardId("0"); Assert.assertEquals(expectedRequest, capturedRequest.getValue()); } @@ -266,8 +266,8 @@ public class KinesisRecordSupplierTest extends EasyMockSupport replayAll(); Set<StreamPartition<String>> partitions = ImmutableSet.of( - StreamPartition.of(stream, SHARD_ID0), - StreamPartition.of(stream, SHARD_ID1) + StreamPartition.of(STREAM, SHARD_ID0), + StreamPartition.of(STREAM, SHARD_ID1) ); @@ -338,8 +338,8 @@ public class KinesisRecordSupplierTest extends EasyMockSupport replayAll(); - StreamPartition shard0Partition = StreamPartition.of(stream, SHARD_ID0); - StreamPartition shard1Partition = StreamPartition.of(stream, SHARD_ID1); + StreamPartition shard0Partition = StreamPartition.of(STREAM, SHARD_ID0); + StreamPartition shard1Partition = StreamPartition.of(STREAM, SHARD_ID1); Set<StreamPartition<String>> partitions = ImmutableSet.of( shard0Partition, shard1Partition ); @@ -405,8 +405,8 @@ public class KinesisRecordSupplierTest extends EasyMockSupport 
replayAll(); - StreamPartition shard0 = StreamPartition.of(stream, SHARD_ID0); - StreamPartition shard1 = StreamPartition.of(stream, SHARD_ID1); + StreamPartition shard0 = StreamPartition.of(STREAM, SHARD_ID0); + StreamPartition shard1 = StreamPartition.of(STREAM, SHARD_ID1); Set<StreamPartition<String>> partitions = ImmutableSet.of( shard0, shard1 @@ -440,8 +440,8 @@ public class KinesisRecordSupplierTest extends EasyMockSupport @Test(expected = ISE.class) public void testSeekUnassigned() throws InterruptedException { - StreamPartition shard0 = StreamPartition.of(stream, SHARD_ID0); - StreamPartition shard1 = StreamPartition.of(stream, SHARD_ID1); + StreamPartition shard0 = StreamPartition.of(STREAM, SHARD_ID0); + StreamPartition shard1 = StreamPartition.of(STREAM, SHARD_ID1); Set<StreamPartition<String>> partitions = ImmutableSet.of( shard1 ); @@ -503,7 +503,7 @@ public class KinesisRecordSupplierTest extends EasyMockSupport replayAll(); Set<StreamPartition<String>> partitions = ImmutableSet.of( - StreamPartition.of(stream, SHARD_ID1) + StreamPartition.of(STREAM, SHARD_ID1) ); recordSupplier = new KinesisRecordSupplier( @@ -520,7 +520,7 @@ public class KinesisRecordSupplierTest extends EasyMockSupport ); recordSupplier.assign(partitions); - recordSupplier.seek(StreamPartition.of(stream, SHARD_ID1), "5"); + recordSupplier.seek(StreamPartition.of(STREAM, SHARD_ID1), "5"); recordSupplier.start(); for (int i = 0; i < 10 && recordSupplier.bufferSize() < 6; i++) { @@ -534,7 +534,7 @@ public class KinesisRecordSupplierTest extends EasyMockSupport firstRecord ); - recordSupplier.seek(StreamPartition.of(stream, SHARD_ID1), "7"); + recordSupplier.seek(StreamPartition.of(STREAM, SHARD_ID1), "7"); recordSupplier.start(); while (recordSupplier.bufferSize() < 4) { @@ -585,8 +585,8 @@ public class KinesisRecordSupplierTest extends EasyMockSupport replayAll(); Set<StreamPartition<String>> partitions = ImmutableSet.of( - StreamPartition.of(stream, SHARD_ID0), - StreamPartition.of(stream, SHARD_ID1) + StreamPartition.of(STREAM, SHARD_ID0), + StreamPartition.of(STREAM, SHARD_ID1) ); diff --git a/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisSamplerSpecTest.java b/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisSamplerSpecTest.java index 080759bdd2e..33ebe132d49 100644 --- a/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisSamplerSpecTest.java +++ b/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisSamplerSpecTest.java @@ -66,12 +66,12 @@ import java.util.Map; public class KinesisSamplerSpecTest extends EasyMockSupport { - private static final ObjectMapper objectMapper = TestHelper.makeJsonMapper(); + private static final ObjectMapper OBJECT_MAPPER = TestHelper.makeJsonMapper(); private static final String STREAM = "sampling"; private static final String SHARD_ID = "1"; private static final DataSchema DATA_SCHEMA = new DataSchema( "test_ds", - objectMapper.convertValue( + OBJECT_MAPPER.convertValue( new StringInputRowParser( new JSONParseSpec( new TimestampSpec("timestamp", "iso", null), @@ -99,7 +99,7 @@ public class KinesisSamplerSpecTest extends EasyMockSupport }, new UniformGranularitySpec(Granularities.DAY, Granularities.NONE, null), null, - objectMapper + OBJECT_MAPPER ); private final KinesisRecordSupplier recordSupplier = mock(KinesisRecordSupplier.class); @@ -183,7 +183,7 @@ public class KinesisSamplerSpecTest extends EasyMockSupport KinesisSamplerSpec samplerSpec = new TestableKinesisSamplerSpec( 
supervisorSpec, new SamplerConfig(5, null, null, null), - new FirehoseSampler(objectMapper, new SamplerCache(MapCache.create(100000))), + new FirehoseSampler(OBJECT_MAPPER, new SamplerCache(MapCache.create(100000))), null ); diff --git a/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/supervisor/KinesisSupervisorTest.java b/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/supervisor/KinesisSupervisorTest.java index 76ea5c026ea..4d2513120f7 100644 --- a/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/supervisor/KinesisSupervisorTest.java +++ b/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/supervisor/KinesisSupervisorTest.java @@ -110,17 +110,17 @@ import java.util.concurrent.Executor; public class KinesisSupervisorTest extends EasyMockSupport { - private static final ObjectMapper objectMapper = TestHelper.makeJsonMapper(); + private static final ObjectMapper OBJECT_MAPPER = TestHelper.makeJsonMapper(); private static final String DATASOURCE = "testDS"; private static final int TEST_CHAT_THREADS = 3; private static final long TEST_CHAT_RETRIES = 9L; private static final Period TEST_HTTP_TIMEOUT = new Period("PT10S"); private static final Period TEST_SHUTDOWN_TIMEOUT = new Period("PT80S"); - private static final String stream = "stream"; - private static final String shardId1 = "1"; - private static final String shardId0 = "0"; - private static final StreamPartition shard1Partition = StreamPartition.of(stream, shardId1); - private static final StreamPartition shard0Partition = StreamPartition.of(stream, shardId0); + private static final String STREAM = "stream"; + private static final String SHARD_ID1 = "1"; + private static final String SHARD_ID0 = "0"; + private static final StreamPartition SHARD1_PARTITION = StreamPartition.of(STREAM, SHARD_ID1); + private static final StreamPartition SHARD0_PARTITION = StreamPartition.of(STREAM, SHARD_ID0); private static DataSchema dataSchema; private KinesisRecordSupplier supervisorRecordSupplier; @@ -211,11 +211,11 @@ public class KinesisSupervisorTest extends EasyMockSupport supervisorRecordSupplier.assign(EasyMock.anyObject()); EasyMock.expectLastCall().anyTimes(); - EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) - .andReturn(ImmutableSet.of(shardId1, shardId0)) + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM)) + .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0)) .anyTimes(); EasyMock.expect(supervisorRecordSupplier.getAssignment()) - .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION)) .anyTimes(); supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); EasyMock.expectLastCall().anyTimes(); @@ -250,24 +250,24 @@ public class KinesisSupervisorTest extends EasyMockSupport Assert.assertFalse("minimumMessageTime", taskConfig.getMinimumMessageTime().isPresent()); Assert.assertFalse("maximumMessageTime", taskConfig.getMaximumMessageTime().isPresent()); - Assert.assertEquals(stream, taskConfig.getStartSequenceNumbers().getStream()); + Assert.assertEquals(STREAM, taskConfig.getStartSequenceNumbers().getStream()); Assert.assertEquals( "0", - taskConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap().get(shardId1) + taskConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID1) ); Assert.assertEquals( "0", - 
taskConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap().get(shardId0) + taskConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID0) ); - Assert.assertEquals(stream, taskConfig.getEndSequenceNumbers().getStream()); + Assert.assertEquals(STREAM, taskConfig.getEndSequenceNumbers().getStream()); Assert.assertEquals( KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER, - taskConfig.getEndSequenceNumbers().getPartitionSequenceNumberMap().get(shardId1) + taskConfig.getEndSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID1) ); Assert.assertEquals( KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER, - taskConfig.getEndSequenceNumbers().getPartitionSequenceNumberMap().get(shardId0) + taskConfig.getEndSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID0) ); } @@ -279,11 +279,11 @@ public class KinesisSupervisorTest extends EasyMockSupport supervisorRecordSupplier.assign(EasyMock.anyObject()); EasyMock.expectLastCall().anyTimes(); - EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) - .andReturn(ImmutableSet.of(shardId1, shardId0)) + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM)) + .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0)) .anyTimes(); EasyMock.expect(supervisorRecordSupplier.getAssignment()) - .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION)) .anyTimes(); supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); EasyMock.expectLastCall().anyTimes(); @@ -310,11 +310,11 @@ public class KinesisSupervisorTest extends EasyMockSupport Assert.assertEquals(1, task1.getIOConfig().getEndSequenceNumbers().getPartitionSequenceNumberMap().size()); Assert.assertEquals( "0", - task1.getIOConfig().getStartSequenceNumbers().getPartitionSequenceNumberMap().get(shardId1) + task1.getIOConfig().getStartSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID1) ); Assert.assertEquals( KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER, - task1.getIOConfig().getEndSequenceNumbers().getPartitionSequenceNumberMap().get(shardId1) + task1.getIOConfig().getEndSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID1) ); KinesisIndexTask task2 = captured.getValues().get(1); @@ -322,11 +322,11 @@ public class KinesisSupervisorTest extends EasyMockSupport Assert.assertEquals(1, task2.getIOConfig().getEndSequenceNumbers().getPartitionSequenceNumberMap().size()); Assert.assertEquals( "0", - task2.getIOConfig().getStartSequenceNumbers().getPartitionSequenceNumberMap().get(shardId0) + task2.getIOConfig().getStartSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID0) ); Assert.assertEquals( KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER, - task2.getIOConfig().getEndSequenceNumbers().getPartitionSequenceNumberMap().get(shardId0) + task2.getIOConfig().getEndSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID0) ); } @@ -337,11 +337,11 @@ public class KinesisSupervisorTest extends EasyMockSupport supervisorRecordSupplier.assign(EasyMock.anyObject()); EasyMock.expectLastCall().anyTimes(); - EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) - .andReturn(ImmutableSet.of(shardId1, shardId0)) + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM)) + .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0)) .anyTimes(); EasyMock.expect(supervisorRecordSupplier.getAssignment()) - .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION)) .anyTimes(); 
    supervisorRecordSupplier.seekToLatest(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
@@ -370,19 +370,19 @@ public class KinesisSupervisorTest extends EasyMockSupport
    Assert.assertEquals(2, task1.getIOConfig().getEndSequenceNumbers().getPartitionSequenceNumberMap().size());
    Assert.assertEquals(
        "0",
-       task1.getIOConfig().getStartSequenceNumbers().getPartitionSequenceNumberMap().get(shardId0)
+       task1.getIOConfig().getStartSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID0)
    );
    Assert.assertEquals(
        KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER,
-       task1.getIOConfig().getEndSequenceNumbers().getPartitionSequenceNumberMap().get(shardId0)
+       task1.getIOConfig().getEndSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID0)
    );
    Assert.assertEquals(
        "0",
-       task1.getIOConfig().getStartSequenceNumbers().getPartitionSequenceNumberMap().get(shardId1)
+       task1.getIOConfig().getStartSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID1)
    );
    Assert.assertEquals(
        KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER,
-       task1.getIOConfig().getEndSequenceNumbers().getPartitionSequenceNumberMap().get(shardId1)
+       task1.getIOConfig().getEndSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID1)
    );
    KinesisIndexTask task2 = captured.getValues().get(1);
@@ -390,19 +390,19 @@ public class KinesisSupervisorTest extends EasyMockSupport
    Assert.assertEquals(2, task2.getIOConfig().getEndSequenceNumbers().getPartitionSequenceNumberMap().size());
    Assert.assertEquals(
        "0",
-       task2.getIOConfig().getStartSequenceNumbers().getPartitionSequenceNumberMap().get(shardId0)
+       task2.getIOConfig().getStartSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID0)
    );
    Assert.assertEquals(
        KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER,
-       task2.getIOConfig().getEndSequenceNumbers().getPartitionSequenceNumberMap().get(shardId0)
+       task2.getIOConfig().getEndSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID0)
    );
    Assert.assertEquals(
        "0",
-       task2.getIOConfig().getStartSequenceNumbers().getPartitionSequenceNumberMap().get(shardId1)
+       task2.getIOConfig().getStartSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID1)
    );
    Assert.assertEquals(
        KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER,
-       task2.getIOConfig().getEndSequenceNumbers().getPartitionSequenceNumberMap().get(shardId1)
+       task2.getIOConfig().getEndSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID1)
    );
  }
@@ -414,11 +414,11 @@ public class KinesisSupervisorTest extends EasyMockSupport
    supervisorRecordSupplier.assign(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream))
-       .andReturn(ImmutableSet.of(shardId1, shardId0))
+   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM))
+       .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0))
        .anyTimes();
    EasyMock.expect(supervisorRecordSupplier.getAssignment())
-       .andReturn(ImmutableSet.of(shard1Partition, shard0Partition))
+       .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION))
        .anyTimes();
    supervisorRecordSupplier.seekToLatest(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
@@ -466,11 +466,11 @@ public class KinesisSupervisorTest extends EasyMockSupport
    supervisorRecordSupplier.assign(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream))
-       .andReturn(ImmutableSet.of(shardId1, shardId0))
+   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM))
+       .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0))
        .anyTimes();
    EasyMock.expect(supervisorRecordSupplier.getAssignment())
-       .andReturn(ImmutableSet.of(shard1Partition, shard0Partition))
+       .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION))
        .anyTimes();
    supervisorRecordSupplier.seekToLatest(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
@@ -523,16 +523,16 @@ public class KinesisSupervisorTest extends EasyMockSupport
    supervisorRecordSupplier.assign(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream))
-       .andReturn(ImmutableSet.of(shardId1, shardId0))
+   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM))
+       .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0))
        .anyTimes();
    EasyMock.expect(supervisorRecordSupplier.getAssignment())
-       .andReturn(ImmutableSet.of(shard1Partition, shard0Partition))
+       .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION))
        .anyTimes();
    supervisorRecordSupplier.seekToLatest(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(shard1Partition)).andReturn("2").anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(shard0Partition)).andReturn("1").anyTimes();
+   EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(SHARD1_PARTITION)).andReturn("2").anyTimes();
+   EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(SHARD0_PARTITION)).andReturn("1").anyTimes();
    EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(EasyMock.anyObject())).andReturn("100").anyTimes();
    supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString());
    EasyMock.expectLastCall().anyTimes();
@@ -544,8 +544,8 @@ public class KinesisSupervisorTest extends EasyMockSupport
    EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn(
        new KinesisDataSourceMetadata(
            new SeekableStreamStartSequenceNumbers<>(
-               stream,
-               ImmutableMap.of(shardId1, "2", shardId0, "1"),
+               STREAM,
+               ImmutableMap.of(SHARD_ID1, "2", SHARD_ID0, "1"),
                ImmutableSet.of()
            )
        )
@@ -563,11 +563,11 @@ public class KinesisSupervisorTest extends EasyMockSupport
    Assert.assertEquals("sequenceName-0", taskConfig.getBaseSequenceName());
    Assert.assertEquals(
        "2",
-       taskConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap().get(shardId1)
+       taskConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID1)
    );
    Assert.assertEquals(
        "1",
-       taskConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap().get(shardId0)
+       taskConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID0)
    );
  }
@@ -578,11 +578,11 @@ public class KinesisSupervisorTest extends EasyMockSupport
    supervisorRecordSupplier.assign(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream))
-       .andReturn(ImmutableSet.of(shardId1, shardId0))
+   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM))
+       .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0))
        .anyTimes();
    EasyMock.expect(supervisorRecordSupplier.getAssignment())
-       .andReturn(ImmutableSet.of(shard1Partition, shard0Partition))
+       .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION))
        .anyTimes();
    supervisorRecordSupplier.seekToLatest(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
@@ -597,8 +597,8 @@ public class KinesisSupervisorTest extends EasyMockSupport
    EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn(
        new KinesisDataSourceMetadata(
            new SeekableStreamStartSequenceNumbers<>(
-               stream,
-               ImmutableMap.of(shardId1, "101", shardId0, "-1"),
+               STREAM,
+               ImmutableMap.of(SHARD_ID1, "101", SHARD_ID0, "-1"),
                ImmutableSet.of()
            )
        )
@@ -621,11 +621,11 @@ public class KinesisSupervisorTest extends EasyMockSupport
    supervisorRecordSupplier.assign(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream))
-       .andReturn(ImmutableSet.of(shardId1, shardId0))
+   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM))
+       .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0))
        .anyTimes();
    EasyMock.expect(supervisorRecordSupplier.getAssignment())
-       .andReturn(ImmutableSet.of(shard1Partition, shard0Partition))
+       .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION))
        .anyTimes();
    supervisorRecordSupplier.seekToLatest(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
@@ -640,11 +640,11 @@ public class KinesisSupervisorTest extends EasyMockSupport
        "other-datasource",
        2,
        new SeekableStreamStartSequenceNumbers<>(
-           stream,
-           ImmutableMap.of(shardId0, "0", shardId1, "0"),
-           ImmutableSet.of(shardId0, shardId1)
+           STREAM,
+           ImmutableMap.of(SHARD_ID0, "0", SHARD_ID1, "0"),
+           ImmutableSet.of(SHARD_ID0, SHARD_ID1)
        ),
-       new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId0, "1", shardId1, "12")),
+       new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID0, "1", SHARD_ID1, "12")),
        null,
        null
    );
@@ -702,11 +702,11 @@ public class KinesisSupervisorTest extends EasyMockSupport
    supervisorRecordSupplier.assign(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream))
-       .andReturn(ImmutableSet.of(shardId1, shardId0))
+   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM))
+       .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0))
        .anyTimes();
    EasyMock.expect(supervisorRecordSupplier.getAssignment())
-       .andReturn(ImmutableSet.of(shard1Partition, shard0Partition))
+       .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION))
        .anyTimes();
    supervisorRecordSupplier.seekToLatest(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
@@ -719,8 +719,8 @@ public class KinesisSupervisorTest extends EasyMockSupport
        "id1",
        DATASOURCE,
        0,
-       new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "0"), ImmutableSet.of()),
-       new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "12")),
+       new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "0"), ImmutableSet.of()),
+       new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "12")),
        null,
        null
    );
@@ -728,8 +728,8 @@ public class KinesisSupervisorTest extends EasyMockSupport
        "id2",
        DATASOURCE,
        1,
-       new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId0, "0"), ImmutableSet.of(shardId0)),
-       new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId0, "1")),
+       new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID0, "0"), ImmutableSet.of(SHARD_ID0)),
+       new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID0, "1")),
        null,
        null
    );
@@ -738,10 +738,10 @@ public class KinesisSupervisorTest extends EasyMockSupport
        DATASOURCE,
        0,
        new SeekableStreamStartSequenceNumbers<>(
-           stream,
-           ImmutableMap.of(shardId0, "0", shardId1, "0"), ImmutableSet.of(shardId0, shardId1)
+           STREAM,
+           ImmutableMap.of(SHARD_ID0, "0", SHARD_ID1, "0"), ImmutableSet.of(SHARD_ID0, SHARD_ID1)
        ),
-       new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId0, "1", shardId1, "12")),
+       new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID0, "1", SHARD_ID1, "12")),
        null,
        null
    );
@@ -749,8 +749,8 @@ public class KinesisSupervisorTest extends EasyMockSupport
        "id4",
        DATASOURCE,
        0,
-       new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId0, "0"), ImmutableSet.of(shardId0)),
-       new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId0, "1")),
+       new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID0, "0"), ImmutableSet.of(SHARD_ID0)),
+       new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID0, "1")),
        null,
        null
    );
@@ -783,11 +783,11 @@ public class KinesisSupervisorTest extends EasyMockSupport
    EasyMock.expect(taskClient.stopAsync("id3", false)).andReturn(Futures.immediateFuture(true));
    TreeMap<Integer, Map<String, String>> checkpoints1 = new TreeMap<>();
-   checkpoints1.put(0, ImmutableMap.of(shardId1, "0"));
+   checkpoints1.put(0, ImmutableMap.of(SHARD_ID1, "0"));
    TreeMap<Integer, Map<String, String>> checkpoints2 = new TreeMap<>();
-   checkpoints2.put(0, ImmutableMap.of(shardId0, "0"));
+   checkpoints2.put(0, ImmutableMap.of(SHARD_ID0, "0"));
    TreeMap<Integer, Map<String, String>> checkpoints4 = new TreeMap<>();
-   checkpoints4.put(0, ImmutableMap.of(shardId0, "0"));
+   checkpoints4.put(0, ImmutableMap.of(SHARD_ID0, "0"));
    EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id1"), EasyMock.anyBoolean()))
        .andReturn(Futures.immediateFuture(checkpoints1))
        .times(1);
@@ -814,11 +814,11 @@ public class KinesisSupervisorTest extends EasyMockSupport
    supervisor = getTestableSupervisor(2, 2, true, "PT1H", null, null);
    supervisorRecordSupplier.assign(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream))
-       .andReturn(ImmutableSet.of(shardId1, shardId0))
+   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM))
+       .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0))
        .anyTimes();
    EasyMock.expect(supervisorRecordSupplier.getAssignment())
-       .andReturn(ImmutableSet.of(shard1Partition, shard0Partition))
+       .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION))
        .anyTimes();
    supervisorRecordSupplier.seekToLatest(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
@@ -850,13 +850,13 @@ public class KinesisSupervisorTest extends EasyMockSupport
    checkpoints1.put(
        0,
        ImmutableMap.of(
-           shardId1,
+           SHARD_ID1,
            "0"
        )
    );
    TreeMap<Integer, Map<String, String>> checkpoints2 = new TreeMap<>();
    checkpoints2.put(0, ImmutableMap.of(
-       shardId0,
+       SHARD_ID0,
        "0"
    ));
    EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-0"), EasyMock.anyBoolean()))
@@ -926,11 +926,11 @@ public class KinesisSupervisorTest extends EasyMockSupport
    supervisor = getTestableSupervisor(2, 1, true, "PT1H", null, null);
    supervisorRecordSupplier.assign(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream))
-       .andReturn(ImmutableSet.of(shardId1, shardId0))
+   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM))
+       .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0))
        .anyTimes();
    EasyMock.expect(supervisorRecordSupplier.getAssignment())
-       .andReturn(ImmutableSet.of(shard1Partition, shard0Partition))
+       .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION))
        .anyTimes();
    supervisorRecordSupplier.seekToLatest(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
@@ -947,15 +947,15 @@ public class KinesisSupervisorTest extends EasyMockSupport
        0,
        new SeekableStreamStartSequenceNumbers<>(
            "stream",
-           ImmutableMap.of(shardId1, "0", shardId0, "0"),
+           ImmutableMap.of(SHARD_ID1, "0", SHARD_ID0, "0"),
            ImmutableSet.of()
        ),
        new SeekableStreamEndSequenceNumbers<>(
            "stream",
            ImmutableMap.of(
-               shardId1,
+               SHARD_ID1,
                KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER,
-               shardId0,
+               SHARD_ID0,
                KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER
            )
        ),
@@ -984,9 +984,9 @@ public class KinesisSupervisorTest extends EasyMockSupport
    TreeMap<Integer, Map<String, String>> checkpoints = new TreeMap<>();
    checkpoints.put(0, ImmutableMap.of(
-       shardId1,
+       SHARD_ID1,
        "0",
-       shardId0,
+       SHARD_ID0,
        "0"
    ));
    EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id1"), EasyMock.anyBoolean()))
@@ -1063,11 +1063,11 @@ public class KinesisSupervisorTest extends EasyMockSupport
    supervisor = getTestableSupervisor(2, 2, true, "PT1H", null, null);
    supervisorRecordSupplier.assign(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream))
-       .andReturn(ImmutableSet.of(shardId1, shardId0))
+   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM))
+       .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0))
        .anyTimes();
    EasyMock.expect(supervisorRecordSupplier.getAssignment())
-       .andReturn(ImmutableSet.of(shard1Partition, shard0Partition))
+       .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION))
        .anyTimes();
    supervisorRecordSupplier.seekToLatest(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
@@ -1114,14 +1114,14 @@ public class KinesisSupervisorTest extends EasyMockSupport
        .anyTimes();
    TreeMap<Integer, Map<String, String>> checkpoints1 = new TreeMap<>();
    checkpoints1.put(0, ImmutableMap.of(
-       shardId1,
+       SHARD_ID1,
        "0",
-       shardId0,
+       SHARD_ID0,
        "0"
    ));
    TreeMap<Integer, Map<String, String>> checkpoints2 = new TreeMap<>();
    checkpoints2.put(0, ImmutableMap.of(
-       shardId1,
+       SHARD_ID1,
        "0"
    ));
    // there would be 4 tasks, 2 for each task group
@@ -1188,17 +1188,17 @@ public class KinesisSupervisorTest extends EasyMockSupport
    supervisorRecordSupplier.assign(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream))
-       .andReturn(ImmutableSet.of(shardId1, shardId0))
+   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM))
+       .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0))
        .anyTimes();
    EasyMock.expect(supervisorRecordSupplier.getAssignment())
-       .andReturn(ImmutableSet.of(shard1Partition, shard0Partition))
+       .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION))
        .anyTimes();
    supervisorRecordSupplier.seekToLatest(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
    EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes();
+   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(SHARD1_PARTITION)).andReturn("12").anyTimes();
+   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(SHARD0_PARTITION)).andReturn("1").anyTimes();
    supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString());
    EasyMock.expectLastCall().anyTimes();
@@ -1245,24 +1245,24 @@ public class KinesisSupervisorTest extends EasyMockSupport
        .times(2);
    EasyMock.expect(taskClient.pauseAsync(EasyMock.contains("sequenceName-0")))
        .andReturn(Futures.immediateFuture(ImmutableMap.of(
-           shardId1,
+           SHARD_ID1,
            "1",
-           shardId0,
+           SHARD_ID0,
            "0"
        )))
        .andReturn(Futures.immediateFuture(ImmutableMap.of(
-           shardId1,
+           SHARD_ID1,
            "3",
-           shardId0,
+           SHARD_ID0,
            "1"
        )));
    EasyMock.expect(
        taskClient.setEndOffsetsAsync(
            EasyMock.contains("sequenceName-0"),
            EasyMock.eq(ImmutableMap.of(
-               shardId1,
+               SHARD_ID1,
                "3",
-               shardId0,
+               SHARD_ID0,
                "1"
            )),
            EasyMock.eq(true)
@@ -1272,12 +1272,12 @@ public class KinesisSupervisorTest extends EasyMockSupport
    TreeMap<Integer, Map<String, String>> checkpoints1 = new TreeMap<>();
    checkpoints1.put(0, ImmutableMap.of(
-       shardId1,
+       SHARD_ID1,
        "0"
    ));
    TreeMap<Integer, Map<String, String>> checkpoints2 = new TreeMap<>();
    checkpoints2.put(0, ImmutableMap.of(
-       shardId0,
+       SHARD_ID0,
        "0"
    ));
    EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-0"), EasyMock.anyBoolean()))
@@ -1301,14 +1301,14 @@ public class KinesisSupervisorTest extends EasyMockSupport
    Assert.assertEquals("sequenceName-1", taskConfig.getBaseSequenceName());
    Assert.assertTrue("isUseTransaction", taskConfig.isUseTransaction());
-   Assert.assertEquals(stream, taskConfig.getStartSequenceNumbers().getStream());
+   Assert.assertEquals(STREAM, taskConfig.getStartSequenceNumbers().getStream());
    Assert.assertEquals(
        "3",
-       taskConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap().get(shardId1)
+       taskConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID1)
    );
    Assert.assertEquals(
        "1",
-       taskConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap().get(shardId0)
+       taskConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID0)
    );
    // start sequenceNumbers should be exclusive for the second batch of tasks
    Assert.assertEquals(
@@ -1327,17 +1327,17 @@ public class KinesisSupervisorTest extends EasyMockSupport
    supervisorRecordSupplier.assign(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream))
-       .andReturn(ImmutableSet.of(shardId1, shardId0))
+   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM))
+       .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0))
        .anyTimes();
    EasyMock.expect(supervisorRecordSupplier.getAssignment())
-       .andReturn(ImmutableSet.of(shard1Partition, shard0Partition))
+       .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION))
        .anyTimes();
    supervisorRecordSupplier.seekToLatest(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
    EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes();
+   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(SHARD1_PARTITION)).andReturn("12").anyTimes();
+   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(SHARD0_PARTITION)).andReturn("1").anyTimes();
    supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString());
    EasyMock.expectLastCall().anyTimes();
@@ -1347,15 +1347,15 @@ public class KinesisSupervisorTest extends EasyMockSupport
        0,
        new SeekableStreamStartSequenceNumbers<>(
            "stream",
-           ImmutableMap.of(shardId1, "0", shardId0, "0"),
+           ImmutableMap.of(SHARD_ID1, "0", SHARD_ID0, "0"),
            ImmutableSet.of()
        ),
        new SeekableStreamEndSequenceNumbers<>(
            "stream",
            ImmutableMap.of(
-               shardId1,
+               SHARD_ID1,
                KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER,
-               shardId0,
+               SHARD_ID0,
                KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER
            )
        ),
@@ -1382,24 +1382,24 @@ public class KinesisSupervisorTest extends EasyMockSupport
        .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.PUBLISHING));
    EasyMock.expect(taskClient.getCurrentOffsetsAsync("id1", false))
        .andReturn(Futures.immediateFuture(ImmutableMap.of(
-           shardId1,
+           SHARD_ID1,
            "2",
-           shardId0,
+           SHARD_ID0,
            "1"
        )));
    EasyMock.expect(taskClient.getEndOffsets("id1")).andReturn(ImmutableMap.of(
-       shardId1,
+       SHARD_ID1,
        "2",
-       shardId0,
+       SHARD_ID0,
        "1"
    ));
    EasyMock.expect(taskQueue.add(EasyMock.capture(captured))).andReturn(true);
    TreeMap<Integer, Map<String, String>> checkpoints = new TreeMap<>();
    checkpoints.put(0, ImmutableMap.of(
-       shardId1,
+       SHARD_ID1,
        "0",
-       shardId0,
+       SHARD_ID0,
        "0"
    ));
    EasyMock.expect(taskClient.getCheckpoints(EasyMock.anyString(), EasyMock.anyBoolean()))
@@ -1423,7 +1423,7 @@ public class KinesisSupervisorTest extends EasyMockSupport
    Assert.assertEquals(3600L, payload.getDurationSeconds());
    Assert.assertEquals(2, payload.getPartitions());
    Assert.assertEquals(1, payload.getReplicas());
-   Assert.assertEquals(stream, payload.getStream());
+   Assert.assertEquals(STREAM, payload.getStream());
    Assert.assertEquals(0, payload.getActiveTasks().size());
    Assert.assertEquals(1, payload.getPublishingTasks().size());
    Assert.assertEquals(SupervisorStateManager.BasicState.RUNNING, payload.getDetailedState());
@@ -1433,15 +1433,15 @@ public class KinesisSupervisorTest extends EasyMockSupport
    Assert.assertEquals("id1", publishingReport.getId());
    Assert.assertEquals(ImmutableMap.of(
-       shardId1,
+       SHARD_ID1,
        "0",
-       shardId0,
+       SHARD_ID0,
        "0"
    ), publishingReport.getStartingOffsets());
    Assert.assertEquals(ImmutableMap.of(
-       shardId1,
+       SHARD_ID1,
        "2",
-       shardId0,
+       SHARD_ID0,
        "1"
    ), publishingReport.getCurrentOffsets());
@@ -1455,24 +1455,24 @@ public class KinesisSupervisorTest extends EasyMockSupport
    Assert.assertTrue("isUseTransaction", capturedTaskConfig.isUseTransaction());
    // check that the new task was created with starting sequences matching where the publishing task finished
-   Assert.assertEquals(stream, capturedTaskConfig.getStartSequenceNumbers().getStream());
+   Assert.assertEquals(STREAM, capturedTaskConfig.getStartSequenceNumbers().getStream());
    Assert.assertEquals(
        "2",
-       capturedTaskConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap().get(shardId1)
+       capturedTaskConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID1)
    );
    Assert.assertEquals(
        "1",
-       capturedTaskConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap().get(shardId0)
+       capturedTaskConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID0)
    );
-   Assert.assertEquals(stream, capturedTaskConfig.getEndSequenceNumbers().getStream());
+   Assert.assertEquals(STREAM, capturedTaskConfig.getEndSequenceNumbers().getStream());
    Assert.assertEquals(
        KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER,
-       capturedTaskConfig.getEndSequenceNumbers().getPartitionSequenceNumberMap().get(shardId1)
+       capturedTaskConfig.getEndSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID1)
    );
    Assert.assertEquals(
        KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER,
-       capturedTaskConfig.getEndSequenceNumbers().getPartitionSequenceNumberMap().get(shardId0)
+       capturedTaskConfig.getEndSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID0)
    );
  }
@@ -1484,17 +1484,17 @@ public class KinesisSupervisorTest extends EasyMockSupport
    supervisor = getTestableSupervisor(1, 1, true, "PT1H", null, null);
    supervisorRecordSupplier.assign(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream))
-       .andReturn(ImmutableSet.of(shardId1, shardId0))
+   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM))
+       .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0))
        .anyTimes();
    EasyMock.expect(supervisorRecordSupplier.getAssignment())
-       .andReturn(ImmutableSet.of(shard1Partition, shard0Partition))
+       .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION))
        .anyTimes();
    supervisorRecordSupplier.seekToLatest(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
    EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes();
+   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(SHARD1_PARTITION)).andReturn("12").anyTimes();
+   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(SHARD0_PARTITION)).andReturn("1").anyTimes();
    supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString());
    EasyMock.expectLastCall().anyTimes();
@@ -1504,15 +1504,15 @@ public class KinesisSupervisorTest extends EasyMockSupport
        0,
        new SeekableStreamStartSequenceNumbers<>(
            "stream",
-           ImmutableMap.of(shardId1, "0", shardId0, "0"),
+           ImmutableMap.of(SHARD_ID1, "0", SHARD_ID0, "0"),
            ImmutableSet.of()
        ),
        new SeekableStreamEndSequenceNumbers<>(
            "stream",
            ImmutableMap.of(
-               shardId1,
+               SHARD_ID1,
                KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER,
-               shardId0,
+               SHARD_ID0,
                KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER
            )
        ),
@@ -1539,15 +1539,15 @@ public class KinesisSupervisorTest extends EasyMockSupport
        .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.PUBLISHING));
    EasyMock.expect(taskClient.getCurrentOffsetsAsync("id1", false))
        .andReturn(Futures.immediateFuture(ImmutableMap.of(
-           shardId1,
+           SHARD_ID1,
            "2",
-           shardId0,
+           SHARD_ID0,
            "1"
        )));
    EasyMock.expect(taskClient.getEndOffsets("id1")).andReturn(ImmutableMap.of(
-       shardId1,
+       SHARD_ID1,
        "2",
-       shardId0,
+       SHARD_ID0,
        "1"
    ));
    EasyMock.expect(taskQueue.add(EasyMock.capture(captured))).andReturn(true);
@@ -1569,7 +1569,7 @@ public class KinesisSupervisorTest extends EasyMockSupport
    Assert.assertEquals(3600L, payload.getDurationSeconds());
    Assert.assertEquals(2, payload.getPartitions());
    Assert.assertEquals(1, payload.getReplicas());
-   Assert.assertEquals(stream, payload.getStream());
+   Assert.assertEquals(STREAM, payload.getStream());
    Assert.assertEquals(0, payload.getActiveTasks().size());
    Assert.assertEquals(1, payload.getPublishingTasks().size());
    Assert.assertEquals(SupervisorStateManager.BasicState.RUNNING, payload.getDetailedState());
@@ -1579,15 +1579,15 @@ public class KinesisSupervisorTest extends EasyMockSupport
    Assert.assertEquals("id1", publishingReport.getId());
    Assert.assertEquals(ImmutableMap.of(
-       shardId1,
+       SHARD_ID1,
        "0",
-       shardId0,
+       SHARD_ID0,
        "0"
    ), publishingReport.getStartingOffsets());
    Assert.assertEquals(ImmutableMap.of(
-       shardId1,
+       SHARD_ID1,
        "2",
-       shardId0,
+       SHARD_ID0,
        "1"
    ), publishingReport.getCurrentOffsets());
@@ -1601,24 +1601,24 @@ public class KinesisSupervisorTest extends EasyMockSupport
    Assert.assertTrue("isUseTransaction", capturedTaskConfig.isUseTransaction());
    // check that the new task was created with starting sequences matching where the publishing task finished
-   Assert.assertEquals(stream, capturedTaskConfig.getStartSequenceNumbers().getStream());
+   Assert.assertEquals(STREAM, capturedTaskConfig.getStartSequenceNumbers().getStream());
    Assert.assertEquals(
        "2",
-       capturedTaskConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap().get(shardId1)
+       capturedTaskConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID1)
    );
    Assert.assertEquals(
        "1",
-       capturedTaskConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap().get(shardId0)
+       capturedTaskConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID0)
    );
-   Assert.assertEquals(stream, capturedTaskConfig.getEndSequenceNumbers().getStream());
+   Assert.assertEquals(STREAM, capturedTaskConfig.getEndSequenceNumbers().getStream());
    Assert.assertEquals(
        KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER,
-       capturedTaskConfig.getEndSequenceNumbers().getPartitionSequenceNumberMap().get(shardId1)
+       capturedTaskConfig.getEndSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID1)
    );
    Assert.assertEquals(
        KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER,
-       capturedTaskConfig.getEndSequenceNumbers().getPartitionSequenceNumberMap().get(shardId0)
+       capturedTaskConfig.getEndSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID0)
    );
  }
@@ -1633,17 +1633,17 @@ public class KinesisSupervisorTest extends EasyMockSupport
    supervisorRecordSupplier.assign(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream))
-       .andReturn(ImmutableSet.of(shardId1, shardId0))
+   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM))
+       .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0))
        .anyTimes();
    EasyMock.expect(supervisorRecordSupplier.getAssignment())
-       .andReturn(ImmutableSet.of(shard1Partition, shard0Partition))
+       .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION))
        .anyTimes();
    supervisorRecordSupplier.seekToLatest(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
    EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes();
+   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(SHARD1_PARTITION)).andReturn("12").anyTimes();
+   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(SHARD0_PARTITION)).andReturn("1").anyTimes();
    supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString());
    EasyMock.expectLastCall().anyTimes();
    Task id1 = createKinesisIndexTask(
@@ -1652,15 +1652,15 @@ public class KinesisSupervisorTest extends EasyMockSupport
        0,
        new SeekableStreamStartSequenceNumbers<>(
            "stream",
-           ImmutableMap.of(shardId1, "0", shardId0, "0"),
+           ImmutableMap.of(SHARD_ID1, "0", SHARD_ID0, "0"),
            ImmutableSet.of()
        ),
        new SeekableStreamEndSequenceNumbers<>(
            "stream",
            ImmutableMap.of(
-               shardId1,
+               SHARD_ID1,
                KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER,
-               shardId0,
+               SHARD_ID0,
                KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER
            )
        ),
@@ -1674,14 +1674,14 @@ public class KinesisSupervisorTest extends EasyMockSupport
        0,
        new SeekableStreamStartSequenceNumbers<>(
            "stream",
-           ImmutableMap.of(shardId1, "2", shardId0, "1"), ImmutableSet.of(shardId0, shardId1)
+           ImmutableMap.of(SHARD_ID1, "2", SHARD_ID0, "1"), ImmutableSet.of(SHARD_ID0, SHARD_ID1)
        ),
        new SeekableStreamEndSequenceNumbers<>(
            "stream",
            ImmutableMap.of(
-               shardId1,
+               SHARD_ID1,
                KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER,
-               shardId0,
+               SHARD_ID0,
                KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER
            )
        ),
@@ -1713,22 +1713,22 @@ public class KinesisSupervisorTest extends EasyMockSupport
    EasyMock.expect(taskClient.getStartTimeAsync("id2")).andReturn(Futures.immediateFuture(startTime));
    EasyMock.expect(taskClient.getCurrentOffsetsAsync("id1", false))
        .andReturn(Futures.immediateFuture(ImmutableMap.of(
-           shardId1,
+           SHARD_ID1,
            "2",
-           shardId0,
+           SHARD_ID0,
            "1"
        )));
    EasyMock.expect(taskClient.getEndOffsets("id1")).andReturn(ImmutableMap.of(
-       shardId1,
+       SHARD_ID1,
        "2",
-       shardId0,
+       SHARD_ID0,
        "1"
    ));
    EasyMock.expect(taskClient.getCurrentOffsetsAsync("id2", false))
        .andReturn(Futures.immediateFuture(ImmutableMap.of(
-           shardId1,
+           SHARD_ID1,
            "12",
-           shardId0,
+           SHARD_ID0,
            "1"
        )));
@@ -1737,9 +1737,9 @@ public class KinesisSupervisorTest extends EasyMockSupport
    // since id1 is publishing, so getCheckpoints wouldn't be called for it
    TreeMap<Integer, Map<String, String>> checkpoints = new TreeMap<>();
    checkpoints.put(0, ImmutableMap.of(
-       shardId1,
+       SHARD_ID1,
        "2",
-       shardId0,
+       SHARD_ID0,
        "1"
    ));
    EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id2"), EasyMock.anyBoolean()))
@@ -1762,7 +1762,7 @@ public class KinesisSupervisorTest extends EasyMockSupport
    Assert.assertEquals(3600L, payload.getDurationSeconds());
    Assert.assertEquals(2, payload.getPartitions());
    Assert.assertEquals(1, payload.getReplicas());
-   Assert.assertEquals(stream, payload.getStream());
+   Assert.assertEquals(STREAM, payload.getStream());
    Assert.assertEquals(1, payload.getActiveTasks().size());
    Assert.assertEquals(1, payload.getPublishingTasks().size());
    Assert.assertEquals(SupervisorStateManager.BasicState.RUNNING, payload.getDetailedState());
@@ -1774,29 +1774,29 @@ public class KinesisSupervisorTest extends EasyMockSupport
    Assert.assertEquals("id2", activeReport.getId());
    Assert.assertEquals(startTime, activeReport.getStartTime());
    Assert.assertEquals(ImmutableMap.of(
-       shardId1,
+       SHARD_ID1,
        "2",
-       shardId0,
+       SHARD_ID0,
        "1"
    ), activeReport.getStartingOffsets());
    Assert.assertEquals(ImmutableMap.of(
-       shardId1,
+       SHARD_ID1,
        "12",
-       shardId0,
+       SHARD_ID0,
        "1"
    ), activeReport.getCurrentOffsets());
    Assert.assertEquals("id1", publishingReport.getId());
    Assert.assertEquals(ImmutableMap.of(
-       shardId1,
+       SHARD_ID1,
        "0",
-       shardId0,
+       SHARD_ID0,
        "0"
    ), publishingReport.getStartingOffsets());
    Assert.assertEquals(ImmutableMap.of(
-       shardId1,
+       SHARD_ID1,
        "2",
-       shardId0,
+       SHARD_ID0,
        "1"
    ), publishingReport.getCurrentOffsets());
  }
@@ -1807,17 +1807,17 @@ public class KinesisSupervisorTest extends EasyMockSupport
    supervisor = getTestableSupervisor(2, 2, true, "PT1H", null, null);
    supervisorRecordSupplier.assign(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream))
-       .andReturn(ImmutableSet.of(shardId1, shardId0))
+   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM))
+       .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0))
        .anyTimes();
    EasyMock.expect(supervisorRecordSupplier.getAssignment())
-       .andReturn(ImmutableSet.of(shard1Partition, shard0Partition))
+       .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION))
        .anyTimes();
    supervisorRecordSupplier.seekToLatest(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
    EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes();
+   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(SHARD1_PARTITION)).andReturn("12").anyTimes();
+   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(SHARD0_PARTITION)).andReturn("1").anyTimes();
    supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString());
    EasyMock.expectLastCall().anyTimes();
@@ -1845,16 +1845,16 @@ public class KinesisSupervisorTest extends EasyMockSupport
    TreeMap<Integer, Map<String, String>> checkpoints1 = new TreeMap<>();
    checkpoints1.put(0, ImmutableMap.of(
-       shardId1,
+       SHARD_ID1,
        "0",
-       shardId0,
+       SHARD_ID0,
        "0"
    ));
    TreeMap<Integer, Map<String, String>> checkpoints2 = new TreeMap<>();
-   checkpoints2.put(0, ImmutableMap.of(shardId0, "0"));
+   checkpoints2.put(0, ImmutableMap.of(SHARD_ID0, "0"));
    EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-0"), EasyMock.anyBoolean()))
        .andReturn(Futures.immediateFuture(checkpoints1))
        .times(2);
@@ -1889,17 +1889,17 @@ public class KinesisSupervisorTest extends EasyMockSupport
    supervisor = getTestableSupervisor(2, 2, true, "PT1M", null, null);
    supervisorRecordSupplier.assign(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream))
-       .andReturn(ImmutableSet.of(shardId1, shardId0))
+   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM))
+       .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0))
        .anyTimes();
    EasyMock.expect(supervisorRecordSupplier.getAssignment())
-       .andReturn(ImmutableSet.of(shard1Partition, shard0Partition))
+       .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION))
        .anyTimes();
    supervisorRecordSupplier.seekToLatest(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
    EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes();
+   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(SHARD1_PARTITION)).andReturn("12").anyTimes();
+   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(SHARD0_PARTITION)).andReturn("1").anyTimes();
    supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString());
    EasyMock.expectLastCall().anyTimes();
@@ -1931,11 +1931,11 @@ public class KinesisSupervisorTest extends EasyMockSupport
    TreeMap<Integer, Map<String, String>> checkpoints1 = new TreeMap<>();
    checkpoints1.put(0, ImmutableMap.of(
-       shardId1,
+       SHARD_ID1,
        "0"
    ));
    TreeMap<Integer, Map<String, String>> checkpoints2 = new TreeMap<>();
-   checkpoints2.put(0, ImmutableMap.of(shardId0, "0"));
+   checkpoints2.put(0, ImmutableMap.of(SHARD_ID0, "0"));
    EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-0"), EasyMock.anyBoolean()))
        .andReturn(Futures.immediateFuture(checkpoints1))
        .times(2);
@@ -1981,10 +1981,10 @@ public class KinesisSupervisorTest extends EasyMockSupport
    KinesisIndexTaskIOConfig taskConfig = ((KinesisIndexTask) task).getIOConfig();
    Assert.assertEquals(
        "0",
-       taskConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap().get(shardId1)
+       taskConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID1)
    );
    Assert.assertNull(
-       taskConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap().get(shardId0)
+       taskConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID0)
    );
    }
  }
@@ -1997,17 +1997,17 @@ public class KinesisSupervisorTest extends EasyMockSupport
    supervisor = getTestableSupervisor(2, 2, true, "PT1M", null, null);
    supervisorRecordSupplier.assign(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream))
-       .andReturn(ImmutableSet.of(shardId1, shardId0))
+   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM))
+       .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0))
        .anyTimes();
    EasyMock.expect(supervisorRecordSupplier.getAssignment())
-       .andReturn(ImmutableSet.of(shard1Partition, shard0Partition))
+       .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION))
        .anyTimes();
    supervisorRecordSupplier.seekToLatest(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
    EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes();
+   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(SHARD1_PARTITION)).andReturn("12").anyTimes();
+   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(SHARD0_PARTITION)).andReturn("1").anyTimes();
    supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString());
    EasyMock.expectLastCall().anyTimes();
@@ -2039,11 +2039,11 @@ public class KinesisSupervisorTest extends EasyMockSupport
    TreeMap<Integer, Map<String, String>> checkpoints1 = new TreeMap<>();
    checkpoints1.put(0, ImmutableMap.of(
-       shardId1,
+       SHARD_ID1,
        "0"
    ));
    TreeMap<Integer, Map<String, String>> checkpoints2 = new TreeMap<>();
-   checkpoints2.put(0, ImmutableMap.of(shardId0, "0"));
+   checkpoints2.put(0, ImmutableMap.of(SHARD_ID0, "0"));
    EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-0"), EasyMock.anyBoolean()))
        .andReturn(Futures.immediateFuture(checkpoints1))
        .times(2);
@@ -2071,18 +2071,18 @@ public class KinesisSupervisorTest extends EasyMockSupport
        .times(2);
    EasyMock.expect(taskClient.pauseAsync(EasyMock.contains("sequenceName-0")))
        .andReturn(Futures.immediateFuture(ImmutableMap.of(
-           shardId1,
+           SHARD_ID1,
            "1"
        )))
        .andReturn(Futures.immediateFuture(ImmutableMap.of(
-           shardId1,
+           SHARD_ID1,
            "3"
        )));
    EasyMock.expect(
        taskClient.setEndOffsetsAsync(
            EasyMock.contains("sequenceName-0"),
            EasyMock.eq(ImmutableMap.of(
-               shardId1,
+               SHARD_ID1,
                "3"
            )),
            EasyMock.eq(true)
@@ -2105,7 +2105,7 @@ public class KinesisSupervisorTest extends EasyMockSupport
    KinesisIndexTaskIOConfig taskConfig = ((KinesisIndexTask) task).getIOConfig();
    Assert.assertEquals(
        "0",
-       taskConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap().get(shardId1)
+       taskConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap().get(SHARD_ID1)
    );
    }
  }
@@ -2145,17 +2145,17 @@ public class KinesisSupervisorTest extends EasyMockSupport
    supervisor = getTestableSupervisor(2, 1, true, "PT1H", null, null);
    supervisorRecordSupplier.assign(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream))
-       .andReturn(ImmutableSet.of(shardId1, shardId0))
+   EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM))
+       .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0))
        .anyTimes();
    EasyMock.expect(supervisorRecordSupplier.getAssignment())
-       .andReturn(ImmutableSet.of(shard1Partition, shard0Partition))
+       .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION))
        .anyTimes();
    supervisorRecordSupplier.seekToLatest(EasyMock.anyObject());
    EasyMock.expectLastCall().anyTimes();
    EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes();
-   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes();
+   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(SHARD1_PARTITION)).andReturn("12").anyTimes();
+   EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(SHARD0_PARTITION)).andReturn("1").anyTimes();
    supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString());
    EasyMock.expectLastCall().anyTimes();
@@ -2165,15 +2165,15 @@ public class KinesisSupervisorTest extends EasyMockSupport
        0,
        new SeekableStreamStartSequenceNumbers<>(
            "stream",
-           ImmutableMap.of(shardId1, "0", shardId0, "0"),
+           ImmutableMap.of(SHARD_ID1, "0", SHARD_ID0, "0"),
            ImmutableSet.of()
        ),
        new SeekableStreamEndSequenceNumbers<>(
            "stream",
            ImmutableMap.of(
-               shardId1,
+               SHARD_ID1,
                KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER,
-               shardId0,
+               SHARD_ID0,
                KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER
            )
        ),
@@ -2188,19 +2188,19 @@ public class KinesisSupervisorTest extends EasyMockSupport
        new SeekableStreamStartSequenceNumbers<>(
            "stream",
            ImmutableMap.of(
-               shardId1,
+               SHARD_ID1,
                "3",
-               shardId0,
+               SHARD_ID0,
                "1"
            ),
-           ImmutableSet.of(shardId0, shardId1)
+           ImmutableSet.of(SHARD_ID0, SHARD_ID1)
        ),
        new SeekableStreamEndSequenceNumbers<>(
            "stream",
            ImmutableMap.of(
-               shardId1,
+               SHARD_ID1,
                KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER,
-               shardId0,
+               SHARD_ID0,
                KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER
            )
        ),
@@ -2214,15 +2214,15 @@ public class KinesisSupervisorTest extends EasyMockSupport
        0,
        new SeekableStreamStartSequenceNumbers<>(
            "stream",
-           ImmutableMap.of(shardId1, "3", shardId0, "1"),
-           ImmutableSet.of(shardId0, shardId1)
+           ImmutableMap.of(SHARD_ID1, "3", SHARD_ID0, "1"),
+           ImmutableSet.of(SHARD_ID0, SHARD_ID1)
        ),
        new SeekableStreamEndSequenceNumbers<>(
            "stream",
            ImmutableMap.of(
-               shardId1,
+               SHARD_ID1,
                KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER,
-               shardId0,
+               SHARD_ID0,
                KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER
            )
        ),
@@ -2258,18 +2258,18 @@ public class KinesisSupervisorTest extends EasyMockSupport
    EasyMock.expect(taskClient.getStartTimeAsync("id2")).andReturn(Futures.immediateFuture(startTime));
    EasyMock.expect(taskClient.getStartTimeAsync("id3")).andReturn(Futures.immediateFuture(startTime));
    EasyMock.expect(taskClient.getEndOffsets("id1")).andReturn(ImmutableMap.of(
-       shardId1,
+       SHARD_ID1,
        "3",
-       shardId0,
+       SHARD_ID0,
        "1"
    ));
    // getCheckpoints will not be called for id1 as it is in publishing state
    TreeMap<Integer, Map<String, String>> checkpoints = new TreeMap<>();
    checkpoints.put(0, ImmutableMap.of(
-       shardId1,
+       SHARD_ID1,
        "3",
-       shardId0,
+       SHARD_ID0,
        "1"
    ));
    EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id2"), EasyMock.anyBoolean()))
@@ -2290,15 +2290,15 @@ public class KinesisSupervisorTest extends EasyMockSupport
    EasyMock.expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes();
EasyMock.expect(taskClient.pauseAsync("id2")) .andReturn(Futures.immediateFuture(ImmutableMap.of( - shardId1, + SHARD_ID1, "12", - shardId0, + SHARD_ID0, "1" ))); EasyMock.expect(taskClient.setEndOffsetsAsync("id2", ImmutableMap.of( - shardId1, + SHARD_ID1, "12", - shardId0, + SHARD_ID0, "1" ), true)) .andReturn(Futures.immediateFuture(true)); @@ -2366,11 +2366,11 @@ public class KinesisSupervisorTest extends EasyMockSupport KinesisDataSourceMetadata kinesisDataSourceMetadata = new KinesisDataSourceMetadata( new SeekableStreamStartSequenceNumbers<>( - stream, + STREAM, ImmutableMap.of( - shardId1, + SHARD_ID1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER, - shardId0, + SHARD_ID0, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER ), ImmutableSet.of() @@ -2379,9 +2379,9 @@ public class KinesisSupervisorTest extends EasyMockSupport KinesisDataSourceMetadata resetMetadata = new KinesisDataSourceMetadata( new SeekableStreamStartSequenceNumbers<>( - stream, + STREAM, ImmutableMap.of( - shardId0, + SHARD_ID0, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER ), ImmutableSet.of() @@ -2390,9 +2390,9 @@ public class KinesisSupervisorTest extends EasyMockSupport KinesisDataSourceMetadata expectedMetadata = new KinesisDataSourceMetadata( new SeekableStreamStartSequenceNumbers<>( - stream, + STREAM, ImmutableMap.of( - shardId1, + SHARD_ID1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER ), ImmutableSet.of() @@ -2442,8 +2442,8 @@ public class KinesisSupervisorTest extends EasyMockSupport KinesisDataSourceMetadata resetMetadata = new KinesisDataSourceMetadata( new SeekableStreamStartSequenceNumbers<>( - stream, - ImmutableMap.of(shardId0, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER), + STREAM, + ImmutableMap.of(SHARD_ID0, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER), ImmutableSet.of() ) ); @@ -2464,11 +2464,11 @@ public class KinesisSupervisorTest extends EasyMockSupport supervisorRecordSupplier.assign(EasyMock.anyObject()); EasyMock.expectLastCall().anyTimes(); - EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) - .andReturn(ImmutableSet.of(shardId1, shardId0)) + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM)) + .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0)) .anyTimes(); EasyMock.expect(supervisorRecordSupplier.getAssignment()) - .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION)) .anyTimes(); supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); EasyMock.expectLastCall().anyTimes(); @@ -2488,7 +2488,7 @@ public class KinesisSupervisorTest extends EasyMockSupport EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)) .andReturn( new KinesisDataSourceMetadata( - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of("1", "100", "2", "200")) + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of("1", "100", "2", "200")) ) ).times(4); // getOffsetFromStorageForPartition() throws an exception when the offsets are automatically reset. @@ -2499,7 +2499,7 @@ public class KinesisSupervisorTest extends EasyMockSupport DATASOURCE, new KinesisDataSourceMetadata( // Only one partition is reset in a single supervisor run. 
- new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of("2", "200")) + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of("2", "200")) ) ) ).andReturn(true); @@ -2520,17 +2520,17 @@ public class KinesisSupervisorTest extends EasyMockSupport supervisor = getTestableSupervisor(2, 1, true, "PT1H", null, null); supervisorRecordSupplier.assign(EasyMock.anyObject()); EasyMock.expectLastCall().anyTimes(); - EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) - .andReturn(ImmutableSet.of(shardId1, shardId0)) + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM)) + .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0)) .anyTimes(); EasyMock.expect(supervisorRecordSupplier.getAssignment()) - .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION)) .anyTimes(); supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); EasyMock.expectLastCall().anyTimes(); EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); - EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); - EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(SHARD1_PARTITION)).andReturn("12").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(SHARD0_PARTITION)).andReturn("1").anyTimes(); supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); EasyMock.expectLastCall().anyTimes(); @@ -2540,15 +2540,15 @@ public class KinesisSupervisorTest extends EasyMockSupport 0, new SeekableStreamStartSequenceNumbers<>( "stream", - ImmutableMap.of(shardId1, "0", shardId0, "0"), + ImmutableMap.of(SHARD_ID1, "0", SHARD_ID0, "0"), ImmutableSet.of() ), new SeekableStreamEndSequenceNumbers<>( "stream", ImmutableMap.of( - shardId1, + SHARD_ID1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER, - shardId0, + SHARD_ID0, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER ) ), @@ -2562,15 +2562,15 @@ public class KinesisSupervisorTest extends EasyMockSupport 0, new SeekableStreamStartSequenceNumbers<>( "stream", - ImmutableMap.of(shardId1, "3", shardId0, "1"), - ImmutableSet.of(shardId0, shardId1) + ImmutableMap.of(SHARD_ID1, "3", SHARD_ID0, "1"), + ImmutableSet.of(SHARD_ID0, SHARD_ID1) ), new SeekableStreamEndSequenceNumbers<>( "stream", ImmutableMap.of( - shardId1, + SHARD_ID1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER, - shardId0, + SHARD_ID0, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER ) ), @@ -2584,15 +2584,15 @@ public class KinesisSupervisorTest extends EasyMockSupport 0, new SeekableStreamStartSequenceNumbers<>( "stream", - ImmutableMap.of(shardId1, "3", shardId0, "1"), - ImmutableSet.of(shardId0, shardId1) + ImmutableMap.of(SHARD_ID1, "3", SHARD_ID0, "1"), + ImmutableSet.of(SHARD_ID0, SHARD_ID1) ), new SeekableStreamEndSequenceNumbers<>( "stream", ImmutableMap.of( - shardId1, + SHARD_ID1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER, - shardId0, + SHARD_ID0, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER ) ), @@ -2628,22 +2628,22 @@ public class KinesisSupervisorTest extends EasyMockSupport EasyMock.expect(taskClient.getStartTimeAsync("id2")).andReturn(Futures.immediateFuture(startTime)); EasyMock.expect(taskClient.getStartTimeAsync("id3")).andReturn(Futures.immediateFuture(startTime)); EasyMock.expect(taskClient.getEndOffsets("id1")).andReturn(ImmutableMap.of( - 
shardId1, + SHARD_ID1, "3", - shardId0, + SHARD_ID0, "1" )); TreeMap> checkpoints = new TreeMap<>(); checkpoints.put(0, ImmutableMap.of( - shardId1, + SHARD_ID1, "3", - shardId0, + SHARD_ID0, "1" )); @@ -2684,15 +2684,15 @@ public class KinesisSupervisorTest extends EasyMockSupport 0, new SeekableStreamStartSequenceNumbers<>( "stream", - ImmutableMap.of(shardId1, "0", shardId0, "0"), + ImmutableMap.of(SHARD_ID1, "0", SHARD_ID0, "0"), ImmutableSet.of() ), new SeekableStreamEndSequenceNumbers<>( "stream", ImmutableMap.of( - shardId1, + SHARD_ID1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER, - shardId0, + SHARD_ID0, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER ) ), @@ -2706,15 +2706,15 @@ public class KinesisSupervisorTest extends EasyMockSupport 0, new SeekableStreamStartSequenceNumbers<>( "stream", - ImmutableMap.of(shardId1, "10", shardId0, "20"), - ImmutableSet.of(shardId0, shardId1) + ImmutableMap.of(SHARD_ID1, "10", SHARD_ID0, "20"), + ImmutableSet.of(SHARD_ID0, SHARD_ID1) ), new SeekableStreamEndSequenceNumbers<>( "stream", ImmutableMap.of( - shardId1, + SHARD_ID1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER, - shardId0, + SHARD_ID0, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER ) ), @@ -2728,15 +2728,15 @@ public class KinesisSupervisorTest extends EasyMockSupport 0, new SeekableStreamStartSequenceNumbers<>( "stream", - ImmutableMap.of(shardId1, "10", shardId0, "20"), - ImmutableSet.of(shardId0, shardId1) + ImmutableMap.of(SHARD_ID1, "10", SHARD_ID0, "20"), + ImmutableSet.of(SHARD_ID0, SHARD_ID1) ), new SeekableStreamEndSequenceNumbers<>( "stream", ImmutableMap.of( - shardId1, + SHARD_ID1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER, - shardId0, + SHARD_ID0, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER ) ), @@ -2746,17 +2746,17 @@ public class KinesisSupervisorTest extends EasyMockSupport supervisorRecordSupplier.assign(EasyMock.anyObject()); EasyMock.expectLastCall().anyTimes(); - EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) - .andReturn(ImmutableSet.of(shardId1, shardId0)) + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM)) + .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0)) .anyTimes(); EasyMock.expect(supervisorRecordSupplier.getAssignment()) - .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION)) .anyTimes(); supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); EasyMock.expectLastCall().anyTimes(); EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); - EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); - EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(SHARD1_PARTITION)).andReturn("12").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(SHARD0_PARTITION)).andReturn("1").anyTimes(); supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); EasyMock.expectLastCall().anyTimes(); EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); @@ -2785,9 +2785,9 @@ public class KinesisSupervisorTest extends EasyMockSupport TreeMap> checkpoints = new TreeMap<>(); checkpoints.put(0, ImmutableMap.of( - shardId1, + SHARD_ID1, "10", - shardId0, + SHARD_ID0, "20" )); EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id1"), 
EasyMock.anyBoolean())) @@ -2831,15 +2831,15 @@ public class KinesisSupervisorTest extends EasyMockSupport 0, new SeekableStreamStartSequenceNumbers<>( "stream", - ImmutableMap.of(shardId1, "0", shardId0, "0"), + ImmutableMap.of(SHARD_ID1, "0", SHARD_ID0, "0"), ImmutableSet.of() ), new SeekableStreamEndSequenceNumbers<>( "stream", ImmutableMap.of( - shardId1, + SHARD_ID1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER, - shardId0, + SHARD_ID0, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER ) ), @@ -2853,15 +2853,15 @@ public class KinesisSupervisorTest extends EasyMockSupport 0, new SeekableStreamStartSequenceNumbers<>( "stream", - ImmutableMap.of(shardId1, "10", shardId0, "20"), - ImmutableSet.of(shardId0, shardId1) + ImmutableMap.of(SHARD_ID1, "10", SHARD_ID0, "20"), + ImmutableSet.of(SHARD_ID0, SHARD_ID1) ), new SeekableStreamEndSequenceNumbers<>( "stream", ImmutableMap.of( - shardId1, + SHARD_ID1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER, - shardId0, + SHARD_ID0, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER ) ), @@ -2875,15 +2875,15 @@ public class KinesisSupervisorTest extends EasyMockSupport 0, new SeekableStreamStartSequenceNumbers<>( "stream", - ImmutableMap.of(shardId1, "10", shardId0, "20"), - ImmutableSet.of(shardId0, shardId1) + ImmutableMap.of(SHARD_ID1, "10", SHARD_ID0, "20"), + ImmutableSet.of(SHARD_ID0, SHARD_ID1) ), new SeekableStreamEndSequenceNumbers<>( "stream", ImmutableMap.of( - shardId1, + SHARD_ID1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER, - shardId0, + SHARD_ID0, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER ) ), @@ -2900,17 +2900,17 @@ public class KinesisSupervisorTest extends EasyMockSupport supervisorRecordSupplier.assign(EasyMock.anyObject()); EasyMock.expectLastCall().anyTimes(); - EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) - .andReturn(ImmutableSet.of(shardId1, shardId0)) + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM)) + .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0)) .anyTimes(); EasyMock.expect(supervisorRecordSupplier.getAssignment()) - .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION)) .anyTimes(); supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); EasyMock.expectLastCall().anyTimes(); EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); - EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); - EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(SHARD1_PARTITION)).andReturn("12").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(SHARD0_PARTITION)).andReturn("1").anyTimes(); supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); EasyMock.expectLastCall().anyTimes(); @@ -2941,7 +2941,7 @@ public class KinesisSupervisorTest extends EasyMockSupport EasyMock.expect(taskClient.getStartTimeAsync("id3")).andReturn(Futures.immediateFuture(startTime)); final TreeMap> checkpoints = new TreeMap<>(); - checkpoints.put(0, ImmutableMap.of(shardId1, "10", shardId0, "20")); + checkpoints.put(0, ImmutableMap.of(SHARD_ID1, "10", SHARD_ID0, "20")); EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id1"), EasyMock.anyBoolean())) .andReturn(Futures.immediateFuture(checkpoints)) .times(1); @@ -2963,7 +2963,7 @@ public class 
KinesisSupervisorTest extends EasyMockSupport 0, id1.getIOConfig().getBaseSequenceName(), new KinesisDataSourceMetadata( - new SeekableStreamStartSequenceNumbers<>(stream, checkpoints.get(0), checkpoints.get(0).keySet()) + new SeekableStreamStartSequenceNumbers<>(STREAM, checkpoints.get(0), checkpoints.get(0).keySet()) ) ); @@ -2987,17 +2987,17 @@ public class KinesisSupervisorTest extends EasyMockSupport supervisorRecordSupplier.assign(EasyMock.anyObject()); EasyMock.expectLastCall().anyTimes(); - EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) - .andReturn(ImmutableSet.of(shardId1, shardId0)) + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM)) + .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0)) .anyTimes(); EasyMock.expect(supervisorRecordSupplier.getAssignment()) - .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION)) .anyTimes(); supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); EasyMock.expectLastCall().anyTimes(); EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); - EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); - EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(SHARD1_PARTITION)).andReturn("12").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(SHARD0_PARTITION)).andReturn("1").anyTimes(); supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); EasyMock.expectLastCall().anyTimes(); @@ -3008,15 +3008,15 @@ public class KinesisSupervisorTest extends EasyMockSupport 0, new SeekableStreamStartSequenceNumbers<>( "stream", - ImmutableMap.of(shardId1, "0", shardId0, "0"), + ImmutableMap.of(SHARD_ID1, "0", SHARD_ID0, "0"), ImmutableSet.of() ), new SeekableStreamEndSequenceNumbers<>( "stream", ImmutableMap.of( - shardId1, + SHARD_ID1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER, - shardId0, + SHARD_ID0, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER ) ), @@ -3030,15 +3030,15 @@ public class KinesisSupervisorTest extends EasyMockSupport 0, new SeekableStreamStartSequenceNumbers<>( "stream", - ImmutableMap.of(shardId1, "10", shardId0, "20"), - ImmutableSet.of(shardId0, shardId1) + ImmutableMap.of(SHARD_ID1, "10", SHARD_ID0, "20"), + ImmutableSet.of(SHARD_ID0, SHARD_ID1) ), new SeekableStreamEndSequenceNumbers<>( "stream", ImmutableMap.of( - shardId1, + SHARD_ID1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER, - shardId0, + SHARD_ID0, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER ) ), @@ -3052,15 +3052,15 @@ public class KinesisSupervisorTest extends EasyMockSupport 0, new SeekableStreamStartSequenceNumbers<>( "stream", - ImmutableMap.of(shardId1, "10", shardId0, "20"), - ImmutableSet.of(shardId0, shardId1) + ImmutableMap.of(SHARD_ID1, "10", SHARD_ID0, "20"), + ImmutableSet.of(SHARD_ID0, SHARD_ID1) ), new SeekableStreamEndSequenceNumbers<>( "stream", ImmutableMap.of( - shardId1, + SHARD_ID1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER, - shardId0, + SHARD_ID0, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER ) ), @@ -3090,7 +3090,7 @@ public class KinesisSupervisorTest extends EasyMockSupport 0, id1.getIOConfig().getBaseSequenceName(), new KinesisDataSourceMetadata( - new SeekableStreamStartSequenceNumbers<>(stream, Collections.emptyMap(), ImmutableSet.of()) + new 
SeekableStreamStartSequenceNumbers<>(STREAM, Collections.emptyMap(), ImmutableSet.of()) ) ); @@ -3124,10 +3124,10 @@ public class KinesisSupervisorTest extends EasyMockSupport "id1", DATASOURCE, 0, - new SeekableStreamStartSequenceNumbers<>("stream", ImmutableMap.of(shardId1, "0"), ImmutableSet.of()), + new SeekableStreamStartSequenceNumbers<>("stream", ImmutableMap.of(SHARD_ID1, "0"), ImmutableSet.of()), new SeekableStreamEndSequenceNumbers<>( "stream", - ImmutableMap.of(shardId1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER) + ImmutableMap.of(SHARD_ID1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER) ), null, null @@ -3137,10 +3137,10 @@ public class KinesisSupervisorTest extends EasyMockSupport "id2", DATASOURCE, 0, - new SeekableStreamStartSequenceNumbers<>("stream", ImmutableMap.of(shardId1, "0"), ImmutableSet.of(shardId1)), + new SeekableStreamStartSequenceNumbers<>("stream", ImmutableMap.of(SHARD_ID1, "0"), ImmutableSet.of(SHARD_ID1)), new SeekableStreamEndSequenceNumbers<>( "stream", - ImmutableMap.of(shardId1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER) + ImmutableMap.of(SHARD_ID1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER) ), null, null @@ -3150,16 +3150,16 @@ public class KinesisSupervisorTest extends EasyMockSupport "id3", DATASOURCE, 0, - new SeekableStreamStartSequenceNumbers<>("stream", ImmutableMap.of(shardId1, "0"), ImmutableSet.of(shardId1)), + new SeekableStreamStartSequenceNumbers<>("stream", ImmutableMap.of(SHARD_ID1, "0"), ImmutableSet.of(SHARD_ID1)), new SeekableStreamEndSequenceNumbers<>( "stream", - ImmutableMap.of(shardId1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER) + ImmutableMap.of(SHARD_ID1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER) ), null, null ); - EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(Collections.emptySet()).anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM)).andReturn(Collections.emptySet()).anyTimes(); EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(id1, id2, id3)).anyTimes(); @@ -3178,7 +3178,7 @@ public class KinesisSupervisorTest extends EasyMockSupport .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.READING)) .anyTimes(); final TreeMap<Integer, Map<String, String>> checkpoints = new TreeMap<>(); - checkpoints.put(0, ImmutableMap.of(shardId1, "0")); + checkpoints.put(0, ImmutableMap.of(SHARD_ID1, "0")); EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.anyString(), EasyMock.anyBoolean())) .andReturn(Futures.immediateFuture(checkpoints)) .times(3); @@ -3186,11 +3186,11 @@ public class KinesisSupervisorTest extends EasyMockSupport .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) .anyTimes(); EasyMock.expect(taskClient.pauseAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(ImmutableMap.of(shardId1, "10"))) + .andReturn(Futures.immediateFuture(ImmutableMap.of(SHARD_ID1, "10"))) .anyTimes(); EasyMock.expect(taskClient.setEndOffsetsAsync( EasyMock.anyString(), - EasyMock.eq(ImmutableMap.of(shardId1, "10")), + EasyMock.eq(ImmutableMap.of(SHARD_ID1, "10")), EasyMock.anyBoolean() )) .andReturn(Futures.immediateFuture(true)) @@ -3206,7 +3206,7 @@ public class KinesisSupervisorTest extends EasyMockSupport null, id1.getIOConfig().getBaseSequenceName(), new KinesisDataSourceMetadata( - new SeekableStreamStartSequenceNumbers<>(stream, checkpoints.get(0),
ImmutableSet.of()) + new SeekableStreamStartSequenceNumbers<>(STREAM, checkpoints.get(0), ImmutableSet.of()) ) ); @@ -3260,17 +3260,17 @@ public class KinesisSupervisorTest extends EasyMockSupport supervisorRecordSupplier.assign(EasyMock.anyObject()); EasyMock.expectLastCall().anyTimes(); - EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) - .andReturn(ImmutableSet.of(shardId1, shardId0)) + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM)) + .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0)) .anyTimes(); EasyMock.expect(supervisorRecordSupplier.getAssignment()) - .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION)) .anyTimes(); supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); EasyMock.expectLastCall().anyTimes(); EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); - EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); - EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(SHARD1_PARTITION)).andReturn("12").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(SHARD0_PARTITION)).andReturn("1").anyTimes(); supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); EasyMock.expectLastCall().anyTimes(); @@ -3281,15 +3281,15 @@ public class KinesisSupervisorTest extends EasyMockSupport 0, new SeekableStreamStartSequenceNumbers<>( "stream", - ImmutableMap.of(shardId1, "0", shardId0, "0"), + ImmutableMap.of(SHARD_ID1, "0", SHARD_ID0, "0"), ImmutableSet.of() ), new SeekableStreamEndSequenceNumbers<>( "stream", ImmutableMap.of( - shardId1, + SHARD_ID1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER, - shardId0, + SHARD_ID0, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER ) ), @@ -3303,15 +3303,15 @@ public class KinesisSupervisorTest extends EasyMockSupport 0, new SeekableStreamStartSequenceNumbers<>( "stream", - ImmutableMap.of(shardId1, "3", shardId0, "1"), - ImmutableSet.of(shardId0, shardId1) + ImmutableMap.of(SHARD_ID1, "3", SHARD_ID0, "1"), + ImmutableSet.of(SHARD_ID0, SHARD_ID1) ), new SeekableStreamEndSequenceNumbers<>( "stream", ImmutableMap.of( - shardId1, + SHARD_ID1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER, - shardId0, + SHARD_ID0, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER ) ), @@ -3325,15 +3325,15 @@ public class KinesisSupervisorTest extends EasyMockSupport 0, new SeekableStreamStartSequenceNumbers<>( "stream", - ImmutableMap.of(shardId1, "3", shardId0, "1"), - ImmutableSet.of(shardId0, shardId1) + ImmutableMap.of(SHARD_ID1, "3", SHARD_ID0, "1"), + ImmutableSet.of(SHARD_ID0, SHARD_ID1) ), new SeekableStreamEndSequenceNumbers<>( "stream", ImmutableMap.of( - shardId1, + SHARD_ID1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER, - shardId0, + SHARD_ID0, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER ) ), @@ -3369,18 +3369,18 @@ public class KinesisSupervisorTest extends EasyMockSupport EasyMock.expect(taskClient.getStartTimeAsync("id2")).andReturn(Futures.immediateFuture(startTime)); EasyMock.expect(taskClient.getStartTimeAsync("id3")).andReturn(Futures.immediateFuture(startTime)); EasyMock.expect(taskClient.getEndOffsets("id1")).andReturn(ImmutableMap.of( - shardId1, + SHARD_ID1, "3", - shardId0, + SHARD_ID0, "1" )); // getCheckpoints will not be called for id1 as it is in publishing state 
TreeMap<Integer, Map<String, String>> checkpoints = new TreeMap<>(); checkpoints.put(0, ImmutableMap.of( - shardId1, + SHARD_ID1, "3", - shardId0, + SHARD_ID0, "1" )); EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id2"), EasyMock.anyBoolean())) @@ -3394,15 +3394,15 @@ public class KinesisSupervisorTest extends EasyMockSupport EasyMock.expect(taskClient.pauseAsync("id2")) .andReturn(Futures.immediateFuture(ImmutableMap.of( - shardId1, + SHARD_ID1, "12", - shardId0, + SHARD_ID0, "1" ))); EasyMock.expect(taskClient.setEndOffsetsAsync("id2", ImmutableMap.of( - shardId1, + SHARD_ID1, "12", - shardId0, + SHARD_ID0, "1" ), true)) .andReturn(Futures.immediateFuture(true)); @@ -3508,10 +3508,10 @@ public class KinesisSupervisorTest extends EasyMockSupport supervisorRecordSupplier.assign(EasyMock.anyObject()); EasyMock.expectLastCall().anyTimes(); - EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) - .andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM)) + .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0)).anyTimes(); EasyMock.expect(supervisorRecordSupplier.getAssignment()) - .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION)) .anyTimes(); supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); EasyMock.expectLastCall().anyTimes(); @@ -3525,19 +3525,19 @@ public class KinesisSupervisorTest extends EasyMockSupport DATASOURCE, 0, new SeekableStreamStartSequenceNumbers<>( - stream, + STREAM, ImmutableMap.of( - shardId0, + SHARD_ID0, "0", - shardId1, + SHARD_ID1, "0" ), ImmutableSet.of() ), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of( - shardId0, + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of( + SHARD_ID0, "1", - shardId1, + SHARD_ID1, "12" )), null, @@ -3569,9 +3569,9 @@ public class KinesisSupervisorTest extends EasyMockSupport TreeMap<Integer, Map<String, String>> checkpoints = new TreeMap<>(); checkpoints.put(0, ImmutableMap.of( - shardId0, + SHARD_ID0, "0", - shardId1, + SHARD_ID1, "0" )); @@ -3604,10 +3604,10 @@ public class KinesisSupervisorTest extends EasyMockSupport ); supervisorRecordSupplier.assign(EasyMock.anyObject()); EasyMock.expectLastCall().anyTimes(); - EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) - .andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(STREAM)) + .andReturn(ImmutableSet.of(SHARD_ID1, SHARD_ID0)).anyTimes(); EasyMock.expect(supervisorRecordSupplier.getAssignment()) - .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .andReturn(ImmutableSet.of(SHARD1_PARTITION, SHARD0_PARTITION)) .anyTimes(); supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); EasyMock.expectLastCall().anyTimes(); @@ -3621,19 +3621,19 @@ public class KinesisSupervisorTest extends EasyMockSupport DATASOURCE, 0, new SeekableStreamStartSequenceNumbers<>( - stream, + STREAM, ImmutableMap.of( - shardId0, + SHARD_ID0, "0", - shardId1, + SHARD_ID1, "0" ), ImmutableSet.of() ), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of( - shardId0, + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of( + SHARD_ID0, "1", - shardId1, + SHARD_ID1, "12" )), null, @@ -3691,7 +3691,7 @@ public class KinesisSupervisorTest extends EasyMockSupport supervisor.addTaskGroupToActivelyReadingTaskGroup( 42, - ImmutableMap.of(shardId1, "3"), + ImmutableMap.of(SHARD_ID1, "3"), Optional.of(minMessageTime), Optional.of(maxMessageTime),
ImmutableSet.of("id1", "id2", "id3", "id4"), @@ -3737,11 +3737,11 @@ public class KinesisSupervisorTest extends EasyMockSupport "id1", 0, new SeekableStreamStartSequenceNumbers<>("stream", ImmutableMap.of( - shardId1, + SHARD_ID1, "3" ), ImmutableSet.of()), new SeekableStreamEndSequenceNumbers<>("stream", ImmutableMap.of( - shardId1, + SHARD_ID1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER )), minMessageTime, @@ -3753,11 +3753,11 @@ public class KinesisSupervisorTest extends EasyMockSupport "id2", 0, new SeekableStreamStartSequenceNumbers<>("stream", ImmutableMap.of( - shardId1, + SHARD_ID1, "3" ), ImmutableSet.of()), new SeekableStreamEndSequenceNumbers<>("stream", ImmutableMap.of( - shardId1, + SHARD_ID1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER )), minMessageTime, @@ -3769,11 +3769,11 @@ public class KinesisSupervisorTest extends EasyMockSupport "id3", 0, new SeekableStreamStartSequenceNumbers<>("stream", ImmutableMap.of( - shardId1, + SHARD_ID1, "3" ), ImmutableSet.of()), new SeekableStreamEndSequenceNumbers<>("stream", ImmutableMap.of( - shardId1, + SHARD_ID1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER )), minMessageTime, @@ -3788,13 +3788,13 @@ public class KinesisSupervisorTest extends EasyMockSupport new SeekableStreamStartSequenceNumbers<>( "stream", ImmutableMap.of( - shardId1, + SHARD_ID1, "4" // this is the mismatch ), ImmutableSet.of() ), new SeekableStreamEndSequenceNumbers<>("stream", ImmutableMap.of( - shardId1, + SHARD_ID1, KinesisSequenceNumber.NO_END_SEQUENCE_NUMBER )), minMessageTime, @@ -3858,7 +3858,7 @@ public class KinesisSupervisorTest extends EasyMockSupport ) { KinesisSupervisorIOConfig kinesisSupervisorIOConfig = new KinesisSupervisorIOConfig( - stream, + STREAM, "awsEndpoint", null, replicas, @@ -3936,7 +3936,7 @@ public class KinesisSupervisorTest extends EasyMockSupport taskMaster, indexerMetadataStorageCoordinator, taskClientFactory, - objectMapper, + OBJECT_MAPPER, new KinesisSupervisorSpec( dataSchema, tuningConfig, @@ -3947,7 +3947,7 @@ public class KinesisSupervisorTest extends EasyMockSupport taskMaster, indexerMetadataStorageCoordinator, taskClientFactory, - objectMapper, + OBJECT_MAPPER, new NoopServiceEmitter(), new DruidMonitorSchedulerConfig(), rowIngestionMetersFactory, @@ -3993,7 +3993,7 @@ public class KinesisSupervisorTest extends EasyMockSupport ) { KinesisSupervisorIOConfig kinesisSupervisorIOConfig = new KinesisSupervisorIOConfig( - stream, + STREAM, "awsEndpoint", null, replicas, @@ -4038,7 +4038,7 @@ public class KinesisSupervisorTest extends EasyMockSupport taskMaster, indexerMetadataStorageCoordinator, taskClientFactory, - objectMapper, + OBJECT_MAPPER, new KinesisSupervisorSpec( dataSchema, tuningConfig, @@ -4049,7 +4049,7 @@ public class KinesisSupervisorTest extends EasyMockSupport taskMaster, indexerMetadataStorageCoordinator, taskClientFactory, - objectMapper, + OBJECT_MAPPER, new NoopServiceEmitter(), new DruidMonitorSchedulerConfig(), rowIngestionMetersFactory, @@ -4077,7 +4077,7 @@ public class KinesisSupervisorTest extends EasyMockSupport ) { KinesisSupervisorIOConfig kinesisSupervisorIOConfig = new KinesisSupervisorIOConfig( - stream, + STREAM, "awsEndpoint", null, replicas, @@ -4122,7 +4122,7 @@ public class KinesisSupervisorTest extends EasyMockSupport taskMaster, indexerMetadataStorageCoordinator, taskClientFactory, - objectMapper, + OBJECT_MAPPER, new KinesisSupervisorSpec( dataSchema, tuningConfig, @@ -4133,7 +4133,7 @@ public class KinesisSupervisorTest extends EasyMockSupport taskMaster, 
indexerMetadataStorageCoordinator, taskClientFactory, - objectMapper, + OBJECT_MAPPER, new NoopServiceEmitter(), new DruidMonitorSchedulerConfig(), rowIngestionMetersFactory, @@ -4163,7 +4163,7 @@ public class KinesisSupervisorTest extends EasyMockSupport ) { KinesisSupervisorIOConfig kinesisSupervisorIOConfig = new KinesisSupervisorIOConfig( - stream, + STREAM, "awsEndpoint", null, replicas, @@ -4208,7 +4208,7 @@ public class KinesisSupervisorTest extends EasyMockSupport taskMaster, indexerMetadataStorageCoordinator, taskClientFactory, - objectMapper, + OBJECT_MAPPER, new KinesisSupervisorSpec( dataSchema, tuningConfig, @@ -4219,7 +4219,7 @@ public class KinesisSupervisorTest extends EasyMockSupport taskMaster, indexerMetadataStorageCoordinator, taskClientFactory, - objectMapper, + OBJECT_MAPPER, new NoopServiceEmitter(), new DruidMonitorSchedulerConfig(), rowIngestionMetersFactory, @@ -4239,7 +4239,7 @@ public class KinesisSupervisorTest extends EasyMockSupport return new DataSchema( dataSource, - objectMapper.convertValue( + OBJECT_MAPPER.convertValue( new StringInputRowParser( new JSONParseSpec( new TimestampSpec("timestamp", "iso", null), @@ -4262,7 +4262,7 @@ public class KinesisSupervisorTest extends EasyMockSupport ImmutableList.of() ), null, - objectMapper + OBJECT_MAPPER ); } diff --git a/extensions-core/lookups-cached-global/src/test/java/org/apache/druid/server/lookup/namespace/NamespacedExtractorModuleTest.java b/extensions-core/lookups-cached-global/src/test/java/org/apache/druid/server/lookup/namespace/NamespacedExtractorModuleTest.java index 9b55a462bd4..b47446e3f76 100644 --- a/extensions-core/lookups-cached-global/src/test/java/org/apache/druid/server/lookup/namespace/NamespacedExtractorModuleTest.java +++ b/extensions-core/lookups-cached-global/src/test/java/org/apache/druid/server/lookup/namespace/NamespacedExtractorModuleTest.java @@ -51,7 +51,7 @@ import java.util.Map; */ public class NamespacedExtractorModuleTest { - private static final ObjectMapper mapper = UriExtractionNamespaceTest.registerTypes(new DefaultObjectMapper()); + private static final ObjectMapper MAPPER = UriExtractionNamespaceTest.registerTypes(new DefaultObjectMapper()); private CacheScheduler scheduler; private Lifecycle lifecycle; @@ -93,7 +93,7 @@ public class NamespacedExtractorModuleTest { final File tmpFile = temporaryFolder.newFile(); try (Writer out = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) { - out.write(mapper.writeValueAsString(ImmutableMap.of("foo", "bar"))); + out.write(MAPPER.writeValueAsString(ImmutableMap.of("foo", "bar"))); } final UriCacheGenerator factory = new UriCacheGenerator( ImmutableMap.of("file", new LocalFileTimestampVersionFinder()) @@ -119,7 +119,7 @@ public class NamespacedExtractorModuleTest { final File tmpFile = temporaryFolder.newFile(); try (Writer out = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) { - out.write(mapper.writeValueAsString(ImmutableMap.of("foo", "bar"))); + out.write(MAPPER.writeValueAsString(ImmutableMap.of("foo", "bar"))); } final UriExtractionNamespace namespace = new UriExtractionNamespace( tmpFile.toURI(), @@ -140,7 +140,7 @@ public class NamespacedExtractorModuleTest { final File tmpFile = temporaryFolder.newFile(); try (Writer out = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) { - out.write(mapper.writeValueAsString(ImmutableMap.of("foo", "bar"))); + out.write(MAPPER.writeValueAsString(ImmutableMap.of("foo", "bar"))); } final UriExtractionNamespace namespace = new UriExtractionNamespace( tmpFile.toURI(), @@ -161,7 +161,7 @@ 
public class NamespacedExtractorModuleTest { final File tmpFile = temporaryFolder.newFile(); try (Writer out = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) { - out.write(mapper.writeValueAsString(ImmutableMap.of("foo", "bar"))); + out.write(MAPPER.writeValueAsString(ImmutableMap.of("foo", "bar"))); } final UriExtractionNamespace namespace = new UriExtractionNamespace( tmpFile.toURI(), diff --git a/extensions-core/lookups-cached-global/src/test/java/org/apache/druid/server/lookup/namespace/cache/JdbcExtractionNamespaceTest.java b/extensions-core/lookups-cached-global/src/test/java/org/apache/druid/server/lookup/namespace/cache/JdbcExtractionNamespaceTest.java index 096fced75cf..6dfc88df527 100644 --- a/extensions-core/lookups-cached-global/src/test/java/org/apache/druid/server/lookup/namespace/cache/JdbcExtractionNamespaceTest.java +++ b/extensions-core/lookups-cached-global/src/test/java/org/apache/druid/server/lookup/namespace/cache/JdbcExtractionNamespaceTest.java @@ -70,12 +70,12 @@ public class JdbcExtractionNamespaceTest @Rule public final TestDerbyConnector.DerbyConnectorRule derbyConnectorRule = new TestDerbyConnector.DerbyConnectorRule(); private static final Logger log = new Logger(JdbcExtractionNamespaceTest.class); - private static final String tableName = "abstractDbRenameTest"; - private static final String keyName = "keyName"; - private static final String valName = "valName"; - private static final String tsColumn_ = "tsColumn"; - private static final String filterColumn = "filterColumn"; - private static final Map renames = ImmutableMap.of( + private static final String TABLE_NAME = "abstractDbRenameTest"; + private static final String KEY_NAME = "keyName"; + private static final String VAL_NAME = "valName"; + private static final String TS_COLUMN = "tsColumn"; + private static final String FILTER_COLUMN = "filterColumn"; + private static final Map RENAMES = ImmutableMap.of( "foo", new String[]{"bar", "1"}, "bad", new String[]{"bar", "1"}, "how about that", new String[]{"foo", "0"}, @@ -129,22 +129,22 @@ public class JdbcExtractionNamespaceTest handle.createStatement( StringUtils.format( "CREATE TABLE %s (%s TIMESTAMP, %s VARCHAR(64), %s VARCHAR(64), %s VARCHAR(64))", - tableName, - tsColumn_, - filterColumn, - keyName, - valName + TABLE_NAME, + TS_COLUMN, + FILTER_COLUMN, + KEY_NAME, + VAL_NAME ) ).setQueryTimeout(1).execute() ); - handle.createStatement(StringUtils.format("TRUNCATE TABLE %s", tableName)).setQueryTimeout(1).execute(); + handle.createStatement(StringUtils.format("TRUNCATE TABLE %s", TABLE_NAME)).setQueryTimeout(1).execute(); handle.commit(); closer.register(new Closeable() { @Override public void close() throws IOException { - handle.createStatement("DROP TABLE " + tableName).setQueryTimeout(1).execute(); + handle.createStatement("DROP TABLE " + TABLE_NAME).setQueryTimeout(1).execute(); final ListenableFuture future = setupTeardownService.submit(new Runnable() { @Override @@ -179,7 +179,7 @@ public class JdbcExtractionNamespaceTest Assert.assertEquals(0, scheduler.getActiveEntries()); } }); - for (Map.Entry entry : renames.entrySet()) { + for (Map.Entry entry : RENAMES.entrySet()) { try { String key = entry.getKey(); String value = entry.getValue()[0]; @@ -338,19 +338,19 @@ public class JdbcExtractionNamespaceTest final String statementVal = val != null ? 
"'%s'" : "%s"; if (tsColumn == null) { handle.createStatement( - StringUtils.format("DELETE FROM %s WHERE %s='%s'", tableName, keyName, key) + StringUtils.format("DELETE FROM %s WHERE %s='%s'", TABLE_NAME, KEY_NAME, key) ).setQueryTimeout(1).execute(); query = StringUtils.format( "INSERT INTO %s (%s, %s, %s) VALUES ('%s', '%s', " + statementVal + ")", - tableName, - filterColumn, keyName, valName, + TABLE_NAME, + FILTER_COLUMN, KEY_NAME, VAL_NAME, filter, key, val ); } else { query = StringUtils.format( "INSERT INTO %s (%s, %s, %s, %s) VALUES ('%s', '%s', '%s', " + statementVal + ")", - tableName, - tsColumn, filterColumn, keyName, valName, + TABLE_NAME, + tsColumn, FILTER_COLUMN, KEY_NAME, VAL_NAME, updateTs, filter, key, val ); } @@ -367,9 +367,9 @@ public class JdbcExtractionNamespaceTest { final JdbcExtractionNamespace extractionNamespace = new JdbcExtractionNamespace( derbyConnectorRule.getMetadataConnectorConfig(), - tableName, - keyName, - valName, + TABLE_NAME, + KEY_NAME, + VAL_NAME, tsColumn, null, new Period(0) @@ -378,7 +378,7 @@ public class JdbcExtractionNamespaceTest CacheSchedulerTest.waitFor(entry); final Map map = entry.getCache(); - for (Map.Entry e : renames.entrySet()) { + for (Map.Entry e : RENAMES.entrySet()) { String key = e.getKey(); String[] val = e.getValue(); String field = val[0]; @@ -398,18 +398,18 @@ public class JdbcExtractionNamespaceTest { final JdbcExtractionNamespace extractionNamespace = new JdbcExtractionNamespace( derbyConnectorRule.getMetadataConnectorConfig(), - tableName, - keyName, - valName, + TABLE_NAME, + KEY_NAME, + VAL_NAME, tsColumn, - filterColumn + "='1'", + FILTER_COLUMN + "='1'", new Period(0) ); try (CacheScheduler.Entry entry = scheduler.schedule(extractionNamespace)) { CacheSchedulerTest.waitFor(entry); final Map map = entry.getCache(); - for (Map.Entry e : renames.entrySet()) { + for (Map.Entry e : RENAMES.entrySet()) { String key = e.getKey(); String[] val = e.getValue(); String field = val[0]; @@ -470,9 +470,9 @@ public class JdbcExtractionNamespaceTest { final JdbcExtractionNamespace extractionNamespace = new JdbcExtractionNamespace( derbyConnectorRule.getMetadataConnectorConfig(), - tableName, - keyName, - valName, + TABLE_NAME, + KEY_NAME, + VAL_NAME, tsColumn, "some filter", new Period(10) @@ -491,9 +491,9 @@ public class JdbcExtractionNamespaceTest { final JdbcExtractionNamespace extractionNamespace = new JdbcExtractionNamespace( derbyConnectorRule.getMetadataConnectorConfig(), - tableName, - keyName, - valName, + TABLE_NAME, + KEY_NAME, + VAL_NAME, tsColumn, null, new Period(10) diff --git a/extensions-core/lookups-cached-single/src/test/java/org/apache/druid/server/lookup/PollingLookupTest.java b/extensions-core/lookups-cached-single/src/test/java/org/apache/druid/server/lookup/PollingLookupTest.java index ebccdbd5ee8..9ce0e1c52a7 100644 --- a/extensions-core/lookups-cached-single/src/test/java/org/apache/druid/server/lookup/PollingLookupTest.java +++ b/extensions-core/lookups-cached-single/src/test/java/org/apache/druid/server/lookup/PollingLookupTest.java @@ -47,14 +47,14 @@ import java.util.Map; @RunWith(Parameterized.class) public class PollingLookupTest { - private static final Map firstLookupMap = ImmutableMap.of( + private static final Map FIRST_LOOKUP_MAP = ImmutableMap.of( "foo", "bar", "bad", "bar", "how about that", "foo", "empty string", "" ); - private static final Map secondLookupMap = ImmutableMap.of( + private static final Map SECOND_LOOKUP_MAP = ImmutableMap.of( "new-foo", "new-bar", "new-bad", "new-bar" ); 
@@ -71,9 +71,9 @@ public class PollingLookupTest { if (callNumber == 0) { callNumber++; - return firstLookupMap.entrySet(); + return FIRST_LOOKUP_MAP.entrySet(); } - return secondLookupMap.entrySet(); + return SECOND_LOOKUP_MAP.entrySet(); } @Nullable @@ -145,15 +145,15 @@ public class PollingLookupTest @Test public void testApply() { - assertMapLookup(firstLookupMap, pollingLookup); + assertMapLookup(FIRST_LOOKUP_MAP, pollingLookup); } @Test(timeout = POLL_PERIOD * 3) public void testApplyAfterDataChange() throws InterruptedException { - assertMapLookup(firstLookupMap, pollingLookup); + assertMapLookup(FIRST_LOOKUP_MAP, pollingLookup); Thread.sleep(POLL_PERIOD * 2); - assertMapLookup(secondLookupMap, pollingLookup); + assertMapLookup(SECOND_LOOKUP_MAP, pollingLookup); } @Test @@ -184,8 +184,8 @@ public class PollingLookupTest @Test public void testBulkApply() { - Map map = pollingLookup.applyAll(firstLookupMap.keySet()); - Assert.assertEquals(firstLookupMap, Maps.transformValues(map, new Function() + Map map = pollingLookup.applyAll(FIRST_LOOKUP_MAP.keySet()); + Assert.assertEquals(FIRST_LOOKUP_MAP, Maps.transformValues(map, new Function() { @Override public String apply(String input) diff --git a/extensions-core/lookups-cached-single/src/test/java/org/apache/druid/server/lookup/jdbc/JdbcDataFetcherTest.java b/extensions-core/lookups-cached-single/src/test/java/org/apache/druid/server/lookup/jdbc/JdbcDataFetcherTest.java index 5f1571e7212..76187ce1513 100644 --- a/extensions-core/lookups-cached-single/src/test/java/org/apache/druid/server/lookup/jdbc/JdbcDataFetcherTest.java +++ b/extensions-core/lookups-cached-single/src/test/java/org/apache/druid/server/lookup/jdbc/JdbcDataFetcherTest.java @@ -50,7 +50,7 @@ public class JdbcDataFetcherTest - private static final Map lookupMap = ImmutableMap.of( + private static final Map LOOKUP_MAP = ImmutableMap.of( "foo", "bar", "bad", "bar", "how about that", "foo", @@ -77,7 +77,7 @@ public class JdbcDataFetcherTest ); handle.createStatement(StringUtils.format("TRUNCATE TABLE %s", tableName)).setQueryTimeout(1).execute(); - for (Map.Entry entry : lookupMap.entrySet()) { + for (Map.Entry entry : LOOKUP_MAP.entrySet()) { insertValues(entry.getKey(), entry.getValue(), handle); } handle.commit(); @@ -94,7 +94,7 @@ public class JdbcDataFetcherTest public void testFetch() { Assert.assertEquals("null check", null, jdbcDataFetcher.fetch("baz")); - assertMapLookup(lookupMap, jdbcDataFetcher); + assertMapLookup(LOOKUP_MAP, jdbcDataFetcher); } @Test @@ -102,15 +102,15 @@ public class JdbcDataFetcherTest { ImmutableMap.Builder mapBuilder = ImmutableMap.builder(); jdbcDataFetcher.fetchAll().forEach(mapBuilder::put); - Assert.assertEquals("maps should match", lookupMap, mapBuilder.build()); + Assert.assertEquals("maps should match", LOOKUP_MAP, mapBuilder.build()); } @Test public void testFetchKeys() { ImmutableMap.Builder mapBuilder = ImmutableMap.builder(); - jdbcDataFetcher.fetch(lookupMap.keySet()).forEach(mapBuilder::put); - Assert.assertEquals(lookupMap, mapBuilder.build()); + jdbcDataFetcher.fetch(LOOKUP_MAP.keySet()).forEach(mapBuilder::put); + Assert.assertEquals(LOOKUP_MAP, mapBuilder.build()); } @Test diff --git a/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3DataSegmentPuller.java b/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3DataSegmentPuller.java index 9c33b09e098..e2e0535286c 100644 --- a/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3DataSegmentPuller.java +++ 
b/extensions-core/s3-extensions/src/main/java/org/apache/druid/storage/s3/S3DataSegmentPuller.java @@ -57,7 +57,7 @@ public class S3DataSegmentPuller implements URIDataPuller { public static final int DEFAULT_RETRY_COUNT = 3; - public static final String scheme = S3StorageDruidModule.SCHEME; + public static final String SCHEME = S3StorageDruidModule.SCHEME; private static final Logger log = new Logger(S3DataSegmentPuller.class); @@ -141,8 +141,8 @@ public class S3DataSegmentPuller implements URIDataPuller public static URI checkURI(URI uri) { - if (uri.getScheme().equalsIgnoreCase(scheme)) { - uri = URI.create("s3" + uri.toString().substring(scheme.length())); + if (uri.getScheme().equalsIgnoreCase(SCHEME)) { + uri = URI.create("s3" + uri.toString().substring(SCHEME.length())); } else if (!"s3".equalsIgnoreCase(uri.getScheme())) { throw new IAE("Don't know how to load scheme for URI [%s]", uri.toString()); } diff --git a/extensions-core/s3-extensions/src/test/java/org/apache/druid/firehose/s3/StaticS3FirehoseFactoryTest.java b/extensions-core/s3-extensions/src/test/java/org/apache/druid/firehose/s3/StaticS3FirehoseFactoryTest.java index 49acd61dcfc..3c33bb7b48d 100644 --- a/extensions-core/s3-extensions/src/test/java/org/apache/druid/firehose/s3/StaticS3FirehoseFactoryTest.java +++ b/extensions-core/s3-extensions/src/test/java/org/apache/druid/firehose/s3/StaticS3FirehoseFactoryTest.java @@ -57,9 +57,9 @@ import java.util.stream.Collectors; */ public class StaticS3FirehoseFactoryTest { - private static final AmazonS3Client S3_ClIENT = EasyMock.createNiceMock(AmazonS3Client.class); + private static final AmazonS3Client S3_CLIENT = EasyMock.createNiceMock(AmazonS3Client.class); private static final ServerSideEncryptingAmazonS3 SERVICE = new ServerSideEncryptingAmazonS3( - S3_ClIENT, + S3_CLIENT, new NoopServerSideEncryption() ); @@ -102,7 +102,7 @@ public class StaticS3FirehoseFactoryTest uris.sort(Comparator.comparing(URI::toString)); uris.forEach(StaticS3FirehoseFactoryTest::addExpectedObjject); - EasyMock.replay(S3_ClIENT); + EasyMock.replay(S3_CLIENT); final StaticS3FirehoseFactory factory = new StaticS3FirehoseFactory( SERVICE, diff --git a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentMoverTest.java b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentMoverTest.java index 34496d965d6..66a61db7015 100644 --- a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentMoverTest.java +++ b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentMoverTest.java @@ -51,7 +51,7 @@ import java.util.Set; public class S3DataSegmentMoverTest { - private static final DataSegment sourceSegment = new DataSegment( + private static final DataSegment SOURCE_SEGMENT = new DataSegment( "test", Intervals.of("2013-01-01/2013-01-02"), "1", @@ -80,7 +80,7 @@ public class S3DataSegmentMoverTest ); DataSegment movedSegment = mover.move( - sourceSegment, + SOURCE_SEGMENT, ImmutableMap.of("baseKey", "targetBaseKey", "bucket", "archive") ); @@ -102,7 +102,7 @@ public class S3DataSegmentMoverTest ); DataSegment movedSegment = mover.move( - sourceSegment, + SOURCE_SEGMENT, ImmutableMap.of("baseKey", "targetBaseKey", "bucket", "archive") ); @@ -120,7 +120,7 @@ public class S3DataSegmentMoverTest S3DataSegmentMover mover = new S3DataSegmentMover(mockS3Client, new S3DataSegmentPusherConfig()); mover.move( - sourceSegment, + SOURCE_SEGMENT, ImmutableMap.of("baseKey", "targetBaseKey", 
"bucket", "archive") ); } diff --git a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentPusherConfigTest.java b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentPusherConfigTest.java index b369b7a181b..af898811aed 100644 --- a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentPusherConfigTest.java +++ b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/S3DataSegmentPusherConfigTest.java @@ -33,7 +33,7 @@ import java.util.Set; public class S3DataSegmentPusherConfigTest { - private static final ObjectMapper jsonMapper = new DefaultObjectMapper(); + private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper(); @Test public void testSerialization() throws IOException @@ -41,8 +41,8 @@ public class S3DataSegmentPusherConfigTest String jsonConfig = "{\"bucket\":\"bucket1\",\"baseKey\":\"dataSource1\"," + "\"disableAcl\":false,\"maxListingLength\":2000,\"useS3aSchema\":false}"; - S3DataSegmentPusherConfig config = jsonMapper.readValue(jsonConfig, S3DataSegmentPusherConfig.class); - Assert.assertEquals(jsonConfig, jsonMapper.writeValueAsString(config)); + S3DataSegmentPusherConfig config = JSON_MAPPER.readValue(jsonConfig, S3DataSegmentPusherConfig.class); + Assert.assertEquals(jsonConfig, JSON_MAPPER.writeValueAsString(config)); } @Test @@ -52,8 +52,8 @@ public class S3DataSegmentPusherConfigTest String expectedJsonConfig = "{\"bucket\":\"bucket1\",\"baseKey\":\"dataSource1\"," + "\"disableAcl\":false,\"maxListingLength\":1000,\"useS3aSchema\":false}"; - S3DataSegmentPusherConfig config = jsonMapper.readValue(jsonConfig, S3DataSegmentPusherConfig.class); - Assert.assertEquals(expectedJsonConfig, jsonMapper.writeValueAsString(config)); + S3DataSegmentPusherConfig config = JSON_MAPPER.readValue(jsonConfig, S3DataSegmentPusherConfig.class); + Assert.assertEquals(expectedJsonConfig, JSON_MAPPER.writeValueAsString(config)); } @Test @@ -63,7 +63,7 @@ public class S3DataSegmentPusherConfigTest + "\"disableAcl\":false,\"maxListingLength\":-1}"; Validator validator = Validation.buildDefaultValidatorFactory().getValidator(); - S3DataSegmentPusherConfig config = jsonMapper.readValue(jsonConfig, S3DataSegmentPusherConfig.class); + S3DataSegmentPusherConfig config = JSON_MAPPER.readValue(jsonConfig, S3DataSegmentPusherConfig.class); Set> violations = validator.validate(config); Assert.assertEquals(1, violations.size()); ConstraintViolation violation = Iterators.getOnlyElement(violations.iterator()); diff --git a/extensions-core/stats/src/main/java/org/apache/druid/query/aggregation/variance/VarianceSerde.java b/extensions-core/stats/src/main/java/org/apache/druid/query/aggregation/variance/VarianceSerde.java index d14670cc0e4..256e83ae051 100644 --- a/extensions-core/stats/src/main/java/org/apache/druid/query/aggregation/variance/VarianceSerde.java +++ b/extensions-core/stats/src/main/java/org/apache/druid/query/aggregation/variance/VarianceSerde.java @@ -38,7 +38,7 @@ import java.util.List; */ public class VarianceSerde extends ComplexMetricSerde { - private static final Ordering comparator = + private static final Ordering COMPARATOR = Ordering.from(VarianceAggregatorCollector.COMPARATOR).nullsFirst(); @Override @@ -114,7 +114,7 @@ public class VarianceSerde extends ComplexMetricSerde @Override public int compare(VarianceAggregatorCollector o1, VarianceAggregatorCollector o2) { - return comparator.compare(o1, o2); + return COMPARATOR.compare(o1, o2); } }; } diff --git 
a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceAggregatorCollectorTest.java b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceAggregatorCollectorTest.java index d0ad8240105..1db5388948d 100644 --- a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceAggregatorCollectorTest.java +++ b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceAggregatorCollectorTest.java @@ -34,7 +34,7 @@ import java.util.concurrent.ThreadLocalRandom; public class VarianceAggregatorCollectorTest { - private static final float[] market_upfront = new float[]{ + private static final float[] MARKET_UPFRONT = new float[]{ 800.0f, 800.0f, 826.0602f, 1564.6177f, 1006.4021f, 869.64374f, 809.04175f, 1458.4027f, 852.4375f, 879.9881f, 950.1468f, 712.7746f, 846.2675f, 682.8855f, 1109.875f, 594.3817f, 870.1159f, 677.511f, 1410.2781f, 1219.4321f, 979.306f, 1224.5016f, 1215.5898f, 716.6092f, 1301.0233f, 786.3633f, 989.9315f, 1609.0967f, 1023.2952f, 1367.6381f, @@ -57,7 +57,7 @@ public class VarianceAggregatorCollectorTest 989.0328f, 744.7446f, 1166.4012f, 753.105f, 962.7312f, 780.272f }; - private static final float[] market_total_market = new float[]{ + private static final float[] MARKET_TOTAL_MARKET = new float[]{ 1000.0f, 1000.0f, 1040.9456f, 1689.0128f, 1049.142f, 1073.4766f, 1007.36554f, 1545.7089f, 1016.9652f, 1077.6127f, 1075.0896f, 953.9954f, 1022.7833f, 937.06195f, 1156.7448f, 849.8775f, 1066.208f, 904.34064f, 1240.5255f, 1343.2325f, 1088.9431f, 1349.2544f, 1102.8667f, 939.2441f, 1109.8754f, 997.99457f, 1037.4495f, 1686.4197f, @@ -85,7 +85,7 @@ public class VarianceAggregatorCollectorTest public void testVariance() { Random random = ThreadLocalRandom.current(); - for (float[] values : Arrays.asList(market_upfront, market_total_market)) { + for (float[] values : Arrays.asList(MARKET_UPFRONT, MARKET_TOTAL_MARKET)) { double sum = 0; for (float f : values) { sum += f; diff --git a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceGroupByQueryTest.java b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceGroupByQueryTest.java index d769eff75a4..e2e1d60edea 100644 --- a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceGroupByQueryTest.java +++ b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceGroupByQueryTest.java @@ -96,12 +96,12 @@ public class VarianceGroupByQueryTest { GroupByQuery query = GroupByQuery .builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(VarianceTestHelper.indexVarianceAggr) - .setPostAggregatorSpecs(Collections.singletonList(VarianceTestHelper.stddevOfIndexPostAggr)) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(VarianceTestHelper.INDEX_VARIANCE_AGGR) + .setPostAggregatorSpecs(Collections.singletonList(VarianceTestHelper.STD_DEV_OF_INDEX_POST_AGGR)) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); VarianceTestHelper.RowBuilder builder = @@ -138,16 +138,16 @@ public class VarianceGroupByQueryTest { GroupByQuery query = GroupByQuery .builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - 
.setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) .setAggregatorSpecs( - QueryRunnerTestHelper.rowsCount, - VarianceTestHelper.indexVarianceAggr, + QueryRunnerTestHelper.ROWS_COUNT, + VarianceTestHelper.INDEX_VARIANCE_AGGR, new LongSumAggregatorFactory("idx", "index") ) - .setPostAggregatorSpecs(Collections.singletonList(VarianceTestHelper.stddevOfIndexPostAggr)) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setPostAggregatorSpecs(Collections.singletonList(VarianceTestHelper.STD_DEV_OF_INDEX_POST_AGGR)) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); VarianceTestHelper.RowBuilder builder = @@ -188,20 +188,20 @@ public class VarianceGroupByQueryTest GroupByQuery query = GroupByQuery .builder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04") .setDimensions(new DefaultDimensionSpec("quality", "alias")) .setAggregatorSpecs( - QueryRunnerTestHelper.rowsCount, - QueryRunnerTestHelper.indexLongSum, - VarianceTestHelper.indexVarianceAggr + QueryRunnerTestHelper.ROWS_COUNT, + QueryRunnerTestHelper.INDEX_LONG_SUM, + VarianceTestHelper.INDEX_VARIANCE_AGGR ) - .setPostAggregatorSpecs(ImmutableList.of(VarianceTestHelper.stddevOfIndexPostAggr)) + .setPostAggregatorSpecs(ImmutableList.of(VarianceTestHelper.STD_DEV_OF_INDEX_POST_AGGR)) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setHavingSpec( new OrHavingSpec( ImmutableList.of( - new GreaterThanHavingSpec(VarianceTestHelper.stddevOfIndexMetric, 15L) // 3 rows + new GreaterThanHavingSpec(VarianceTestHelper.STD_DEV_OF_INDEX_METRIC, 15L) // 3 rows ) ) ) @@ -220,7 +220,7 @@ public class VarianceGroupByQueryTest new DefaultLimitSpec( Collections.singletonList( OrderByColumnSpec.asc( - VarianceTestHelper.stddevOfIndexMetric + VarianceTestHelper.STD_DEV_OF_INDEX_METRIC ) ), 2 ) diff --git a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceTestHelper.java b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceTestHelper.java index e6e9f007519..e3857f73ccc 100644 --- a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceTestHelper.java +++ b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceTestHelper.java @@ -48,26 +48,26 @@ public class VarianceTestHelper extends QueryRunnerTestHelper module.configure(null); } - public static final String indexVarianceMetric = "index_var"; + public static final String INDEX_VARIANCE_METRIC = "index_var"; - public static final VarianceAggregatorFactory indexVarianceAggr = new VarianceAggregatorFactory( - indexVarianceMetric, - indexMetric + public static final VarianceAggregatorFactory INDEX_VARIANCE_AGGR = new VarianceAggregatorFactory( + INDEX_VARIANCE_METRIC, + INDEX_METRIC ); - public static final String stddevOfIndexMetric = "index_stddev"; + public static final String STD_DEV_OF_INDEX_METRIC = "index_stddev"; - public static final PostAggregator stddevOfIndexPostAggr = new StandardDeviationPostAggregator( - stddevOfIndexMetric, - indexVarianceMetric, + public static final PostAggregator STD_DEV_OF_INDEX_POST_AGGR = new StandardDeviationPostAggregator( + STD_DEV_OF_INDEX_METRIC, + INDEX_VARIANCE_METRIC, null ); - public static final List commonPlusVarAggregators = 
Arrays.asList( - rowsCount, - indexDoubleSum, - qualityUniques, - indexVarianceAggr + public static final List COMMON_PLUS_VAR_AGGREGATORS = Arrays.asList( + ROWS_COUNT, + INDEX_DOUBLE_SUM, + QUALITY_UNIQUES, + INDEX_VARIANCE_AGGR ); public static class RowBuilder diff --git a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceTimeseriesQueryTest.java b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceTimeseriesQueryTest.java index 4656f00e0b3..82ebc34e4ef 100644 --- a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceTimeseriesQueryTest.java +++ b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceTimeseriesQueryTest.java @@ -70,14 +70,14 @@ public class VarianceTimeseriesQueryTest public void testTimeseriesWithNullFilterOnNonExistentDimension() { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) .filters("bobby", null) - .intervals(QueryRunnerTestHelper.firstToThird) - .aggregators(VarianceTestHelper.commonPlusVarAggregators) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) + .aggregators(VarianceTestHelper.COMMON_PLUS_VAR_AGGREGATORS) .postAggregators( - QueryRunnerTestHelper.addRowsIndexConstant, - VarianceTestHelper.stddevOfIndexPostAggr + QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT, + VarianceTestHelper.STD_DEV_OF_INDEX_POST_AGGR ) .descending(descending) .build(); diff --git a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceTopNQueryTest.java b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceTopNQueryTest.java index 0a493c1c051..72cf6758b24 100644 --- a/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceTopNQueryTest.java +++ b/extensions-core/stats/src/test/java/org/apache/druid/query/aggregation/variance/VarianceTopNQueryTest.java @@ -68,16 +68,16 @@ public class VarianceTopNQueryTest public void testFullOnTopNOverUniques() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(QueryRunnerTestHelper.uniqueMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .metric(QueryRunnerTestHelper.UNIQUE_METRIC) .threshold(3) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( - VarianceTestHelper.commonPlusVarAggregators, + VarianceTestHelper.COMMON_PLUS_VAR_AGGREGATORS, Lists.newArrayList( new DoubleMaxAggregatorFactory("maxIndex", "index"), new DoubleMinAggregatorFactory("minIndex", "index") @@ -85,7 +85,7 @@ public class VarianceTopNQueryTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( diff --git a/hll/src/main/java/org/apache/druid/hll/ByteBitLookup.java b/hll/src/main/java/org/apache/druid/hll/ByteBitLookup.java index f8a236563a0..cef10870382 100644 --- a/hll/src/main/java/org/apache/druid/hll/ByteBitLookup.java +++ 
b/hll/src/main/java/org/apache/druid/hll/ByteBitLookup.java @@ -23,266 +23,266 @@ package org.apache.druid.hll; */ public class ByteBitLookup { - public static final byte[] lookup; + public static final byte[] LOOKUP; static { - lookup = new byte[256]; + LOOKUP = new byte[256]; - lookup[0] = 0; - lookup[1] = 1; - lookup[2] = 2; - lookup[3] = 1; - lookup[4] = 3; - lookup[5] = 1; - lookup[6] = 2; - lookup[7] = 1; - lookup[8] = 4; - lookup[9] = 1; - lookup[10] = 2; - lookup[11] = 1; - lookup[12] = 3; - lookup[13] = 1; - lookup[14] = 2; - lookup[15] = 1; - lookup[16] = 5; - lookup[17] = 1; - lookup[18] = 2; - lookup[19] = 1; - lookup[20] = 3; - lookup[21] = 1; - lookup[22] = 2; - lookup[23] = 1; - lookup[24] = 4; - lookup[25] = 1; - lookup[26] = 2; - lookup[27] = 1; - lookup[28] = 3; - lookup[29] = 1; - lookup[30] = 2; - lookup[31] = 1; - lookup[32] = 6; - lookup[33] = 1; - lookup[34] = 2; - lookup[35] = 1; - lookup[36] = 3; - lookup[37] = 1; - lookup[38] = 2; - lookup[39] = 1; - lookup[40] = 4; - lookup[41] = 1; - lookup[42] = 2; - lookup[43] = 1; - lookup[44] = 3; - lookup[45] = 1; - lookup[46] = 2; - lookup[47] = 1; - lookup[48] = 5; - lookup[49] = 1; - lookup[50] = 2; - lookup[51] = 1; - lookup[52] = 3; - lookup[53] = 1; - lookup[54] = 2; - lookup[55] = 1; - lookup[56] = 4; - lookup[57] = 1; - lookup[58] = 2; - lookup[59] = 1; - lookup[60] = 3; - lookup[61] = 1; - lookup[62] = 2; - lookup[63] = 1; - lookup[64] = 7; - lookup[65] = 1; - lookup[66] = 2; - lookup[67] = 1; - lookup[68] = 3; - lookup[69] = 1; - lookup[70] = 2; - lookup[71] = 1; - lookup[72] = 4; - lookup[73] = 1; - lookup[74] = 2; - lookup[75] = 1; - lookup[76] = 3; - lookup[77] = 1; - lookup[78] = 2; - lookup[79] = 1; - lookup[80] = 5; - lookup[81] = 1; - lookup[82] = 2; - lookup[83] = 1; - lookup[84] = 3; - lookup[85] = 1; - lookup[86] = 2; - lookup[87] = 1; - lookup[88] = 4; - lookup[89] = 1; - lookup[90] = 2; - lookup[91] = 1; - lookup[92] = 3; - lookup[93] = 1; - lookup[94] = 2; - lookup[95] = 1; - lookup[96] = 6; - lookup[97] = 1; - lookup[98] = 2; - lookup[99] = 1; - lookup[100] = 3; - lookup[101] = 1; - lookup[102] = 2; - lookup[103] = 1; - lookup[104] = 4; - lookup[105] = 1; - lookup[106] = 2; - lookup[107] = 1; - lookup[108] = 3; - lookup[109] = 1; - lookup[110] = 2; - lookup[111] = 1; - lookup[112] = 5; - lookup[113] = 1; - lookup[114] = 2; - lookup[115] = 1; - lookup[116] = 3; - lookup[117] = 1; - lookup[118] = 2; - lookup[119] = 1; - lookup[120] = 4; - lookup[121] = 1; - lookup[122] = 2; - lookup[123] = 1; - lookup[124] = 3; - lookup[125] = 1; - lookup[126] = 2; - lookup[127] = 1; - lookup[128] = 8; - lookup[129] = 1; - lookup[130] = 2; - lookup[131] = 1; - lookup[132] = 3; - lookup[133] = 1; - lookup[134] = 2; - lookup[135] = 1; - lookup[136] = 4; - lookup[137] = 1; - lookup[138] = 2; - lookup[139] = 1; - lookup[140] = 3; - lookup[141] = 1; - lookup[142] = 2; - lookup[143] = 1; - lookup[144] = 5; - lookup[145] = 1; - lookup[146] = 2; - lookup[147] = 1; - lookup[148] = 3; - lookup[149] = 1; - lookup[150] = 2; - lookup[151] = 1; - lookup[152] = 4; - lookup[153] = 1; - lookup[154] = 2; - lookup[155] = 1; - lookup[156] = 3; - lookup[157] = 1; - lookup[158] = 2; - lookup[159] = 1; - lookup[160] = 6; - lookup[161] = 1; - lookup[162] = 2; - lookup[163] = 1; - lookup[164] = 3; - lookup[165] = 1; - lookup[166] = 2; - lookup[167] = 1; - lookup[168] = 4; - lookup[169] = 1; - lookup[170] = 2; - lookup[171] = 1; - lookup[172] = 3; - lookup[173] = 1; - lookup[174] = 2; - lookup[175] = 1; - lookup[176] = 5; - lookup[177] = 1; - 
lookup[178] = 2; - lookup[179] = 1; - lookup[180] = 3; - lookup[181] = 1; - lookup[182] = 2; - lookup[183] = 1; - lookup[184] = 4; - lookup[185] = 1; - lookup[186] = 2; - lookup[187] = 1; - lookup[188] = 3; - lookup[189] = 1; - lookup[190] = 2; - lookup[191] = 1; - lookup[192] = 7; - lookup[193] = 1; - lookup[194] = 2; - lookup[195] = 1; - lookup[196] = 3; - lookup[197] = 1; - lookup[198] = 2; - lookup[199] = 1; - lookup[200] = 4; - lookup[201] = 1; - lookup[202] = 2; - lookup[203] = 1; - lookup[204] = 3; - lookup[205] = 1; - lookup[206] = 2; - lookup[207] = 1; - lookup[208] = 5; - lookup[209] = 1; - lookup[210] = 2; - lookup[211] = 1; - lookup[212] = 3; - lookup[213] = 1; - lookup[214] = 2; - lookup[215] = 1; - lookup[216] = 4; - lookup[217] = 1; - lookup[218] = 2; - lookup[219] = 1; - lookup[220] = 3; - lookup[221] = 1; - lookup[222] = 2; - lookup[223] = 1; - lookup[224] = 6; - lookup[225] = 1; - lookup[226] = 2; - lookup[227] = 1; - lookup[228] = 3; - lookup[229] = 1; - lookup[230] = 2; - lookup[231] = 1; - lookup[232] = 4; - lookup[233] = 1; - lookup[234] = 2; - lookup[235] = 1; - lookup[236] = 3; - lookup[237] = 1; - lookup[238] = 2; - lookup[239] = 1; - lookup[240] = 5; - lookup[241] = 1; - lookup[242] = 2; - lookup[243] = 1; - lookup[244] = 3; - lookup[245] = 1; - lookup[246] = 2; - lookup[247] = 1; - lookup[248] = 4; - lookup[249] = 1; - lookup[250] = 2; - lookup[251] = 1; - lookup[252] = 3; - lookup[253] = 1; - lookup[254] = 2; - lookup[255] = 1; + LOOKUP[0] = 0; + LOOKUP[1] = 1; + LOOKUP[2] = 2; + LOOKUP[3] = 1; + LOOKUP[4] = 3; + LOOKUP[5] = 1; + LOOKUP[6] = 2; + LOOKUP[7] = 1; + LOOKUP[8] = 4; + LOOKUP[9] = 1; + LOOKUP[10] = 2; + LOOKUP[11] = 1; + LOOKUP[12] = 3; + LOOKUP[13] = 1; + LOOKUP[14] = 2; + LOOKUP[15] = 1; + LOOKUP[16] = 5; + LOOKUP[17] = 1; + LOOKUP[18] = 2; + LOOKUP[19] = 1; + LOOKUP[20] = 3; + LOOKUP[21] = 1; + LOOKUP[22] = 2; + LOOKUP[23] = 1; + LOOKUP[24] = 4; + LOOKUP[25] = 1; + LOOKUP[26] = 2; + LOOKUP[27] = 1; + LOOKUP[28] = 3; + LOOKUP[29] = 1; + LOOKUP[30] = 2; + LOOKUP[31] = 1; + LOOKUP[32] = 6; + LOOKUP[33] = 1; + LOOKUP[34] = 2; + LOOKUP[35] = 1; + LOOKUP[36] = 3; + LOOKUP[37] = 1; + LOOKUP[38] = 2; + LOOKUP[39] = 1; + LOOKUP[40] = 4; + LOOKUP[41] = 1; + LOOKUP[42] = 2; + LOOKUP[43] = 1; + LOOKUP[44] = 3; + LOOKUP[45] = 1; + LOOKUP[46] = 2; + LOOKUP[47] = 1; + LOOKUP[48] = 5; + LOOKUP[49] = 1; + LOOKUP[50] = 2; + LOOKUP[51] = 1; + LOOKUP[52] = 3; + LOOKUP[53] = 1; + LOOKUP[54] = 2; + LOOKUP[55] = 1; + LOOKUP[56] = 4; + LOOKUP[57] = 1; + LOOKUP[58] = 2; + LOOKUP[59] = 1; + LOOKUP[60] = 3; + LOOKUP[61] = 1; + LOOKUP[62] = 2; + LOOKUP[63] = 1; + LOOKUP[64] = 7; + LOOKUP[65] = 1; + LOOKUP[66] = 2; + LOOKUP[67] = 1; + LOOKUP[68] = 3; + LOOKUP[69] = 1; + LOOKUP[70] = 2; + LOOKUP[71] = 1; + LOOKUP[72] = 4; + LOOKUP[73] = 1; + LOOKUP[74] = 2; + LOOKUP[75] = 1; + LOOKUP[76] = 3; + LOOKUP[77] = 1; + LOOKUP[78] = 2; + LOOKUP[79] = 1; + LOOKUP[80] = 5; + LOOKUP[81] = 1; + LOOKUP[82] = 2; + LOOKUP[83] = 1; + LOOKUP[84] = 3; + LOOKUP[85] = 1; + LOOKUP[86] = 2; + LOOKUP[87] = 1; + LOOKUP[88] = 4; + LOOKUP[89] = 1; + LOOKUP[90] = 2; + LOOKUP[91] = 1; + LOOKUP[92] = 3; + LOOKUP[93] = 1; + LOOKUP[94] = 2; + LOOKUP[95] = 1; + LOOKUP[96] = 6; + LOOKUP[97] = 1; + LOOKUP[98] = 2; + LOOKUP[99] = 1; + LOOKUP[100] = 3; + LOOKUP[101] = 1; + LOOKUP[102] = 2; + LOOKUP[103] = 1; + LOOKUP[104] = 4; + LOOKUP[105] = 1; + LOOKUP[106] = 2; + LOOKUP[107] = 1; + LOOKUP[108] = 3; + LOOKUP[109] = 1; + LOOKUP[110] = 2; + LOOKUP[111] = 1; + LOOKUP[112] = 5; + LOOKUP[113] = 1; + LOOKUP[114] = 2; 
+ LOOKUP[115] = 1; + LOOKUP[116] = 3; + LOOKUP[117] = 1; + LOOKUP[118] = 2; + LOOKUP[119] = 1; + LOOKUP[120] = 4; + LOOKUP[121] = 1; + LOOKUP[122] = 2; + LOOKUP[123] = 1; + LOOKUP[124] = 3; + LOOKUP[125] = 1; + LOOKUP[126] = 2; + LOOKUP[127] = 1; + LOOKUP[128] = 8; + LOOKUP[129] = 1; + LOOKUP[130] = 2; + LOOKUP[131] = 1; + LOOKUP[132] = 3; + LOOKUP[133] = 1; + LOOKUP[134] = 2; + LOOKUP[135] = 1; + LOOKUP[136] = 4; + LOOKUP[137] = 1; + LOOKUP[138] = 2; + LOOKUP[139] = 1; + LOOKUP[140] = 3; + LOOKUP[141] = 1; + LOOKUP[142] = 2; + LOOKUP[143] = 1; + LOOKUP[144] = 5; + LOOKUP[145] = 1; + LOOKUP[146] = 2; + LOOKUP[147] = 1; + LOOKUP[148] = 3; + LOOKUP[149] = 1; + LOOKUP[150] = 2; + LOOKUP[151] = 1; + LOOKUP[152] = 4; + LOOKUP[153] = 1; + LOOKUP[154] = 2; + LOOKUP[155] = 1; + LOOKUP[156] = 3; + LOOKUP[157] = 1; + LOOKUP[158] = 2; + LOOKUP[159] = 1; + LOOKUP[160] = 6; + LOOKUP[161] = 1; + LOOKUP[162] = 2; + LOOKUP[163] = 1; + LOOKUP[164] = 3; + LOOKUP[165] = 1; + LOOKUP[166] = 2; + LOOKUP[167] = 1; + LOOKUP[168] = 4; + LOOKUP[169] = 1; + LOOKUP[170] = 2; + LOOKUP[171] = 1; + LOOKUP[172] = 3; + LOOKUP[173] = 1; + LOOKUP[174] = 2; + LOOKUP[175] = 1; + LOOKUP[176] = 5; + LOOKUP[177] = 1; + LOOKUP[178] = 2; + LOOKUP[179] = 1; + LOOKUP[180] = 3; + LOOKUP[181] = 1; + LOOKUP[182] = 2; + LOOKUP[183] = 1; + LOOKUP[184] = 4; + LOOKUP[185] = 1; + LOOKUP[186] = 2; + LOOKUP[187] = 1; + LOOKUP[188] = 3; + LOOKUP[189] = 1; + LOOKUP[190] = 2; + LOOKUP[191] = 1; + LOOKUP[192] = 7; + LOOKUP[193] = 1; + LOOKUP[194] = 2; + LOOKUP[195] = 1; + LOOKUP[196] = 3; + LOOKUP[197] = 1; + LOOKUP[198] = 2; + LOOKUP[199] = 1; + LOOKUP[200] = 4; + LOOKUP[201] = 1; + LOOKUP[202] = 2; + LOOKUP[203] = 1; + LOOKUP[204] = 3; + LOOKUP[205] = 1; + LOOKUP[206] = 2; + LOOKUP[207] = 1; + LOOKUP[208] = 5; + LOOKUP[209] = 1; + LOOKUP[210] = 2; + LOOKUP[211] = 1; + LOOKUP[212] = 3; + LOOKUP[213] = 1; + LOOKUP[214] = 2; + LOOKUP[215] = 1; + LOOKUP[216] = 4; + LOOKUP[217] = 1; + LOOKUP[218] = 2; + LOOKUP[219] = 1; + LOOKUP[220] = 3; + LOOKUP[221] = 1; + LOOKUP[222] = 2; + LOOKUP[223] = 1; + LOOKUP[224] = 6; + LOOKUP[225] = 1; + LOOKUP[226] = 2; + LOOKUP[227] = 1; + LOOKUP[228] = 3; + LOOKUP[229] = 1; + LOOKUP[230] = 2; + LOOKUP[231] = 1; + LOOKUP[232] = 4; + LOOKUP[233] = 1; + LOOKUP[234] = 2; + LOOKUP[235] = 1; + LOOKUP[236] = 3; + LOOKUP[237] = 1; + LOOKUP[238] = 2; + LOOKUP[239] = 1; + LOOKUP[240] = 5; + LOOKUP[241] = 1; + LOOKUP[242] = 2; + LOOKUP[243] = 1; + LOOKUP[244] = 3; + LOOKUP[245] = 1; + LOOKUP[246] = 2; + LOOKUP[247] = 1; + LOOKUP[248] = 4; + LOOKUP[249] = 1; + LOOKUP[250] = 2; + LOOKUP[251] = 1; + LOOKUP[252] = 3; + LOOKUP[253] = 1; + LOOKUP[254] = 2; + LOOKUP[255] = 1; } } diff --git a/hll/src/main/java/org/apache/druid/hll/HyperLogLogCollector.java b/hll/src/main/java/org/apache/druid/hll/HyperLogLogCollector.java index 82912146625..d285f1cd044 100644 --- a/hll/src/main/java/org/apache/druid/hll/HyperLogLogCollector.java +++ b/hll/src/main/java/org/apache/druid/hll/HyperLogLogCollector.java @@ -64,29 +64,29 @@ public abstract class HyperLogLogCollector implements Comparable> 4) + registerOffset; final int lower = (register & 0x0f) + registerOffset; - minNumRegisterLookup[registerOffset][register] = 1.0d / Math.pow(2, upper) + 1.0d / Math.pow(2, lower); + MIN_NUM_REGISTER_LOOKUP[registerOffset][register] = 1.0d / Math.pow(2, upper) + 1.0d / Math.pow(2, lower); } } } // we have to keep track of the number of zeroes in each of the two halves of the byte register (0, 1, or 2) - private static final int[] numZeroLookup = new 
int[256]; + private static final int[] NUM_ZERO_LOOKUP = new int[256]; static { - for (int i = 0; i < numZeroLookup.length; ++i) { - numZeroLookup[i] = (((i & 0xf0) == 0) ? 1 : 0) + (((i & 0x0f) == 0) ? 1 : 0); + for (int i = 0; i < NUM_ZERO_LOOKUP.length; ++i) { + NUM_ZERO_LOOKUP[i] = (((i & 0xf0) == 0) ? 1 : 0) + (((i & 0x0f) == 0) ? 1 : 0); } } @@ -181,7 +181,7 @@ public abstract class HyperLogLogCollector implements Comparable>> bitsPerBucket) + minNum; + int upperNibble = ((register & 0xf0) >>> BITS_PER_BUCKET) + minNum; int lowerNibble = (register & 0x0f) + minNum; if (isUpperNibble) { upperNibble = Math.max(upperNibble, overflowValue); @@ -191,8 +191,8 @@ public abstract class HyperLogLogCollector implements Comparable>> bitsPerBucket) + minNum; + int upperNibble = ((register & 0xf0) >>> BITS_PER_BUCKET) + minNum; int lowerNibble = (register & 0x0f) + minNum; if (isUpperNibble) { upperNibble = Math.max(upperNibble, overflowValue); @@ -225,8 +225,8 @@ public abstract class HyperLogLogCollector implements Comparable (registerOffset + range)) { + } else if (positionOf1 > (registerOffset + RANGE)) { final byte currMax = getMaxOverflowValue(); if (positionOf1 > currMax) { - if (currMax <= (registerOffset + range)) { + if (currMax <= (registerOffset + RANGE)) { // this could be optimized by having an add without sanity checks add(getMaxOverflowRegister(), currMax); } @@ -665,7 +665,7 @@ public abstract class HyperLogLogCollector implements Comparable> 1); final boolean isUpperNibble = ((bucket & 0x1) == 0); - final byte shiftedPositionOf1 = (isUpperNibble) ? (byte) (positionOf1 << bitsPerBucket) : positionOf1; + final byte shiftedPositionOf1 = (isUpperNibble) ? (byte) (positionOf1 << BITS_PER_BUCKET) : positionOf1; if (storageBuffer.remaining() != getNumBytesForDenseStorage()) { convertToDenseStorage(); @@ -712,7 +712,7 @@ public abstract class HyperLogLogCollector implements Comparable getParameters() @@ -216,13 +216,13 @@ public class HyperLogLogSerdeBenchmarkTest extends AbstractBenchmark { Random rand = new Random(758190); for (long i = 0; i < NUM_HASHES; ++i) { - collector.add(hashFunction.hashLong(rand.nextLong()).asBytes()); + collector.add(HASH_FUNCTION.hashLong(rand.nextLong()).asBytes()); } } private static HashCode getHash(final ByteBuffer byteBuffer) { - Hasher hasher = hashFunction.newHasher(); + Hasher hasher = HASH_FUNCTION.newHasher(); while (byteBuffer.position() < byteBuffer.limit()) { hasher.putByte(byteBuffer.get()); } diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java index 37e81178114..a6e6bcb4eff 100644 --- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java +++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java @@ -83,7 +83,7 @@ import java.util.SortedSet; */ public class HadoopDruidIndexerConfig { - private static final Injector injector; + private static final Injector INJECTOR; public static final String CONFIG_PROPERTY = "druid.indexer.config"; public static final Charset JAVA_NATIVE_CHARSET = Charset.forName("Unicode"); @@ -99,7 +99,7 @@ public class HadoopDruidIndexerConfig static { - injector = Initialization.makeInjectorWithModules( + INJECTOR = Initialization.makeInjectorWithModules( GuiceInjectors.makeStartupInjector(), ImmutableList.of( new Module() @@ -118,11 +118,11 @@ public class HadoopDruidIndexerConfig new IndexingHadoopModule() ) ); - JSON_MAPPER 
= injector.getInstance(ObjectMapper.class); - INDEX_IO = injector.getInstance(IndexIO.class); - INDEX_MERGER_V9 = injector.getInstance(IndexMergerV9.class); - HADOOP_KERBEROS_CONFIG = injector.getInstance(HadoopKerberosConfig.class); - DATA_SEGMENT_PUSHER = injector.getInstance(DataSegmentPusher.class); + JSON_MAPPER = INJECTOR.getInstance(ObjectMapper.class); + INDEX_IO = INJECTOR.getInstance(IndexIO.class); + INDEX_MERGER_V9 = INJECTOR.getInstance(IndexMergerV9.class); + HADOOP_KERBEROS_CONFIG = INJECTOR.getInstance(HadoopKerberosConfig.class); + DATA_SEGMENT_PUSHER = INJECTOR.getInstance(DataSegmentPusher.class); } public enum IndexJobCounters diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/IndexGeneratorJob.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/IndexGeneratorJob.java index 79e6d836dc4..5ee8d5e11de 100644 --- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/IndexGeneratorJob.java +++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/IndexGeneratorJob.java @@ -317,7 +317,7 @@ public class IndexGeneratorJob implements Jobby public static class IndexGeneratorMapper extends HadoopDruidIndexerMapper { - private static final HashFunction hashFunction = Hashing.murmur3_128(); + private static final HashFunction HASH_FUNCTION = Hashing.murmur3_128(); private AggregatorFactory[] aggregators; @@ -364,7 +364,7 @@ public class IndexGeneratorJob implements Jobby final long truncatedTimestamp = granularitySpec.getQueryGranularity() .bucketStart(inputRow.getTimestamp()) .getMillis(); - final byte[] hashedDimensions = hashFunction.hashBytes( + final byte[] hashedDimensions = HASH_FUNCTION.hashBytes( HadoopDruidIndexerConfig.JSON_MAPPER.writeValueAsBytes( Rows.toGroupKey( truncatedTimestamp, diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/Utils.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/Utils.java index 490c314fb21..d026273a1a1 100644 --- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/Utils.java +++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/Utils.java @@ -53,7 +53,7 @@ import java.util.concurrent.atomic.AtomicBoolean; public class Utils { private static final Logger log = new Logger(Utils.class); - private static final ObjectMapper jsonMapper = new DefaultObjectMapper(); + private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper(); public static OutputStream makePathAndOutputStream(JobContext job, Path outputPath, boolean deleteExisting) throws IOException @@ -120,7 +120,7 @@ public class Utils { FileSystem fs = statsPath.getFileSystem(job.getConfiguration()); - return jsonMapper.readValue( + return JSON_MAPPER.readValue( fs.open(statsPath), JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT ); @@ -128,7 +128,7 @@ public class Utils public static void storeStats(JobContext job, Path path, Map stats) throws IOException { - jsonMapper.writeValue(makePathAndOutputStream(job, path, true), stats); + JSON_MAPPER.writeValue(makePathAndOutputStream(job, path, true), stats); } public static String getFailureMessage(Job failedJob, ObjectMapper jsonMapper) diff --git a/indexing-hadoop/src/test/java/org/apache/druid/indexer/HadoopDruidIndexerConfigTest.java b/indexing-hadoop/src/test/java/org/apache/druid/indexer/HadoopDruidIndexerConfigTest.java index 9c723ac9abd..47b430780c6 100644 --- a/indexing-hadoop/src/test/java/org/apache/druid/indexer/HadoopDruidIndexerConfigTest.java +++ b/indexing-hadoop/src/test/java/org/apache/druid/indexer/HadoopDruidIndexerConfigTest.java @@ -45,11 
+45,11 @@ import java.util.List; */ public class HadoopDruidIndexerConfigTest { - private static final ObjectMapper jsonMapper; + private static final ObjectMapper JSON_MAPPER; static { - jsonMapper = new DefaultObjectMapper(); - jsonMapper.setInjectableValues(new InjectableValues.Std().addValue(ObjectMapper.class, jsonMapper)); + JSON_MAPPER = new DefaultObjectMapper(); + JSON_MAPPER.setInjectableValues(new InjectableValues.Std().addValue(ObjectMapper.class, JSON_MAPPER)); } @Test @@ -75,7 +75,7 @@ public class HadoopDruidIndexerConfigTest ImmutableList.of(Intervals.of("2010-01-01/P1D")) ), null, - jsonMapper + JSON_MAPPER ), new HadoopIOConfig(ImmutableMap.of("paths", "bar", "type", "static"), null, null), new HadoopTuningConfig( @@ -144,7 +144,7 @@ public class HadoopDruidIndexerConfigTest ImmutableList.of(Intervals.of("2010-01-01/P1D")) ), null, - jsonMapper + JSON_MAPPER ), new HadoopIOConfig(ImmutableMap.of("paths", "bar", "type", "static"), null, null), new HadoopTuningConfig( diff --git a/indexing-hadoop/src/test/java/org/apache/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java b/indexing-hadoop/src/test/java/org/apache/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java index c915a4dc59a..844e26659b6 100644 --- a/indexing-hadoop/src/test/java/org/apache/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java +++ b/indexing-hadoop/src/test/java/org/apache/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java @@ -51,11 +51,11 @@ import java.util.Map; */ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest { - private static final String testDatasource = "test"; - private static final String testDatasource2 = "test2"; - private static final Interval testDatasourceInterval = Intervals.of("1970/3000"); - private static final Interval testDatasourceInterval2 = Intervals.of("2000/2001"); - private static final Interval testDatasourceIntervalPartial = Intervals.of("2050/3000"); + private static final String TEST_DATA_SOURCE = "test"; + private static final String TEST_DATA_SOURCE2 = "test2"; + private static final Interval TEST_DATA_SOURCE_INTERVAL = Intervals.of("1970/3000"); + private static final Interval TEST_DATA_SOURCE_INTERVAL2 = Intervals.of("2000/2001"); + private static final Interval TEST_DATA_SOURCE_INTERVAL_PARTIAL = Intervals.of("2050/3000"); private final ObjectMapper jsonMapper; @@ -70,7 +70,7 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest } private static final DataSegment SEGMENT = new DataSegment( - testDatasource, + TEST_DATA_SOURCE, Intervals.of("2000/3000"), "ver", ImmutableMap.of( @@ -85,7 +85,7 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest ); private static final DataSegment SEGMENT2 = new DataSegment( - testDatasource2, + TEST_DATA_SOURCE2, Intervals.of("2000/3000"), "ver2", ImmutableMap.of( @@ -112,13 +112,13 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest { PathSpec pathSpec = new DatasourcePathSpec( null, - new DatasourceIngestionSpec(testDatasource, testDatasourceInterval, null, null, null, null, null, false, null), + new DatasourceIngestionSpec(TEST_DATA_SOURCE, TEST_DATA_SOURCE_INTERVAL, null, null, null, null, null, false, null), null, false ); HadoopDruidIndexerConfig config = testRunUpdateSegmentListIfDatasourcePathSpecIsUsed( pathSpec, - testDatasourceInterval + TEST_DATA_SOURCE_INTERVAL ); Assert.assertEquals( ImmutableList.of(WindowedDataSegment.of(SEGMENT)), @@ -132,8 +132,8 
@@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest PathSpec pathSpec = new DatasourcePathSpec( null, new DatasourceIngestionSpec( - testDatasource, - testDatasourceInterval, + TEST_DATA_SOURCE, + TEST_DATA_SOURCE_INTERVAL, null, ImmutableList.of(SEGMENT), null, @@ -147,7 +147,7 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest ); HadoopDruidIndexerConfig config = testRunUpdateSegmentListIfDatasourcePathSpecIsUsed( pathSpec, - testDatasourceInterval + TEST_DATA_SOURCE_INTERVAL ); Assert.assertEquals( ImmutableList.of(WindowedDataSegment.of(SEGMENT)), @@ -161,8 +161,8 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest PathSpec pathSpec = new DatasourcePathSpec( null, new DatasourceIngestionSpec( - testDatasource, - testDatasourceInterval, + TEST_DATA_SOURCE, + TEST_DATA_SOURCE_INTERVAL, null, ImmutableList.of(SEGMENT.withVersion("v2")), null, @@ -176,7 +176,7 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest ); testRunUpdateSegmentListIfDatasourcePathSpecIsUsed( pathSpec, - testDatasourceInterval + TEST_DATA_SOURCE_INTERVAL ); } @@ -187,8 +187,8 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest PathSpec pathSpec = new DatasourcePathSpec( null, new DatasourceIngestionSpec( - testDatasource, - testDatasourceIntervalPartial, + TEST_DATA_SOURCE, + TEST_DATA_SOURCE_INTERVAL_PARTIAL, null, null, null, @@ -202,10 +202,10 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest ); HadoopDruidIndexerConfig config = testRunUpdateSegmentListIfDatasourcePathSpecIsUsed( pathSpec, - testDatasourceIntervalPartial + TEST_DATA_SOURCE_INTERVAL_PARTIAL ); Assert.assertEquals( - ImmutableList.of(new WindowedDataSegment(SEGMENT, testDatasourceIntervalPartial)), + ImmutableList.of(new WindowedDataSegment(SEGMENT, TEST_DATA_SOURCE_INTERVAL_PARTIAL)), ((DatasourcePathSpec) config.getPathSpec()).getSegments() ); } @@ -219,8 +219,8 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest new DatasourcePathSpec( null, new DatasourceIngestionSpec( - testDatasource, - testDatasourceInterval, + TEST_DATA_SOURCE, + TEST_DATA_SOURCE_INTERVAL, null, null, null, @@ -235,8 +235,8 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest new DatasourcePathSpec( null, new DatasourceIngestionSpec( - testDatasource2, - testDatasourceInterval2, + TEST_DATA_SOURCE2, + TEST_DATA_SOURCE_INTERVAL2, null, null, null, @@ -252,14 +252,14 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest ); HadoopDruidIndexerConfig config = testRunUpdateSegmentListIfDatasourcePathSpecIsUsed( pathSpec, - testDatasourceInterval + TEST_DATA_SOURCE_INTERVAL ); Assert.assertEquals( ImmutableList.of(WindowedDataSegment.of(SEGMENT)), ((DatasourcePathSpec) ((MultiplePathSpec) config.getPathSpec()).getChildren().get(1)).getSegments() ); Assert.assertEquals( - ImmutableList.of(new WindowedDataSegment(SEGMENT2, testDatasourceInterval2)), + ImmutableList.of(new WindowedDataSegment(SEGMENT2, TEST_DATA_SOURCE_INTERVAL2)), ((DatasourcePathSpec) ((MultiplePathSpec) config.getPathSpec()).getChildren().get(2)).getSegments() ); } @@ -300,15 +300,15 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest EasyMock.expect( segmentLister.getUsedSegmentsForIntervals( - testDatasource, - Collections.singletonList(jobInterval != null ? jobInterval.overlap(testDatasourceInterval) : null) + TEST_DATA_SOURCE, + Collections.singletonList(jobInterval != null ? 
jobInterval.overlap(TEST_DATA_SOURCE_INTERVAL) : null) ) ).andReturn(ImmutableList.of(SEGMENT)); EasyMock.expect( segmentLister.getUsedSegmentsForIntervals( - testDatasource2, - Collections.singletonList(jobInterval != null ? jobInterval.overlap(testDatasourceInterval2) : null) + TEST_DATA_SOURCE2, + Collections.singletonList(jobInterval != null ? jobInterval.overlap(TEST_DATA_SOURCE_INTERVAL2) : null) ) ).andReturn(ImmutableList.of(SEGMENT2)); diff --git a/indexing-hadoop/src/test/java/org/apache/druid/indexer/HadoopTuningConfigTest.java b/indexing-hadoop/src/test/java/org/apache/druid/indexer/HadoopTuningConfigTest.java index ef29cb738f8..c1a7e954897 100644 --- a/indexing-hadoop/src/test/java/org/apache/druid/indexer/HadoopTuningConfigTest.java +++ b/indexing-hadoop/src/test/java/org/apache/druid/indexer/HadoopTuningConfigTest.java @@ -32,7 +32,7 @@ import java.util.List; */ public class HadoopTuningConfigTest { - private static final ObjectMapper jsonMapper = new DefaultObjectMapper(); + private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper(); @Test public void testSerde() throws Exception @@ -64,7 +64,7 @@ public class HadoopTuningConfigTest null ); - HadoopTuningConfig actual = jsonReadWriteRead(jsonMapper.writeValueAsString(expected), HadoopTuningConfig.class); + HadoopTuningConfig actual = jsonReadWriteRead(JSON_MAPPER.writeValueAsString(expected), HadoopTuningConfig.class); Assert.assertEquals("/tmp/workingpath", actual.getWorkingPath()); Assert.assertEquals("version", actual.getVersion()); @@ -88,7 +88,7 @@ public class HadoopTuningConfigTest public static T jsonReadWriteRead(String s, Class klass) { try { - return jsonMapper.readValue(jsonMapper.writeValueAsBytes(jsonMapper.readValue(s, klass)), klass); + return JSON_MAPPER.readValue(JSON_MAPPER.writeValueAsBytes(JSON_MAPPER.readValue(s, klass)), klass); } catch (Exception e) { throw new RuntimeException(e); diff --git a/indexing-hadoop/src/test/java/org/apache/druid/indexer/IndexGeneratorJobTest.java b/indexing-hadoop/src/test/java/org/apache/druid/indexer/IndexGeneratorJobTest.java index a7757ac3034..4cc8ac5018f 100644 --- a/indexing-hadoop/src/test/java/org/apache/druid/indexer/IndexGeneratorJobTest.java +++ b/indexing-hadoop/src/test/java/org/apache/druid/indexer/IndexGeneratorJobTest.java @@ -84,12 +84,12 @@ import java.util.TreeMap; @RunWith(Parameterized.class) public class IndexGeneratorJobTest { - private static final AggregatorFactory[] aggs1 = { + private static final AggregatorFactory[] AGGS1 = { new LongSumAggregatorFactory("visited_num", "visited_num"), new HyperUniquesAggregatorFactory("unique_hosts", "host") }; - private static final AggregatorFactory[] aggs2 = { + private static final AggregatorFactory[] AGGS2 = { new CountAggregatorFactory("count") }; @@ -156,7 +156,7 @@ public class IndexGeneratorJobTest ), null, null, - aggs1, + AGGS1, "website" }, { @@ -204,7 +204,7 @@ public class IndexGeneratorJobTest ), null, null, - aggs1, + AGGS1, "website" }, { @@ -253,7 +253,7 @@ public class IndexGeneratorJobTest ), null, null, - aggs1, + AGGS1, "website" }, { @@ -311,7 +311,7 @@ public class IndexGeneratorJobTest ), null, null, - aggs1, + AGGS1, "website" }, { @@ -344,7 +344,7 @@ public class IndexGeneratorJobTest ), 1, // force 1 row max per index for easier testing null, - aggs2, + AGGS2, "inherit_dims" }, { @@ -377,7 +377,7 @@ public class IndexGeneratorJobTest ), 1, // force 1 row max per index for easier testing null, - aggs2, + AGGS2, "inherit_dims2" } } diff --git 
a/indexing-hadoop/src/test/java/org/apache/druid/indexer/updater/MetadataStorageUpdaterJobSpecTest.java b/indexing-hadoop/src/test/java/org/apache/druid/indexer/updater/MetadataStorageUpdaterJobSpecTest.java index 5172c62d25a..83484d0060d 100644 --- a/indexing-hadoop/src/test/java/org/apache/druid/indexer/updater/MetadataStorageUpdaterJobSpecTest.java +++ b/indexing-hadoop/src/test/java/org/apache/druid/indexer/updater/MetadataStorageUpdaterJobSpecTest.java @@ -25,7 +25,7 @@ import org.junit.Test; public class MetadataStorageUpdaterJobSpecTest { - private static final ObjectMapper jsonMapper = new ObjectMapper(); + private static final ObjectMapper JSON_MAPPER = new ObjectMapper(); @Test public void testMetadaStorageConnectionConfigSimplePassword() throws Exception @@ -62,7 +62,7 @@ public class MetadataStorageUpdaterJobSpecTest String pwd ) throws Exception { - MetadataStorageUpdaterJobSpec spec = jsonMapper.readValue( + MetadataStorageUpdaterJobSpec spec = JSON_MAPPER.readValue( "{" + "\"type\": \"" + type + "\",\n" + "\"connectURI\": \"" + connectURI + "\",\n" + diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/index/RealtimeAppenderatorTuningConfig.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/index/RealtimeAppenderatorTuningConfig.java index ac00d60637f..cbb2dcebc02 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/common/index/RealtimeAppenderatorTuningConfig.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/index/RealtimeAppenderatorTuningConfig.java @@ -39,14 +39,14 @@ import java.io.File; @JsonTypeName("realtime_appenderator") public class RealtimeAppenderatorTuningConfig implements TuningConfig, AppenderatorConfig { - private static final int defaultMaxRowsInMemory = TuningConfig.DEFAULT_MAX_ROWS_IN_MEMORY; - private static final Period defaultIntermediatePersistPeriod = new Period("PT10M"); - private static final int defaultMaxPendingPersists = 0; - private static final ShardSpec defaultShardSpec = new NumberedShardSpec(0, 1); - private static final IndexSpec defaultIndexSpec = new IndexSpec(); - private static final Boolean defaultReportParseExceptions = Boolean.FALSE; - private static final long defaultPublishAndHandoffTimeout = 0; - private static final long defaultAlertTimeout = 0; + private static final int DEFAULT_MAX_ROWS_IN_MEMORY = TuningConfig.DEFAULT_MAX_ROWS_IN_MEMORY; + private static final Period DEFAULT_INTERMEDIATE_PERSIST_PERIOD = new Period("PT10M"); + private static final int DEFAULT_MAX_PENDING_PERSISTS = 0; + private static final ShardSpec DEFAULT_SHARD_SPEC = new NumberedShardSpec(0, 1); + private static final IndexSpec DEFAULT_INDEX_SPEC = new IndexSpec(); + private static final Boolean DEFAULT_REPORT_PARSE_EXCEPTIONS = Boolean.FALSE; + private static final long DEFAULT_PUBLISH_AND_HANDOFF_TIMEOUT = 0; + private static final long DEFAULT_ALERT_TIMEOUT = 0; private static File createNewBasePersistDirectory() { @@ -93,29 +93,29 @@ public class RealtimeAppenderatorTuningConfig implements TuningConfig, Appendera @JsonProperty("maxSavedParseExceptions") @Nullable Integer maxSavedParseExceptions ) { - this.maxRowsInMemory = maxRowsInMemory == null ? 
DEFAULT_MAX_ROWS_IN_MEMORY : maxRowsInMemory; // initializing this to 0, it will be lazily intialized to a value // @see server.src.main.java.org.apache.druid.segment.indexing.TuningConfigs#getMaxBytesInMemoryOrDefault(long) this.maxBytesInMemory = maxBytesInMemory == null ? 0 : maxBytesInMemory; this.partitionsSpec = new DynamicPartitionsSpec(maxRowsPerSegment, maxTotalRows); this.intermediatePersistPeriod = intermediatePersistPeriod == null - ? defaultIntermediatePersistPeriod + ? DEFAULT_INTERMEDIATE_PERSIST_PERIOD : intermediatePersistPeriod; this.basePersistDirectory = basePersistDirectory == null ? createNewBasePersistDirectory() : basePersistDirectory; - this.maxPendingPersists = maxPendingPersists == null ? defaultMaxPendingPersists : maxPendingPersists; - this.shardSpec = shardSpec == null ? defaultShardSpec : shardSpec; - this.indexSpec = indexSpec == null ? defaultIndexSpec : indexSpec; + this.maxPendingPersists = maxPendingPersists == null ? DEFAULT_MAX_PENDING_PERSISTS : maxPendingPersists; + this.shardSpec = shardSpec == null ? DEFAULT_SHARD_SPEC : shardSpec; + this.indexSpec = indexSpec == null ? DEFAULT_INDEX_SPEC : indexSpec; this.indexSpecForIntermediatePersists = indexSpecForIntermediatePersists == null ? this.indexSpec : indexSpecForIntermediatePersists; this.reportParseExceptions = reportParseExceptions == null - ? defaultReportParseExceptions + ? DEFAULT_REPORT_PARSE_EXCEPTIONS : reportParseExceptions; this.publishAndHandoffTimeout = publishAndHandoffTimeout == null - ? defaultPublishAndHandoffTimeout + ? DEFAULT_PUBLISH_AND_HANDOFF_TIMEOUT : publishAndHandoffTimeout; Preconditions.checkArgument(this.publishAndHandoffTimeout >= 0, "publishAndHandoffTimeout must be >= 0"); - this.alertTimeout = alertTimeout == null ? defaultAlertTimeout : alertTimeout; + this.alertTimeout = alertTimeout == null ? 
DEFAULT_ALERT_TIMEOUT : alertTimeout; Preconditions.checkArgument(this.alertTimeout >= 0, "alertTimeout must be >= 0"); this.segmentWriteOutMediumFactory = segmentWriteOutMediumFactory; diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/HadoopIndexTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/HadoopIndexTask.java index 86ac882e87d..6cb02888eaa 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/HadoopIndexTask.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/HadoopIndexTask.java @@ -703,7 +703,7 @@ public class HadoopIndexTask extends HadoopTask implements ChatHandler // can be injected based on the configuration given in config.getSchema().getIOConfig().getMetadataUpdateSpec() final MetadataStorageUpdaterJobHandler maybeHandler; if (config.isUpdaterJobSpecSet()) { - maybeHandler = injector.getInstance(MetadataStorageUpdaterJobHandler.class); + maybeHandler = INJECTOR.getInstance(MetadataStorageUpdaterJobHandler.class); } else { maybeHandler = null; } diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/HadoopTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/HadoopTask.java index f32ab28b38b..8bbe5ca65d4 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/HadoopTask.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/HadoopTask.java @@ -47,12 +47,12 @@ import java.util.Map; public abstract class HadoopTask extends AbstractBatchIndexTask { private static final Logger log = new Logger(HadoopTask.class); - private static final ExtensionsConfig extensionsConfig; + private static final ExtensionsConfig EXTENSIONS_CONFIG; - static final Injector injector = GuiceInjectors.makeStartupInjector(); + static final Injector INJECTOR = GuiceInjectors.makeStartupInjector(); static { - extensionsConfig = injector.getInstance(ExtensionsConfig.class); + EXTENSIONS_CONFIG = INJECTOR.getInstance(ExtensionsConfig.class); } private final List hadoopDependencyCoordinates; @@ -143,7 +143,7 @@ public abstract class HadoopTask extends AbstractBatchIndexTask ); final List extensionURLs = new ArrayList<>(); - for (final File extension : Initialization.getExtensionFilesToLoad(extensionsConfig)) { + for (final File extension : Initialization.getExtensionFilesToLoad(EXTENSIONS_CONFIG)) { final ClassLoader extensionLoader = Initialization.getClassLoaderForExtension(extension, false); extensionURLs.addAll(Arrays.asList(((URLClassLoader) extensionLoader).getURLs())); } @@ -156,7 +156,7 @@ public abstract class HadoopTask extends AbstractBatchIndexTask for (final File hadoopDependency : Initialization.getHadoopDependencyFilesToLoad( finalHadoopDependencyCoordinates, - extensionsConfig + EXTENSIONS_CONFIG )) { final ClassLoader hadoopLoader = Initialization.getClassLoaderForExtension(hadoopDependency, false); localClassLoaderURLs.addAll(Arrays.asList(((URLClassLoader) hadoopLoader).getURLs())); @@ -168,15 +168,15 @@ public abstract class HadoopTask extends AbstractBatchIndexTask ); final String hadoopContainerDruidClasspathJars; - if (extensionsConfig.getHadoopContainerDruidClasspath() == null) { + if (EXTENSIONS_CONFIG.getHadoopContainerDruidClasspath() == null) { hadoopContainerDruidClasspathJars = Joiner.on(File.pathSeparator).join(jobURLs); } else { List hadoopContainerURLs = Lists.newArrayList( - 
Initialization.getURLsForClasspath(extensionsConfig.getHadoopContainerDruidClasspath()) + Initialization.getURLsForClasspath(EXTENSIONS_CONFIG.getHadoopContainerDruidClasspath()) ); - if (extensionsConfig.getAddExtensionsToHadoopContainer()) { + if (EXTENSIONS_CONFIG.getAddExtensionsToHadoopContainer()) { hadoopContainerURLs.addAll(extensionURLs); } diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/IndexTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/IndexTask.java index a02736e2194..572f2fffb85 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/IndexTask.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/IndexTask.java @@ -120,7 +120,7 @@ import java.util.concurrent.TimeoutException; public class IndexTask extends AbstractBatchIndexTask implements ChatHandler { private static final Logger log = new Logger(IndexTask.class); - private static final HashFunction hashFunction = Hashing.murmur3_128(); + private static final HashFunction HASH_FUNCTION = Hashing.murmur3_128(); private static final String TYPE = "index"; private static String makeGroupId(IndexIngestionSpec ingestionSchema) @@ -717,7 +717,7 @@ public class IndexTask extends AbstractBatchIndexTask implements ChatHandler inputRow ); hllCollectors.get(interval).get() - .add(hashFunction.hashBytes(jsonMapper.writeValueAsBytes(groupKey)).asBytes()); + .add(HASH_FUNCTION.hashBytes(jsonMapper.writeValueAsBytes(groupKey)).asBytes()); } else { // we don't need to determine partitions but we still need to determine intervals, so add an Optional.absent() // for the interval and don't instantiate a HLL collector diff --git a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/NoopTask.java b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/NoopTask.java index 8fcf252f39e..aa7efbbcb1f 100644 --- a/indexing-service/src/main/java/org/apache/druid/indexing/common/task/NoopTask.java +++ b/indexing-service/src/main/java/org/apache/druid/indexing/common/task/NoopTask.java @@ -43,9 +43,9 @@ import java.util.UUID; public class NoopTask extends AbstractTask { private static final Logger log = new Logger(NoopTask.class); - private static final int defaultRunTime = 2500; - private static final int defaultIsReadyTime = 0; - private static final IsReadyResult defaultIsReadyResult = IsReadyResult.YES; + private static final int DEFAULT_RUN_TIME = 2500; + private static final int DEFAULT_IS_READY_TIME = 0; + private static final IsReadyResult DEFAULT_IS_READY_RESULT = IsReadyResult.YES; enum IsReadyResult { @@ -86,10 +86,10 @@ public class NoopTask extends AbstractTask context ); - this.runTime = (runTime == 0) ? defaultRunTime : runTime; - this.isReadyTime = (isReadyTime == 0) ? defaultIsReadyTime : isReadyTime; + this.runTime = (runTime == 0) ? DEFAULT_RUN_TIME : runTime; + this.isReadyTime = (isReadyTime == 0) ? DEFAULT_IS_READY_TIME : isReadyTime; this.isReadyResult = (isReadyResult == null) - ? defaultIsReadyResult + ? 
DEFAULT_IS_READY_RESULT : IsReadyResult.valueOf(StringUtils.toUpperCase(isReadyResult)); this.firehoseFactory = firehoseFactory; } diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java index 691022e7bc9..c5382836419 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/AppenderatorDriverRealtimeIndexTaskTest.java @@ -167,12 +167,12 @@ import java.util.regex.Pattern; public class AppenderatorDriverRealtimeIndexTaskTest { private static final Logger log = new Logger(AppenderatorDriverRealtimeIndexTaskTest.class); - private static final ServiceEmitter emitter = new ServiceEmitter( + private static final ServiceEmitter EMITTER = new ServiceEmitter( "service", "host", new NoopEmitter() ); - private static final ObjectMapper objectMapper = TestHelper.makeJsonMapper(); + private static final ObjectMapper OBJECT_MAPPER = TestHelper.makeJsonMapper(); private static final String FAIL_DIM = "__fail__"; @@ -280,8 +280,8 @@ public class AppenderatorDriverRealtimeIndexTaskTest @Before public void setUp() throws IOException { - EmittingLogger.registerEmitter(emitter); - emitter.start(); + EmittingLogger.registerEmitter(EMITTER); + EMITTER.start(); taskExec = MoreExecutors.listeningDecorator(Execs.singleThreaded("realtime-index-task-test-%d")); now = DateTimes.nowUtc(); @@ -1403,7 +1403,7 @@ public class AppenderatorDriverRealtimeIndexTaskTest new AggregatorFactory[]{new CountAggregatorFactory("rows"), new LongSumAggregatorFactory("met1", "met1")}, new UniformGranularitySpec(Granularities.DAY, Granularities.NONE, null), transformSpec, - objectMapper + OBJECT_MAPPER ); RealtimeIOConfig realtimeIOConfig = new RealtimeIOConfig( new TestFirehoseFactory(), @@ -1529,7 +1529,7 @@ public class AppenderatorDriverRealtimeIndexTaskTest taskLockbox, taskStorage, mdc, - emitter, + EMITTER, EasyMock.createMock(SupervisorManager.class) ); final TaskActionClientFactory taskActionClientFactory = new LocalTaskActionClientFactory( @@ -1604,7 +1604,7 @@ public class AppenderatorDriverRealtimeIndexTaskTest taskConfig, new DruidNode("druid/middlemanager", "localhost", false, 8091, null, true, false), taskActionClientFactory, - emitter, + EMITTER, new TestDataSegmentPusher(), new TestDataSegmentKiller(), null, // DataSegmentMover @@ -1658,7 +1658,7 @@ public class AppenderatorDriverRealtimeIndexTaskTest private IngestionStatsAndErrorsTaskReportData getTaskReportData() throws IOException { - Map<String, TaskReport> taskReports = objectMapper.readValue( + Map<String, TaskReport> taskReports = OBJECT_MAPPER.readValue( reportsFile, new TypeReference<Map<String, TaskReport>>() { diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskRunTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskRunTest.java index fbac1e42c40..a790ac474a4 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskRunTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskRunTest.java @@ -121,7 +121,7 @@ public class CompactionTaskRunTest extends IngestionTestBase ); } - private static final RetryPolicyFactory retryPolicyFactory = new RetryPolicyFactory(new RetryPolicyConfig()); + private static final RetryPolicyFactory 
RETRY_POLICY_FACTORY = new RetryPolicyFactory(new RetryPolicyConfig()); private final RowIngestionMetersFactory rowIngestionMetersFactory; private final CoordinatorClient coordinatorClient; private final SegmentLoaderFactory segmentLoaderFactory; @@ -171,7 +171,7 @@ public class CompactionTaskRunTest extends IngestionTestBase rowIngestionMetersFactory, coordinatorClient, segmentLoaderFactory, - retryPolicyFactory, + RETRY_POLICY_FACTORY, appenderatorsManager ); @@ -215,7 +215,7 @@ public class CompactionTaskRunTest extends IngestionTestBase rowIngestionMetersFactory, coordinatorClient, segmentLoaderFactory, - retryPolicyFactory, + RETRY_POLICY_FACTORY, appenderatorsManager ); @@ -291,7 +291,7 @@ public class CompactionTaskRunTest extends IngestionTestBase rowIngestionMetersFactory, coordinatorClient, segmentLoaderFactory, - retryPolicyFactory, + RETRY_POLICY_FACTORY, appenderatorsManager ); @@ -388,7 +388,7 @@ public class CompactionTaskRunTest extends IngestionTestBase rowIngestionMetersFactory, coordinatorClient, segmentLoaderFactory, - retryPolicyFactory, + RETRY_POLICY_FACTORY, appenderatorsManager ); @@ -441,7 +441,7 @@ public class CompactionTaskRunTest extends IngestionTestBase rowIngestionMetersFactory, coordinatorClient, segmentLoaderFactory, - retryPolicyFactory, + RETRY_POLICY_FACTORY, appenderatorsManager ); @@ -488,7 +488,7 @@ public class CompactionTaskRunTest extends IngestionTestBase rowIngestionMetersFactory, coordinatorClient, segmentLoaderFactory, - retryPolicyFactory, + RETRY_POLICY_FACTORY, appenderatorsManager ); @@ -546,7 +546,7 @@ public class CompactionTaskRunTest extends IngestionTestBase rowIngestionMetersFactory, coordinatorClient, segmentLoaderFactory, - retryPolicyFactory, + RETRY_POLICY_FACTORY, appenderatorsManager ); diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/IndexTaskTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/IndexTaskTest.java index 337da8c8e58..8cb99452a80 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/IndexTaskTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/IndexTaskTest.java @@ -141,7 +141,7 @@ public class IndexTaskTest extends IngestionTestBase ); } - private static final IndexSpec indexSpec = new IndexSpec(); + private static final IndexSpec INDEX_SPEC = new IndexSpec(); private final ObjectMapper jsonMapper; private AppenderatorsManager appenderatorsManager; private final IndexIO indexIO; @@ -997,7 +997,7 @@ public class IndexTaskTest extends IngestionTestBase null, null, new HashedPartitionsSpec(2, null, null), - indexSpec, + INDEX_SPEC, null, null, true, @@ -1122,7 +1122,7 @@ public class IndexTaskTest extends IngestionTestBase null, null, new DynamicPartitionsSpec(2, null), - indexSpec, + INDEX_SPEC, null, null, false, @@ -1240,7 +1240,7 @@ public class IndexTaskTest extends IngestionTestBase null, null, new HashedPartitionsSpec(2, null, null), - indexSpec, + INDEX_SPEC, null, null, true, @@ -1669,7 +1669,7 @@ public class IndexTaskTest extends IngestionTestBase numShards, partitionDimensions, null, - indexSpec, + INDEX_SPEC, null, null, forceGuaranteedRollup, diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/RealtimeIndexTaskTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/RealtimeIndexTaskTest.java index 7456ed44673..121f085aeaa 100644 --- 
a/indexing-service/src/test/java/org/apache/druid/indexing/common/task/RealtimeIndexTaskTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/task/RealtimeIndexTaskTest.java @@ -144,7 +144,7 @@ import java.util.concurrent.Executor; public class RealtimeIndexTaskTest { private static final Logger log = new Logger(RealtimeIndexTaskTest.class); - private static final ServiceEmitter emitter = new ServiceEmitter( + private static final ServiceEmitter EMITTER = new ServiceEmitter( "service", "host", new NoopEmitter() @@ -163,8 +163,8 @@ public class RealtimeIndexTaskTest @Before public void setUp() { - EmittingLogger.registerEmitter(emitter); - emitter.start(); + EmittingLogger.registerEmitter(EMITTER); + EMITTER.start(); taskExec = MoreExecutors.listeningDecorator(Execs.singleThreaded("realtime-index-task-test-%d")); now = DateTimes.nowUtc(); } @@ -894,7 +894,7 @@ public class RealtimeIndexTaskTest taskLockbox, taskStorage, mdc, - emitter, + EMITTER, EasyMock.createMock(SupervisorManager.class) ); final TaskActionClientFactory taskActionClientFactory = new LocalTaskActionClientFactory( @@ -978,7 +978,7 @@ public class RealtimeIndexTaskTest taskConfig, null, // taskExecutorNode taskActionClientFactory, - emitter, + EMITTER, new TestDataSegmentPusher(), new TestDataSegmentKiller(), null, // DataSegmentMover diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java index 178611ec1d6..55754ffbdd2 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/firehose/IngestSegmentFirehoseFactoryTest.java @@ -143,13 +143,13 @@ public class IngestSegmentFirehoseFactoryTest @Override public List getUsedSegmentsForInterval(String dataSource, Interval interval) { - return ImmutableList.copyOf(segmentSet); + return ImmutableList.copyOf(SEGMENT_SET); } @Override public List getUsedSegmentsForIntervals(String dataSource, List interval) { - return ImmutableList.copyOf(segmentSet); + return ImmutableList.copyOf(SEGMENT_SET); } @Override @@ -204,17 +204,17 @@ public class IngestSegmentFirehoseFactoryTest index.add(ROW_PARSER.parseBatch(buildRow(i.longValue())).get(0)); } - if (!persistDir.mkdirs() && !persistDir.exists()) { - throw new IOE("Could not create directory at [%s]", persistDir.getAbsolutePath()); + if (!PERSIST_DIR.mkdirs() && !PERSIST_DIR.exists()) { + throw new IOE("Could not create directory at [%s]", PERSIST_DIR.getAbsolutePath()); } - INDEX_MERGER_V9.persist(index, persistDir, indexSpec, null); + INDEX_MERGER_V9.persist(index, PERSIST_DIR, indexSpec, null); final CoordinatorClient cc = new CoordinatorClient(null, null) { @Override public List getDatabaseSegmentDataSourceSegments(String dataSource, List intervals) { - return ImmutableList.copyOf(segmentSet); + return ImmutableList.copyOf(SEGMENT_SET); } }; @@ -345,9 +345,9 @@ public class IngestSegmentFirehoseFactoryTest private static final String TIME_COLUMN = "ts"; private static final Integer MAX_SHARD_NUMBER = 10; private static final Integer MAX_ROWS = 10; - private static final File tmpDir = Files.createTempDir(); - private static final File persistDir = Paths.get(tmpDir.getAbsolutePath(), "indexTestMerger").toFile(); - private static final List segmentSet = new ArrayList<>(MAX_SHARD_NUMBER); + private static final File TMP_DIR = 
Files.createTempDir(); + private static final File PERSIST_DIR = Paths.get(TMP_DIR.getAbsolutePath(), "indexTestMerger").toFile(); + private static final List SEGMENT_SET = new ArrayList<>(MAX_SHARD_NUMBER); private final FirehoseFactory factory; private final InputRowParser rowParser; @@ -384,7 +384,7 @@ public class IngestSegmentFirehoseFactoryTest DATA_SOURCE_VERSION, ImmutableMap.of( "type", "local", - "path", persistDir.getAbsolutePath() + "path", PERSIST_DIR.getAbsolutePath() ), ImmutableList.of(DIM_NAME), ImmutableList.of(METRIC_LONG_NAME, METRIC_FLOAT_NAME), @@ -401,14 +401,14 @@ public class IngestSegmentFirehoseFactoryTest public static void setUpStatic() { for (int i = 0; i < MAX_SHARD_NUMBER; ++i) { - segmentSet.add(buildSegment(i)); + SEGMENT_SET.add(buildSegment(i)); } } @AfterClass public static void tearDownStatic() { - recursivelyDelete(tmpDir); + recursivelyDelete(TMP_DIR); } private static void recursivelyDelete(final File dir) @@ -465,9 +465,9 @@ public class IngestSegmentFirehoseFactoryTest @Test public void simpleFirehoseReadingTest() throws IOException { - Assert.assertEquals(MAX_SHARD_NUMBER.longValue(), segmentSet.size()); + Assert.assertEquals(MAX_SHARD_NUMBER.longValue(), SEGMENT_SET.size()); Integer rowcount = 0; - try (final Firehose firehose = factory.connect(rowParser, tmpDir)) { + try (final Firehose firehose = factory.connect(rowParser, TMP_DIR)) { while (firehose.hasMore()) { InputRow row = firehose.nextRow(); Assert.assertArrayEquals(new String[]{DIM_NAME}, row.getDimensions().toArray()); @@ -487,7 +487,7 @@ public class IngestSegmentFirehoseFactoryTest @Test public void testTransformSpec() throws IOException { - Assert.assertEquals(MAX_SHARD_NUMBER.longValue(), segmentSet.size()); + Assert.assertEquals(MAX_SHARD_NUMBER.longValue(), SEGMENT_SET.size()); Integer rowcount = 0; final TransformSpec transformSpec = new TransformSpec( new SelectorDimFilter(ColumnHolder.TIME_COLUMN_NAME, "1", null), @@ -497,7 +497,7 @@ public class IngestSegmentFirehoseFactoryTest ); int skipped = 0; try (final Firehose firehose = factory.connect(transformSpec.decorate(rowParser), TMP_DIR)) { while (firehose.hasMore()) { InputRow row = firehose.nextRow(); if (row == null) { diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/RemoteTaskRunnerRunPendingTasksConcurrencyTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/RemoteTaskRunnerRunPendingTasksConcurrencyTest.java index 4cc352fe9ee..3abe8edc779 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/RemoteTaskRunnerRunPendingTasksConcurrencyTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/RemoteTaskRunnerRunPendingTasksConcurrencyTest.java @@ -157,8 +157,8 @@ public class RemoteTaskRunnerRunPendingTasksConcurrencyTest } ZooKeeper zk = rtrTestUtils.getCuratorFramework().getZookeeperClient().getZooKeeper(); - while (zk.getChildren(rtrTestUtils.tasksPath + "/worker0", false).size() < 1 - && zk.getChildren(rtrTestUtils.tasksPath + "/worker1", false).size() < 1) { + while (zk.getChildren(rtrTestUtils.TASKS_PATH + "/worker0", false).size() < 1 + && zk.getChildren(rtrTestUtils.TASKS_PATH + "/worker1", false).size() < 1) { Thread.sleep(5); } } @@ -170,8 +170,8 @@ public class RemoteTaskRunnerRunPendingTasksConcurrencyTest } ZooKeeper zk = rtrTestUtils.getCuratorFramework().getZookeeperClient().getZooKeeper(); - while (zk.getChildren(rtrTestUtils.tasksPath + 
"/worker0", false).size() < 1 - || zk.getChildren(rtrTestUtils.tasksPath + "/worker1", false).size() < 1) { + while (zk.getChildren(rtrTestUtils.TASKS_PATH + "/worker0", false).size() < 1 + || zk.getChildren(rtrTestUtils.TASKS_PATH + "/worker1", false).size() < 1) { Thread.sleep(5); } } diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/RemoteTaskRunnerTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/RemoteTaskRunnerTest.java index 15d87d43be1..060164887b9 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/RemoteTaskRunnerTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/RemoteTaskRunnerTest.java @@ -61,10 +61,10 @@ import java.util.concurrent.TimeUnit; public class RemoteTaskRunnerTest { - private static final Joiner joiner = RemoteTaskRunnerTestUtils.joiner; - private static final String workerHost = "worker"; - private static final String announcementsPath = joiner.join(RemoteTaskRunnerTestUtils.announcementsPath, workerHost); - private static final String statusPath = joiner.join(RemoteTaskRunnerTestUtils.statusPath, workerHost); + private static final Joiner JOINER = RemoteTaskRunnerTestUtils.JOINER; + private static final String WORKER_HOST = "worker"; + private static final String ANNOUNCEMENTS_PATH = JOINER.join(RemoteTaskRunnerTestUtils.ANNOUNCEMENTS_PATH, WORKER_HOST); + private static final String STATUS_PATH = JOINER.join(RemoteTaskRunnerTestUtils.STATUS_PATH, WORKER_HOST); private RemoteTaskRunner remoteTaskRunner; private RemoteTaskRunnerTestUtils rtrTestUtils = new RemoteTaskRunnerTestUtils(); @@ -334,7 +334,7 @@ public class RemoteTaskRunnerTest Assert.assertTrue(remoteTaskRunner.getRunningTasks().iterator().next().getTaskId().equals(task.getId())); - cf.delete().forPath(joiner.join(statusPath, task.getId())); + cf.delete().forPath(JOINER.join(STATUS_PATH, task.getId())); TaskStatus status = future.get(); @@ -425,7 +425,7 @@ public class RemoteTaskRunnerTest Assert.assertTrue(workerRunningTask(task.getId())); - cf.delete().forPath(announcementsPath); + cf.delete().forPath(ANNOUNCEMENTS_PATH); TaskStatus status = future.get(); @@ -445,7 +445,7 @@ public class RemoteTaskRunnerTest config.getTaskCleanupTimeout().toStandardDuration().getMillis() * 2 ) ); - Assert.assertNull(cf.checkExists().forPath(statusPath)); + Assert.assertNull(cf.checkExists().forPath(STATUS_PATH)); } @Test @@ -511,7 +511,7 @@ public class RemoteTaskRunnerTest private void makeWorker() throws Exception { - worker = rtrTestUtils.makeWorker(workerHost, 3); + worker = rtrTestUtils.makeWorker(WORKER_HOST, 3); } private void disableWorker() throws Exception @@ -521,12 +521,12 @@ public class RemoteTaskRunnerTest private boolean taskAnnounced(final String taskId) { - return rtrTestUtils.taskAnnounced(workerHost, taskId); + return rtrTestUtils.taskAnnounced(WORKER_HOST, taskId); } private boolean workerRunningTask(final String taskId) { - return rtrTestUtils.workerRunningTask(workerHost, taskId); + return rtrTestUtils.workerRunningTask(WORKER_HOST, taskId); } private boolean workerCompletedTask(final ListenableFuture result) @@ -649,8 +649,8 @@ public class RemoteTaskRunnerTest mockWorkerRunningTask(task); Assert.assertTrue(workerRunningTask(task.getId())); - byte[] bytes = cf.getData().forPath(announcementsPath); - cf.delete().forPath(announcementsPath); + byte[] bytes = cf.getData().forPath(ANNOUNCEMENTS_PATH); + cf.delete().forPath(ANNOUNCEMENTS_PATH); // worker task cleanup scheduled 
Assert.assertTrue( TestUtils.conditionValid( @@ -666,7 +666,7 @@ public class RemoteTaskRunnerTest ); // Worker got reconnected - cf.create().forPath(announcementsPath, bytes); + cf.create().forPath(ANNOUNCEMENTS_PATH, bytes); // worker task cleanup should get cancelled and removed Assert.assertTrue( diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/RemoteTaskRunnerTestUtils.java b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/RemoteTaskRunnerTestUtils.java index 593f0053401..ed0713dd23b 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/RemoteTaskRunnerTestUtils.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/RemoteTaskRunnerTestUtils.java @@ -54,11 +54,11 @@ import java.util.concurrent.atomic.AtomicReference; */ public class RemoteTaskRunnerTestUtils { - static final Joiner joiner = Joiner.on("/"); - static final String basePath = "/test/druid"; - static final String announcementsPath = StringUtils.format("%s/indexer/announcements", basePath); - static final String tasksPath = StringUtils.format("%s/indexer/tasks", basePath); - static final String statusPath = StringUtils.format("%s/indexer/status", basePath); + static final Joiner JOINER = Joiner.on("/"); + static final String BASE_PATH = "/test/druid"; + static final String ANNOUNCEMENTS_PATH = StringUtils.format("%s/indexer/announcements", BASE_PATH); + static final String TASKS_PATH = StringUtils.format("%s/indexer/tasks", BASE_PATH); + static final String STATUS_PATH = StringUtils.format("%s/indexer/status", BASE_PATH); static final TaskLocation DUMMY_LOCATION = TaskLocation.create("dummy", 9000, -1); private TestingCluster testingCluster; @@ -94,8 +94,8 @@ public class RemoteTaskRunnerTestUtils .build(); cf.start(); cf.blockUntilConnected(); - cf.create().creatingParentsIfNeeded().forPath(basePath); - cf.create().creatingParentsIfNeeded().forPath(tasksPath); + cf.create().creatingParentsIfNeeded().forPath(BASE_PATH); + cf.create().creatingParentsIfNeeded().forPath(TASKS_PATH); } void tearDown() throws Exception @@ -124,7 +124,7 @@ public class RemoteTaskRunnerTestUtils @Override public String getBase() { - return basePath; + return BASE_PATH; } }, null, null, null, null ), @@ -150,10 +150,10 @@ public class RemoteTaskRunnerTestUtils ); cf.create().creatingParentsIfNeeded().withMode(CreateMode.EPHEMERAL).forPath( - joiner.join(announcementsPath, workerId), + JOINER.join(ANNOUNCEMENTS_PATH, workerId), jsonMapper.writeValueAsBytes(worker) ); - cf.create().creatingParentsIfNeeded().forPath(joiner.join(tasksPath, workerId)); + cf.create().creatingParentsIfNeeded().forPath(JOINER.join(TASKS_PATH, workerId)); return worker; } @@ -161,16 +161,16 @@ public class RemoteTaskRunnerTestUtils void disableWorker(Worker worker) throws Exception { cf.setData().forPath( - joiner.join(announcementsPath, worker.getHost()), + JOINER.join(ANNOUNCEMENTS_PATH, worker.getHost()), jsonMapper.writeValueAsBytes(new Worker(worker.getScheme(), worker.getHost(), worker.getIp(), worker.getCapacity(), "")) ); } void mockWorkerRunningTask(final String workerId, final Task task) throws Exception { - cf.delete().forPath(joiner.join(tasksPath, workerId, task.getId())); + cf.delete().forPath(JOINER.join(TASKS_PATH, workerId, task.getId())); - final String taskStatusPath = joiner.join(statusPath, workerId, task.getId()); + final String taskStatusPath = JOINER.join(STATUS_PATH, workerId, task.getId()); TaskAnnouncement taskAnnouncement = TaskAnnouncement.create(task, 
TaskStatus.running(task.getId()), DUMMY_LOCATION); cf.create() .creatingParentsIfNeeded() @@ -186,23 +186,23 @@ public class RemoteTaskRunnerTestUtils void mockWorkerCompleteSuccessfulTask(final String workerId, final Task task) throws Exception { TaskAnnouncement taskAnnouncement = TaskAnnouncement.create(task, TaskStatus.success(task.getId()), DUMMY_LOCATION); - cf.setData().forPath(joiner.join(statusPath, workerId, task.getId()), jsonMapper.writeValueAsBytes(taskAnnouncement)); + cf.setData().forPath(JOINER.join(STATUS_PATH, workerId, task.getId()), jsonMapper.writeValueAsBytes(taskAnnouncement)); } void mockWorkerCompleteFailedTask(final String workerId, final Task task) throws Exception { TaskAnnouncement taskAnnouncement = TaskAnnouncement.create(task, TaskStatus.failure(task.getId()), DUMMY_LOCATION); - cf.setData().forPath(joiner.join(statusPath, workerId, task.getId()), jsonMapper.writeValueAsBytes(taskAnnouncement)); + cf.setData().forPath(JOINER.join(STATUS_PATH, workerId, task.getId()), jsonMapper.writeValueAsBytes(taskAnnouncement)); } boolean workerRunningTask(final String workerId, final String taskId) { - return pathExists(joiner.join(statusPath, workerId, taskId)); + return pathExists(JOINER.join(STATUS_PATH, workerId, taskId)); } boolean taskAnnounced(final String workerId, final String taskId) { - return pathExists(joiner.join(tasksPath, workerId, taskId)); + return pathExists(JOINER.join(TASKS_PATH, workerId, taskId)); } boolean pathExists(final String path) diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/TaskLifecycleTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/TaskLifecycleTest.java index d3e295ab9c2..e8b0222ba65 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/TaskLifecycleTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/TaskLifecycleTest.java @@ -193,7 +193,7 @@ public class TaskLifecycleTest @Rule public final TemporaryFolder temporaryFolder = new TemporaryFolder(); - private static final Ordering byIntervalOrdering = new Ordering() + private static final Ordering BY_INTERVAL_ORDERING = new Ordering() { @Override public int compare(DataSegment dataSegment, DataSegment dataSegment2) @@ -203,13 +203,13 @@ public class TaskLifecycleTest }; private static DateTime now = DateTimes.nowUtc(); - private static final Iterable realtimeIdxTaskInputRows = ImmutableList.of( + private static final Iterable REALTIME_IDX_TASK_INPUT_ROWS = ImmutableList.of( ir(now.toString("YYYY-MM-dd'T'HH:mm:ss"), "test_dim1", "test_dim2", 1.0f), ir(now.plus(new Period(Hours.ONE)).toString("YYYY-MM-dd'T'HH:mm:ss"), "test_dim1", "test_dim2", 2.0f), ir(now.plus(new Period(Hours.TWO)).toString("YYYY-MM-dd'T'HH:mm:ss"), "test_dim1", "test_dim2", 3.0f) ); - private static final Iterable IdxTaskInputRows = ImmutableList.of( + private static final Iterable IDX_TASK_INPUT_ROWS = ImmutableList.of( ir("2010-01-01T01", "x", "y", 1), ir("2010-01-01T01", "x", "z", 1), ir("2010-01-02T01", "a", "b", 2), @@ -327,8 +327,8 @@ public class TaskLifecycleTest public Firehose connect(InputRowParser parser, File temporaryDirectory) { final Iterator inputRowIterator = usedByRealtimeIdxTask - ? realtimeIdxTaskInputRows.iterator() - : IdxTaskInputRows.iterator(); + ? 
REALTIME_IDX_TASK_INPUT_ROWS.iterator() + : IDX_TASK_INPUT_ROWS.iterator(); return new Firehose() { @@ -739,8 +739,8 @@ public class TaskLifecycleTest final TaskStatus mergedStatus = runTask(indexTask); final TaskStatus status = taskStorage.getStatus(indexTask.getId()).get(); - final List publishedSegments = byIntervalOrdering.sortedCopy(mdc.getPublished()); - final List loggedSegments = byIntervalOrdering.sortedCopy(tsqa.getInsertedSegments(indexTask.getId())); + final List publishedSegments = BY_INTERVAL_ORDERING.sortedCopy(mdc.getPublished()); + final List loggedSegments = BY_INTERVAL_ORDERING.sortedCopy(tsqa.getInsertedSegments(indexTask.getId())); Assert.assertEquals("statusCode", TaskState.SUCCESS, status.getStatusCode()); Assert.assertEquals(taskLocation, status.getLocation()); @@ -1255,8 +1255,8 @@ public class TaskLifecycleTest } final TaskStatus status = taskStorage.getStatus(indexTask.getId()).get(); - final List publishedSegments = byIntervalOrdering.sortedCopy(mdc.getPublished()); - final List loggedSegments = byIntervalOrdering.sortedCopy(tsqa.getInsertedSegments(indexTask.getId())); + final List publishedSegments = BY_INTERVAL_ORDERING.sortedCopy(mdc.getPublished()); + final List loggedSegments = BY_INTERVAL_ORDERING.sortedCopy(tsqa.getInsertedSegments(indexTask.getId())); Assert.assertEquals("statusCode", TaskState.SUCCESS, status.getStatusCode()); Assert.assertEquals(taskLocation, status.getLocation()); diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/config/RemoteTaskRunnerConfigTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/config/RemoteTaskRunnerConfigTest.java index bd39ace0380..76cf8d3246c 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/config/RemoteTaskRunnerConfigTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/config/RemoteTaskRunnerConfigTest.java @@ -32,7 +32,7 @@ import java.util.Map; public class RemoteTaskRunnerConfigTest { - private static final ObjectMapper mapper = new DefaultObjectMapper(); + private static final ObjectMapper MAPPER = new DefaultObjectMapper(); private static final Period DEFAULT_TIMEOUT = Period.ZERO; private static final String DEFAULT_VERSION = ""; private static final long DEFAULT_MAX_ZNODE = 10 * 1024; @@ -44,7 +44,7 @@ public class RemoteTaskRunnerConfigTest @Test public void testIsJsonConfiguratable() { - JsonConfigurator.verifyClazzIsConfigurable(mapper, RemoteTaskRunnerConfig.class, null); + JsonConfigurator.verifyClazzIsConfigurable(MAPPER, RemoteTaskRunnerConfig.class, null); } @Test @@ -795,7 +795,7 @@ public class RemoteTaskRunnerConfigTest private RemoteTaskRunnerConfig reflect(RemoteTaskRunnerConfig config) throws IOException { - return mapper.readValue(mapper.writeValueAsString(config), RemoteTaskRunnerConfig.class); + return MAPPER.readValue(MAPPER.writeValueAsString(config), RemoteTaskRunnerConfig.class); } private RemoteTaskRunnerConfig generateRemoteTaskRunnerConfig( @@ -820,6 +820,6 @@ public class RemoteTaskRunnerConfigTest objectMap.put("maxRetriesBeforeBlacklist", maxRetriesBeforeBlacklist); objectMap.put("workerBlackListBackoffTime", taskBlackListBackoffTime); objectMap.put("workerBlackListCleanupPeriod", taskBlackListCleanupPeriod); - return mapper.convertValue(objectMap, RemoteTaskRunnerConfig.class); + return MAPPER.convertValue(objectMap, RemoteTaskRunnerConfig.class); } } diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/sampler/FirehoseSamplerTest.java 
b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/sampler/FirehoseSamplerTest.java index 782247d9cd2..06ab5dc1b58 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/sampler/FirehoseSamplerTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/sampler/FirehoseSamplerTest.java @@ -71,7 +71,7 @@ public class FirehoseSamplerTest MAP, STR_JSON, STR_CSV } - private static final ObjectMapper objectMapper = TestHelper.makeJsonMapper(); + private static final ObjectMapper OBJECT_MAPPER = TestHelper.makeJsonMapper(); private static final boolean USE_DEFAULT_VALUE_FOR_NULL = Boolean.valueOf(System.getProperty( NullHandling.NULL_HANDLING_CONFIG_STRING, "true" @@ -130,7 +130,7 @@ public class FirehoseSamplerTest public void setupTest() { samplerCache = new SamplerCache(MapCache.create(100000)); - firehoseSampler = new FirehoseSampler(objectMapper, samplerCache); + firehoseSampler = new FirehoseSampler(OBJECT_MAPPER, samplerCache); } @Test @@ -216,7 +216,7 @@ public class FirehoseSamplerTest FirehoseFactory firehoseFactory = getFirehoseFactory(getTestRows()); ParseSpec parseSpec = getParseSpec(new TimestampSpec(null, null, DateTimes.of("1970")), new DimensionsSpec(null)); - DataSchema dataSchema = new DataSchema("sampler", getParser(parseSpec), null, null, null, objectMapper); + DataSchema dataSchema = new DataSchema("sampler", getParser(parseSpec), null, null, null, OBJECT_MAPPER); SamplerResponse response = firehoseSampler.sample(firehoseFactory, dataSchema, null); @@ -270,7 +270,7 @@ public class FirehoseSamplerTest FirehoseFactory firehoseFactory = getFirehoseFactory(getTestRows()); ParseSpec parseSpec = getParseSpec(new TimestampSpec("t", null, null), new DimensionsSpec(null)); - DataSchema dataSchema = new DataSchema("sampler", getParser(parseSpec), null, null, null, objectMapper); + DataSchema dataSchema = new DataSchema("sampler", getParser(parseSpec), null, null, null, OBJECT_MAPPER); SamplerResponse response = firehoseSampler.sample(firehoseFactory, dataSchema, null); @@ -330,7 +330,7 @@ public class FirehoseSamplerTest StringDimensionSchema.create("met1") )) ); - DataSchema dataSchema = new DataSchema("sampler", getParser(parseSpec), null, null, null, objectMapper); + DataSchema dataSchema = new DataSchema("sampler", getParser(parseSpec), null, null, null, OBJECT_MAPPER); SamplerResponse response = firehoseSampler.sample(firehoseFactory, dataSchema, null); @@ -392,7 +392,7 @@ public class FirehoseSamplerTest aggregatorFactories, granularitySpec, null, - objectMapper + OBJECT_MAPPER ); SamplerResponse response = firehoseSampler.sample(firehoseFactory, dataSchema, null); @@ -455,7 +455,7 @@ public class FirehoseSamplerTest aggregatorFactories, granularitySpec, null, - objectMapper + OBJECT_MAPPER ); SamplerResponse response = firehoseSampler.sample(firehoseFactory, dataSchema, null); @@ -509,7 +509,7 @@ public class FirehoseSamplerTest aggregatorFactories, granularitySpec, null, - objectMapper + OBJECT_MAPPER ); SamplerResponse response = firehoseSampler.sample(firehoseFactory, dataSchema, null); @@ -557,7 +557,7 @@ public class FirehoseSamplerTest aggregatorFactories, granularitySpec, null, - objectMapper + OBJECT_MAPPER ); SamplerResponse response = firehoseSampler.sample(firehoseFactory, dataSchema, null); @@ -615,7 +615,7 @@ public class FirehoseSamplerTest aggregatorFactories, granularitySpec, transformSpec, - objectMapper + OBJECT_MAPPER ); SamplerResponse response = firehoseSampler.sample(firehoseFactory, 
dataSchema, null); @@ -680,7 +680,7 @@ public class FirehoseSamplerTest aggregatorFactories, granularitySpec, transformSpec, - objectMapper + OBJECT_MAPPER ); SamplerResponse response = firehoseSampler.sample(firehoseFactory, dataSchema, null); @@ -729,7 +729,7 @@ public class FirehoseSamplerTest aggregatorFactories, granularitySpec, transformSpec, - objectMapper + OBJECT_MAPPER ); SamplerResponse response = firehoseSampler.sample(firehoseFactory, dataSchema, null); @@ -762,7 +762,7 @@ public class FirehoseSamplerTest private Map getParser(ParseSpec parseSpec) { - return objectMapper.convertValue( + return OBJECT_MAPPER.convertValue( ParserType.MAP.equals(parserType) ? new MapInputRowParser(parseSpec) : new StringInputRowParser(parseSpec, StandardCharsets.UTF_8.name()), diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/sampler/IndexTaskSamplerSpecTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/sampler/IndexTaskSamplerSpecTest.java index 1964a8004da..43b71b180fb 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/sampler/IndexTaskSamplerSpecTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/sampler/IndexTaskSamplerSpecTest.java @@ -41,7 +41,7 @@ import java.util.Map; public class IndexTaskSamplerSpecTest extends EasyMockSupport { - private static final ObjectMapper mapper = TestHelper.makeJsonMapper(); + private static final ObjectMapper MAPPER = TestHelper.makeJsonMapper(); private final FirehoseSampler firehoseSampler = createMock(FirehoseSampler.class); @@ -50,13 +50,13 @@ public class IndexTaskSamplerSpecTest extends EasyMockSupport public IndexTaskSamplerSpecTest() { - mapper.setInjectableValues( + MAPPER.setInjectableValues( new InjectableValues.Std() .addValue(FirehoseSampler.class, firehoseSampler) - .addValue(ObjectMapper.class, mapper) + .addValue(ObjectMapper.class, MAPPER) ); - mapper.registerModules((Iterable) new SamplerModule().getJacksonModules()); - mapper.registerModules((Iterable) new FirehoseModule().getJacksonModules()); + MAPPER.registerModules((Iterable) new SamplerModule().getJacksonModules()); + MAPPER.registerModules((Iterable) new FirehoseModule().getJacksonModules()); } @Test @@ -99,7 +99,7 @@ public class IndexTaskSamplerSpecTest extends EasyMockSupport Capture capturedDataSchema = EasyMock.newCapture(); Capture capturedSamplerConfig = EasyMock.newCapture(); - IndexTaskSamplerSpec spec = mapper.readValue(json, IndexTaskSamplerSpec.class); + IndexTaskSamplerSpec spec = MAPPER.readValue(json, IndexTaskSamplerSpec.class); EasyMock.expect(firehoseSampler.sample( EasyMock.capture(capturedFirehoseFactory), diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/sampler/SamplerResponseTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/sampler/SamplerResponseTest.java index 503045d8aaf..e0dca58497a 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/sampler/SamplerResponseTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/sampler/SamplerResponseTest.java @@ -31,7 +31,7 @@ import java.util.List; public class SamplerResponseTest { - private static final ObjectMapper mapper = TestHelper.makeJsonMapper(); + private static final ObjectMapper MAPPER = TestHelper.makeJsonMapper(); @Test public void testSerde() throws IOException @@ -52,7 +52,7 @@ public class SamplerResponseTest new SamplerResponse.SamplerResponseRow("unparsed", null, true, "Could not 
parse") ); - String out = mapper.writeValueAsString(new SamplerResponse("eaebbfd87ec34bc6a9f8c03ecee4dd7a", 1123, 1112, data)); + String out = MAPPER.writeValueAsString(new SamplerResponse("eaebbfd87ec34bc6a9f8c03ecee4dd7a", 1123, 1112, data)); String expected = "{\"cacheKey\":\"eaebbfd87ec34bc6a9f8c03ecee4dd7a\",\"numRowsRead\":1123,\"numRowsIndexed\":1112,\"data\":[{\"raw\":\"parsed1\",\"parsed\":{\"t\":123456,\"dim1\":\"foo\",\"met1\":6}},{\"raw\":\"parsed2\",\"parsed\":{\"t\":123457,\"dim1\":\"foo2\",\"met1\":7}},{\"raw\":\"unparsed\",\"unparseable\":true,\"error\":\"Could not parse\"}]}"; Assert.assertEquals(expected, out); diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/seekablestream/supervisor/SeekableStreamSupervisorStateTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/seekablestream/supervisor/SeekableStreamSupervisorStateTest.java index bac1cc8e959..9d467608747 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/seekablestream/supervisor/SeekableStreamSupervisorStateTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/seekablestream/supervisor/SeekableStreamSupervisorStateTest.java @@ -92,11 +92,11 @@ import java.util.concurrent.ScheduledExecutorService; public class SeekableStreamSupervisorStateTest extends EasyMockSupport { - private static final ObjectMapper objectMapper = TestHelper.makeJsonMapper(); + private static final ObjectMapper OBJECT_MAPPER = TestHelper.makeJsonMapper(); private static final String DATASOURCE = "testDS"; private static final String STREAM = "stream"; private static final String SHARD_ID = "0"; - private static final StreamPartition shard0Partition = StreamPartition.of(STREAM, SHARD_ID); + private static final StreamPartition SHARD0_PARTITION = StreamPartition.of(STREAM, SHARD_ID); private static final String EXCEPTION_MSG = "I had an exception"; private TaskStorage taskStorage; @@ -149,7 +149,7 @@ public class SeekableStreamSupervisorStateTest extends EasyMockSupport taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn(null).anyTimes(); - EasyMock.expect(recordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard0Partition)).anyTimes(); + EasyMock.expect(recordSupplier.getAssignment()).andReturn(ImmutableSet.of(SHARD0_PARTITION)).anyTimes(); EasyMock.expect(recordSupplier.getLatestSequenceNumber(EasyMock.anyObject())).andReturn("10").anyTimes(); } @@ -561,7 +561,7 @@ public class SeekableStreamSupervisorStateTest extends EasyMockSupport return new DataSchema( DATASOURCE, - objectMapper.convertValue( + OBJECT_MAPPER.convertValue( new StringInputRowParser( new JSONParseSpec( new TimestampSpec("timestamp", "iso", null), @@ -584,7 +584,7 @@ public class SeekableStreamSupervisorStateTest extends EasyMockSupport ImmutableList.of() ), null, - objectMapper + OBJECT_MAPPER ); } @@ -740,7 +740,7 @@ public class SeekableStreamSupervisorStateTest extends EasyMockSupport taskMaster, indexerMetadataStorageCoordinator, taskClientFactory, - objectMapper, + OBJECT_MAPPER, spec, rowIngestionMetersFactory, false diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/worker/WorkerTaskMonitorTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/worker/WorkerTaskMonitorTest.java index 023251d1e15..a5bddbea4a4 100644 --- 
a/indexing-service/src/test/java/org/apache/druid/indexing/worker/WorkerTaskMonitorTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/worker/WorkerTaskMonitorTest.java @@ -66,10 +66,10 @@ import java.util.List; */ public class WorkerTaskMonitorTest { - private static final Joiner joiner = Joiner.on("/"); - private static final String basePath = "/test/druid"; - private static final String tasksPath = StringUtils.format("%s/indexer/tasks/worker", basePath); - private static final String statusPath = StringUtils.format("%s/indexer/status/worker", basePath); + private static final Joiner JOINER = Joiner.on("/"); + private static final String BASE_PATH = "/test/druid"; + private static final String TASKS_PATH = StringUtils.format("%s/indexer/tasks/worker", BASE_PATH); + private static final String STATUS_PATH = StringUtils.format("%s/indexer/status/worker", BASE_PATH); private static final DruidNode DUMMY_NODE = new DruidNode("dummy", "dummy", false, 9000, null, true, false); private TestingCluster testingCluster; @@ -105,7 +105,7 @@ public class WorkerTaskMonitorTest .build(); cf.start(); cf.blockUntilConnected(); - cf.create().creatingParentsIfNeeded().forPath(basePath); + cf.create().creatingParentsIfNeeded().forPath(BASE_PATH); worker = new Worker( "http", @@ -123,7 +123,7 @@ public class WorkerTaskMonitorTest @Override public String getBase() { - return basePath; + return BASE_PATH; } }, null, null, null, null ), @@ -215,7 +215,7 @@ public class WorkerTaskMonitorTest public boolean isValid() { try { - return cf.checkExists().forPath(joiner.join(tasksPath, task.getId())) == null; + return cf.checkExists().forPath(JOINER.join(TASKS_PATH, task.getId())) == null; } catch (Exception e) { return false; @@ -227,7 +227,7 @@ public class WorkerTaskMonitorTest cf.create() .creatingParentsIfNeeded() - .forPath(joiner.join(tasksPath, task.getId()), jsonMapper.writeValueAsBytes(task)); + .forPath(JOINER.join(TASKS_PATH, task.getId()), jsonMapper.writeValueAsBytes(task)); Assert.assertTrue( TestUtils.conditionValid( @@ -237,7 +237,7 @@ public class WorkerTaskMonitorTest public boolean isValid() { try { - final byte[] bytes = cf.getData().forPath(joiner.join(statusPath, task.getId())); + final byte[] bytes = cf.getData().forPath(JOINER.join(STATUS_PATH, task.getId())); final TaskAnnouncement announcement = jsonMapper.readValue( bytes, TaskAnnouncement.class @@ -253,7 +253,7 @@ public class WorkerTaskMonitorTest ); TaskAnnouncement taskAnnouncement = jsonMapper.readValue( - cf.getData().forPath(joiner.join(statusPath, task.getId())), TaskAnnouncement.class + cf.getData().forPath(JOINER.join(STATUS_PATH, task.getId())), TaskAnnouncement.class ); Assert.assertEquals(task.getId(), taskAnnouncement.getTaskStatus().getId()); @@ -265,7 +265,7 @@ public class WorkerTaskMonitorTest { cf.create() .creatingParentsIfNeeded() - .forPath(joiner.join(tasksPath, task.getId()), jsonMapper.writeValueAsBytes(task)); + .forPath(JOINER.join(TASKS_PATH, task.getId()), jsonMapper.writeValueAsBytes(task)); Assert.assertTrue( TestUtils.conditionValid( @@ -275,7 +275,7 @@ public class WorkerTaskMonitorTest public boolean isValid() { try { - final byte[] bytes = cf.getData().forPath(joiner.join(statusPath, task.getId())); + final byte[] bytes = cf.getData().forPath(JOINER.join(STATUS_PATH, task.getId())); final TaskAnnouncement announcement = jsonMapper.readValue( bytes, TaskAnnouncement.class @@ -305,7 +305,7 @@ public class WorkerTaskMonitorTest cf.create() .creatingParentsIfNeeded() - 
.forPath(joiner.join(tasksPath, task.getId()), jsonMapper.writeValueAsBytes(task)); + .forPath(JOINER.join(TASKS_PATH, task.getId()), jsonMapper.writeValueAsBytes(task)); Assert.assertTrue( TestUtils.conditionValid( @@ -315,7 +315,7 @@ public class WorkerTaskMonitorTest public boolean isValid() { try { - return cf.checkExists().forPath(joiner.join(statusPath, task.getId())) != null; + return cf.checkExists().forPath(JOINER.join(STATUS_PATH, task.getId())) != null; } catch (Exception e) { return false; @@ -339,7 +339,7 @@ public class WorkerTaskMonitorTest { cf.create() .creatingParentsIfNeeded() - .forPath(joiner.join(tasksPath, task.getId()), jsonMapper.writeValueAsBytes(task)); + .forPath(JOINER.join(TASKS_PATH, task.getId()), jsonMapper.writeValueAsBytes(task)); Assert.assertTrue( TestUtils.conditionValid( @@ -349,7 +349,7 @@ public class WorkerTaskMonitorTest public boolean isValid() { try { - return cf.checkExists().forPath(joiner.join(statusPath, task.getId())) != null; + return cf.checkExists().forPath(JOINER.join(STATUS_PATH, task.getId())) != null; } catch (Exception e) { return false; @@ -359,7 +359,7 @@ public class WorkerTaskMonitorTest ) ); // ephemeral owner is 0 if created node is PERSISTENT - Assert.assertEquals(0, cf.checkExists().forPath(joiner.join(statusPath, task.getId())).getEphemeralOwner()); + Assert.assertEquals(0, cf.checkExists().forPath(JOINER.join(STATUS_PATH, task.getId())).getEphemeralOwner()); } } diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/worker/http/WorkerResourceTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/worker/http/WorkerResourceTest.java index 78aead2bce5..3e79405223f 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/worker/http/WorkerResourceTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/worker/http/WorkerResourceTest.java @@ -45,9 +45,9 @@ import javax.ws.rs.core.Response; */ public class WorkerResourceTest { - private static final ObjectMapper jsonMapper = new DefaultObjectMapper(); - private static final String basePath = "/test/druid"; - private static final String announcementsPath = StringUtils.format("%s/indexer/announcements/host", basePath); + private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper(); + private static final String BASE_PATH = "/test/druid"; + private static final String ANNOUNCEMENT_PATH = StringUtils.format("%s/indexer/announcements/host", BASE_PATH); private TestingCluster testingCluster; private CuratorFramework cf; @@ -70,7 +70,7 @@ public class WorkerResourceTest .build(); cf.start(); cf.blockUntilConnected(); - cf.create().creatingParentsIfNeeded().forPath(basePath); + cf.create().creatingParentsIfNeeded().forPath(BASE_PATH); worker = new Worker( "http", @@ -81,13 +81,13 @@ public class WorkerResourceTest ); curatorCoordinator = new WorkerCuratorCoordinator( - jsonMapper, + JSON_MAPPER, new IndexerZkConfig(new ZkPathsConfig() { @Override public String getBase() { - return basePath; + return BASE_PATH; } }, null, null, null, null), new RemoteTaskRunnerConfig(), @@ -115,13 +115,13 @@ public class WorkerResourceTest @Test public void testDoDisable() throws Exception { - Worker theWorker = jsonMapper.readValue(cf.getData().forPath(announcementsPath), Worker.class); + Worker theWorker = JSON_MAPPER.readValue(cf.getData().forPath(ANNOUNCEMENT_PATH), Worker.class); Assert.assertEquals("v1", theWorker.getVersion()); Response res = workerResource.doDisable(); Assert.assertEquals(Response.Status.OK.getStatusCode(),
res.getStatus()); - theWorker = jsonMapper.readValue(cf.getData().forPath(announcementsPath), Worker.class); + theWorker = JSON_MAPPER.readValue(cf.getData().forPath(ANNOUNCEMENT_PATH), Worker.class); Assert.assertTrue(theWorker.getVersion().isEmpty()); } @@ -131,13 +131,13 @@ public class WorkerResourceTest // Disable the worker Response res = workerResource.doDisable(); Assert.assertEquals(Response.Status.OK.getStatusCode(), res.getStatus()); - Worker theWorker = jsonMapper.readValue(cf.getData().forPath(announcementsPath), Worker.class); + Worker theWorker = JSON_MAPPER.readValue(cf.getData().forPath(ANNOUNCEMENT_PATH), Worker.class); Assert.assertTrue(theWorker.getVersion().isEmpty()); // Enable the worker res = workerResource.doEnable(); Assert.assertEquals(Response.Status.OK.getStatusCode(), res.getStatus()); - theWorker = jsonMapper.readValue(cf.getData().forPath(announcementsPath), Worker.class); + theWorker = JSON_MAPPER.readValue(cf.getData().forPath(ANNOUNCEMENT_PATH), Worker.class); Assert.assertEquals("v1", theWorker.getVersion()); } } diff --git a/indexing-service/src/test/java/org/apache/druid/server/initialization/IndexerZkConfigTest.java b/indexing-service/src/test/java/org/apache/druid/server/initialization/IndexerZkConfigTest.java index f6d8d0717a6..e2f4a837e03 100644 --- a/indexing-service/src/test/java/org/apache/druid/server/initialization/IndexerZkConfigTest.java +++ b/indexing-service/src/test/java/org/apache/druid/server/initialization/IndexerZkConfigTest.java @@ -51,11 +51,11 @@ import java.util.UUID; */ public class IndexerZkConfigTest { - private static final String indexerPropertyString = "test.druid.zk.paths.indexer"; - private static final String zkServiceConfigString = "test.druid.zk.paths"; - private static final Collection clobberableProperties = new HashSet<>(); + private static final String INDEXER_PROPERTY_STRING = "test.druid.zk.paths.indexer"; + private static final String ZK_SERVICE_CONFIG_STRING = "test.druid.zk.paths"; + private static final Collection CLOBBERABLE_PROPERTIES = new HashSet<>(); - private static final Module simpleZkConfigModule = new Module() + private static final Module SIMPLE_ZK_CONFIG_MODULE = new Module() { @Override public void configure(Binder binder) @@ -64,8 +64,8 @@ public class IndexerZkConfigTest binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); // See IndexingServiceModuleHelper - JsonConfigProvider.bind(binder, indexerPropertyString, IndexerZkConfig.class); - JsonConfigProvider.bind(binder, zkServiceConfigString, ZkPathsConfig.class); + JsonConfigProvider.bind(binder, INDEXER_PROPERTY_STRING, IndexerZkConfig.class); + JsonConfigProvider.bind(binder, ZK_SERVICE_CONFIG_STRING, ZkPathsConfig.class); } }; @@ -74,12 +74,12 @@ public class IndexerZkConfigTest { for (Field field : IndexerZkConfig.class.getDeclaredFields()) { if (null != field.getAnnotation(JsonProperty.class)) { - clobberableProperties.add(StringUtils.format("%s.%s", indexerPropertyString, field.getName())); + CLOBBERABLE_PROPERTIES.add(StringUtils.format("%s.%s", INDEXER_PROPERTY_STRING, field.getName())); } } for (Field field : ZkPathsConfig.class.getDeclaredFields()) { if (null != field.getAnnotation(JsonProperty.class)) { - clobberableProperties.add(StringUtils.format("%s.%s", zkServiceConfigString, field.getName())); + CLOBBERABLE_PROPERTIES.add(StringUtils.format("%s.%s", ZK_SERVICE_CONFIG_STRING, field.getName())); } } } @@ -90,7 +90,7 @@ public class 
IndexerZkConfigTest @Before public void setupTest() { - for (String property : clobberableProperties) { + for (String property : CLOBBERABLE_PROPERTIES) { propertyValues.put(property, UUID.randomUUID().toString()); } assertions = 0; @@ -102,7 +102,7 @@ public class IndexerZkConfigTest { for (Field field : ZkPathsConfig.class.getDeclaredFields()) { if (null != field.getAnnotation(JsonProperty.class)) { - String property = StringUtils.format("%s.%s", zkServiceConfigString, field.getName()); + String property = StringUtils.format("%s.%s", ZK_SERVICE_CONFIG_STRING, field.getName()); String getter = StringUtils.format( "get%s%s", StringUtils.toUpperCase(field.getName().substring(0, 1)), @@ -120,7 +120,7 @@ public class IndexerZkConfigTest { for (Field field : IndexerZkConfig.class.getDeclaredFields()) { if (null != field.getAnnotation(JsonProperty.class)) { - String property = StringUtils.format("%s.%s", indexerPropertyString, field.getName()); + String property = StringUtils.format("%s.%s", INDEXER_PROPERTY_STRING, field.getName()); String getter = StringUtils.format( "get%s%s", StringUtils.toUpperCase(field.getName().substring(0, 1)), @@ -140,15 +140,15 @@ public class IndexerZkConfigTest final Injector injector = Initialization.makeInjectorWithModules( GuiceInjectors.makeStartupInjector(), - ImmutableList.of(simpleZkConfigModule) + ImmutableList.of(SIMPLE_ZK_CONFIG_MODULE) ); JsonConfigurator configurator = injector.getBinding(JsonConfigurator.class).getProvider().get(); - JsonConfigProvider zkPathsConfig = JsonConfigProvider.of(zkServiceConfigString, ZkPathsConfig.class); + JsonConfigProvider zkPathsConfig = JsonConfigProvider.of(ZK_SERVICE_CONFIG_STRING, ZkPathsConfig.class); zkPathsConfig.inject(propertyValues, configurator); JsonConfigProvider indexerZkConfig = JsonConfigProvider.of( - indexerPropertyString, + INDEXER_PROPERTY_STRING, IndexerZkConfig.class ); indexerZkConfig.inject(propertyValues, configurator); @@ -161,15 +161,15 @@ public class IndexerZkConfigTest { final Injector injector = Initialization.makeInjectorWithModules( GuiceInjectors.makeStartupInjector(), - ImmutableList.of(simpleZkConfigModule) + ImmutableList.of(SIMPLE_ZK_CONFIG_MODULE) ); JsonConfigurator configurator = injector.getBinding(JsonConfigurator.class).getProvider().get(); - JsonConfigProvider zkPathsConfig = JsonConfigProvider.of(zkServiceConfigString, ZkPathsConfig.class); + JsonConfigProvider zkPathsConfig = JsonConfigProvider.of(ZK_SERVICE_CONFIG_STRING, ZkPathsConfig.class); zkPathsConfig.inject(propertyValues, configurator); JsonConfigProvider indexerZkConfig = JsonConfigProvider.of( - indexerPropertyString, + INDEXER_PROPERTY_STRING, IndexerZkConfig.class ); indexerZkConfig.inject(propertyValues, configurator); @@ -180,7 +180,7 @@ public class IndexerZkConfigTest validateEntries(zkConfig); validateEntries(zkPathsConfig1); - Assert.assertEquals(clobberableProperties.size(), assertions); + Assert.assertEquals(CLOBBERABLE_PROPERTIES.size(), assertions); } @@ -189,12 +189,12 @@ public class IndexerZkConfigTest public void testIndexerBaseOverride() { final String overrideValue = "/foo/bar/baz"; - final String indexerPropertyKey = indexerPropertyString + ".base"; + final String indexerPropertyKey = INDEXER_PROPERTY_STRING + ".base"; final String priorValue = System.getProperty(indexerPropertyKey); System.setProperty(indexerPropertyKey, overrideValue); // Set it here so that the binding picks it up final Injector injector = Initialization.makeInjectorWithModules( GuiceInjectors.makeStartupInjector(), - 
ImmutableList.of(simpleZkConfigModule) + ImmutableList.of(SIMPLE_ZK_CONFIG_MODULE) ); propertyValues.clear(); propertyValues.setProperty(indexerPropertyKey, overrideValue); // Have to set it here as well annoyingly enough @@ -203,7 +203,7 @@ public class IndexerZkConfigTest JsonConfigurator configurator = injector.getBinding(JsonConfigurator.class).getProvider().get(); JsonConfigProvider indexerPathsConfig = JsonConfigProvider.of( - indexerPropertyString, + INDEXER_PROPERTY_STRING, IndexerZkConfig.class ); indexerPathsConfig.inject(propertyValues, configurator); @@ -226,15 +226,15 @@ public class IndexerZkConfigTest { final Injector injector = Initialization.makeInjectorWithModules( GuiceInjectors.makeStartupInjector(), - ImmutableList.of(simpleZkConfigModule) + ImmutableList.of(SIMPLE_ZK_CONFIG_MODULE) ); - propertyValues.setProperty(zkServiceConfigString + ".base", "/druid/metrics"); + propertyValues.setProperty(ZK_SERVICE_CONFIG_STRING + ".base", "/druid/metrics"); JsonConfigurator configurator = injector.getBinding(JsonConfigurator.class).getProvider().get(); JsonConfigProvider zkPathsConfig = JsonConfigProvider.of( - zkServiceConfigString, + ZK_SERVICE_CONFIG_STRING, ZkPathsConfig.class ); diff --git a/integration-tests/src/main/java/org/apache/druid/testing/guice/DruidTestModuleFactory.java b/integration-tests/src/main/java/org/apache/druid/testing/guice/DruidTestModuleFactory.java index 90a220835a9..0e95e9ac824 100644 --- a/integration-tests/src/main/java/org/apache/druid/testing/guice/DruidTestModuleFactory.java +++ b/integration-tests/src/main/java/org/apache/druid/testing/guice/DruidTestModuleFactory.java @@ -33,15 +33,15 @@ import java.util.List; public class DruidTestModuleFactory implements IModuleFactory { - private static final Module module = new DruidTestModule(); - private static final Injector injector = Initialization.makeInjectorWithModules( + private static final Module MODULE = new DruidTestModule(); + private static final Injector INJECTOR = Initialization.makeInjectorWithModules( GuiceInjectors.makeStartupInjector(), getModules() ); public static Injector getInjector() { - return injector; + return INJECTOR; } private static List getModules() @@ -55,8 +55,8 @@ public class DruidTestModuleFactory implements IModuleFactory @Override public Module createModule(ITestContext context, Class testClass) { - context.addGuiceModule(DruidTestModule.class, module); - context.addInjector(Collections.singletonList(module), injector); - return module; + context.addGuiceModule(DruidTestModule.class, MODULE); + context.addInjector(Collections.singletonList(MODULE), INJECTOR); + return MODULE; } } diff --git a/processing/src/main/java/org/apache/druid/query/aggregation/cardinality/CardinalityAggregator.java b/processing/src/main/java/org/apache/druid/query/aggregation/cardinality/CardinalityAggregator.java index 04fff6e29cd..be665a3ce78 100644 --- a/processing/src/main/java/org/apache/druid/query/aggregation/cardinality/CardinalityAggregator.java +++ b/processing/src/main/java/org/apache/druid/query/aggregation/cardinality/CardinalityAggregator.java @@ -32,14 +32,14 @@ import java.util.List; public class CardinalityAggregator implements Aggregator { - public static final HashFunction hashFn = Hashing.murmur3_128(); + public static final HashFunction HASH_FUNCTION = Hashing.murmur3_128(); static void hashRow( ColumnSelectorPlus[] selectorPluses, HyperLogLogCollector collector ) { - final Hasher hasher = hashFn.newHasher(); + final Hasher hasher = HASH_FUNCTION.newHasher(); for (int k 
= 0; k < selectorPluses.length; ++k) { if (k != 0) { hasher.putByte((byte) 0); diff --git a/processing/src/main/java/org/apache/druid/query/aggregation/cardinality/types/DoubleCardinalityAggregatorColumnSelectorStrategy.java b/processing/src/main/java/org/apache/druid/query/aggregation/cardinality/types/DoubleCardinalityAggregatorColumnSelectorStrategy.java index 68967b8bfda..14714758ed9 100644 --- a/processing/src/main/java/org/apache/druid/query/aggregation/cardinality/types/DoubleCardinalityAggregatorColumnSelectorStrategy.java +++ b/processing/src/main/java/org/apache/druid/query/aggregation/cardinality/types/DoubleCardinalityAggregatorColumnSelectorStrategy.java @@ -46,7 +46,7 @@ public class DoubleCardinalityAggregatorColumnSelectorStrategy public void hashValues(BaseDoubleColumnValueSelector selector, HyperLogLogCollector collector) { if (NullHandling.replaceWithDefault() || !selector.isNull()) { - collector.add(CardinalityAggregator.hashFn.hashLong(Double.doubleToLongBits(selector.getDouble())).asBytes()); + collector.add(CardinalityAggregator.HASH_FUNCTION.hashLong(Double.doubleToLongBits(selector.getDouble())).asBytes()); } } } diff --git a/processing/src/main/java/org/apache/druid/query/aggregation/cardinality/types/FloatCardinalityAggregatorColumnSelectorStrategy.java b/processing/src/main/java/org/apache/druid/query/aggregation/cardinality/types/FloatCardinalityAggregatorColumnSelectorStrategy.java index c9c1e575c64..59a242f1f49 100644 --- a/processing/src/main/java/org/apache/druid/query/aggregation/cardinality/types/FloatCardinalityAggregatorColumnSelectorStrategy.java +++ b/processing/src/main/java/org/apache/druid/query/aggregation/cardinality/types/FloatCardinalityAggregatorColumnSelectorStrategy.java @@ -46,7 +46,7 @@ public class FloatCardinalityAggregatorColumnSelectorStrategy public void hashValues(BaseFloatColumnValueSelector selector, HyperLogLogCollector collector) { if (NullHandling.replaceWithDefault() || !selector.isNull()) { - collector.add(CardinalityAggregator.hashFn.hashInt(Float.floatToIntBits(selector.getFloat())).asBytes()); + collector.add(CardinalityAggregator.HASH_FUNCTION.hashInt(Float.floatToIntBits(selector.getFloat())).asBytes()); } } } diff --git a/processing/src/main/java/org/apache/druid/query/aggregation/cardinality/types/LongCardinalityAggregatorColumnSelectorStrategy.java b/processing/src/main/java/org/apache/druid/query/aggregation/cardinality/types/LongCardinalityAggregatorColumnSelectorStrategy.java index f8f735d3e14..d6ffea54e24 100644 --- a/processing/src/main/java/org/apache/druid/query/aggregation/cardinality/types/LongCardinalityAggregatorColumnSelectorStrategy.java +++ b/processing/src/main/java/org/apache/druid/query/aggregation/cardinality/types/LongCardinalityAggregatorColumnSelectorStrategy.java @@ -46,7 +46,7 @@ public class LongCardinalityAggregatorColumnSelectorStrategy public void hashValues(BaseLongColumnValueSelector selector, HyperLogLogCollector collector) { if (NullHandling.replaceWithDefault() || !selector.isNull()) { - collector.add(CardinalityAggregator.hashFn.hashLong(selector.getLong()).asBytes()); + collector.add(CardinalityAggregator.HASH_FUNCTION.hashLong(selector.getLong()).asBytes()); } } } diff --git a/processing/src/main/java/org/apache/druid/query/aggregation/cardinality/types/StringCardinalityAggregatorColumnSelectorStrategy.java b/processing/src/main/java/org/apache/druid/query/aggregation/cardinality/types/StringCardinalityAggregatorColumnSelectorStrategy.java index 5fe5f7ae1fe..ca4c69c8cfa 100644 --- 
a/processing/src/main/java/org/apache/druid/query/aggregation/cardinality/types/StringCardinalityAggregatorColumnSelectorStrategy.java +++ b/processing/src/main/java/org/apache/druid/query/aggregation/cardinality/types/StringCardinalityAggregatorColumnSelectorStrategy.java @@ -84,7 +84,7 @@ public class StringCardinalityAggregatorColumnSelectorStrategy implements Cardin // Skip counting null values when we are not replacing null with default value. // A special value for null in case null handling is configured to use empty string for null. if (NullHandling.replaceWithDefault() || value != null) { - collector.add(CardinalityAggregator.hashFn.hashUnencodedChars(nullToSpecial(value)).asBytes()); + collector.add(CardinalityAggregator.HASH_FUNCTION.hashUnencodedChars(nullToSpecial(value)).asBytes()); } } } diff --git a/processing/src/main/java/org/apache/druid/query/aggregation/post/ArithmeticPostAggregator.java b/processing/src/main/java/org/apache/druid/query/aggregation/post/ArithmeticPostAggregator.java index ea678dc9136..2493327bbbc 100644 --- a/processing/src/main/java/org/apache/druid/query/aggregation/post/ArithmeticPostAggregator.java +++ b/processing/src/main/java/org/apache/druid/query/aggregation/post/ArithmeticPostAggregator.java @@ -243,11 +243,11 @@ public class ArithmeticPostAggregator implements PostAggregator } }; - private static final Map lookupMap = new HashMap<>(); + private static final Map LOOKUP_MAP = new HashMap<>(); static { for (Ops op : Ops.values()) { - lookupMap.put(op.getFn(), op); + LOOKUP_MAP.put(op.getFn(), op); } } @@ -267,12 +267,12 @@ public class ArithmeticPostAggregator implements PostAggregator static Ops lookup(String fn) { - return lookupMap.get(fn); + return LOOKUP_MAP.get(fn); } static Set getFns() { - return lookupMap.keySet(); + return LOOKUP_MAP.keySet(); } } diff --git a/processing/src/main/java/org/apache/druid/query/context/ResponseContext.java b/processing/src/main/java/org/apache/druid/query/context/ResponseContext.java index f94b1d24354..a399233e70b 100644 --- a/processing/src/main/java/org/apache/druid/query/context/ResponseContext.java +++ b/processing/src/main/java/org/apache/druid/query/context/ResponseContext.java @@ -172,7 +172,7 @@ public abstract class ResponseContext /** * TreeMap is used to have the natural ordering of its keys */ - private static final Map registeredKeys = new TreeMap<>(); + private static final Map REGISTERED_KEYS = new TreeMap<>(); static { for (BaseKey key : values()) { @@ -187,11 +187,11 @@ public abstract class ResponseContext public static synchronized void registerKey(BaseKey key) { Preconditions.checkArgument( - !registeredKeys.containsKey(key.getName()), + !REGISTERED_KEYS.containsKey(key.getName()), "Key [%s] has already been registered as a context key", key.getName() ); - registeredKeys.put(key.getName(), key); + REGISTERED_KEYS.put(key.getName(), key); } /** @@ -201,11 +201,11 @@ public abstract class ResponseContext public static BaseKey keyOf(String name) { Preconditions.checkState( - registeredKeys.containsKey(name), + REGISTERED_KEYS.containsKey(name), "Key [%s] has not yet been registered as a context key", name ); - return registeredKeys.get(name); + return REGISTERED_KEYS.get(name); } /** @@ -213,7 +213,7 @@ public abstract class ResponseContext */ public static Collection getAllRegisteredKeys() { - return Collections.unmodifiableCollection(registeredKeys.values()); + return Collections.unmodifiableCollection(REGISTERED_KEYS.values()); } private final String name; @@ -247,7 +247,7 @@ public 
abstract class ResponseContext protected abstract Map getDelegate(); - private static final Comparator> valueLengthReversedComparator = + private static final Comparator> VALUE_LENGTH_REVERSED_COMPARATOR = Comparator.comparing((Map.Entry e) -> e.getValue().toString().length()).reversed(); /** @@ -341,7 +341,7 @@ public abstract class ResponseContext add(Key.TRUNCATED, true); final ObjectNode contextJsonNode = objectMapper.valueToTree(getDelegate()); final ArrayList> sortedNodesByLength = Lists.newArrayList(contextJsonNode.fields()); - sortedNodesByLength.sort(valueLengthReversedComparator); + sortedNodesByLength.sort(VALUE_LENGTH_REVERSED_COMPARATOR); int needToRemoveCharsNumber = fullSerializedString.length() - maxCharsNumber; // The complexity of this block is O(n*m*log(m)) where n is the context size and m is the context's array size for (Map.Entry e : sortedNodesByLength) { diff --git a/processing/src/main/java/org/apache/druid/query/extraction/IdentityExtractionFn.java b/processing/src/main/java/org/apache/druid/query/extraction/IdentityExtractionFn.java index 462700f9ac4..c77efa9a726 100644 --- a/processing/src/main/java/org/apache/druid/query/extraction/IdentityExtractionFn.java +++ b/processing/src/main/java/org/apache/druid/query/extraction/IdentityExtractionFn.java @@ -25,7 +25,7 @@ import javax.annotation.Nullable; public class IdentityExtractionFn implements ExtractionFn { - private static final IdentityExtractionFn instance = new IdentityExtractionFn(); + private static final IdentityExtractionFn INSTANCE = new IdentityExtractionFn(); private IdentityExtractionFn() { @@ -84,6 +84,6 @@ public class IdentityExtractionFn implements ExtractionFn public static final IdentityExtractionFn getInstance() { - return instance; + return INSTANCE; } } diff --git a/processing/src/main/java/org/apache/druid/query/groupby/orderby/OrderByColumnSpec.java b/processing/src/main/java/org/apache/druid/query/groupby/orderby/OrderByColumnSpec.java index d71eb782320..ac043d781c2 100644 --- a/processing/src/main/java/org/apache/druid/query/groupby/orderby/OrderByColumnSpec.java +++ b/processing/src/main/java/org/apache/druid/query/groupby/orderby/OrderByColumnSpec.java @@ -52,13 +52,13 @@ public class OrderByColumnSpec * Maintain a map of the enum values so that we can just do a lookup and get a null if it doesn't exist instead * of an exception being thrown.
*/ - private static final Map stupidEnumMap; static { final ImmutableMap.Builder bob = ImmutableMap.builder(); for (Direction direction : Direction.values()) { bob.put(direction.name(), direction); } - stupidEnumMap = bob.build(); + STUPID_ENUM_MAP = bob.build(); } @JsonValue @@ -72,7 +72,7 @@ public class OrderByColumnSpec public static Direction fromString(String name) { final String upperName = StringUtils.toUpperCase(name); - Direction direction = stupidEnumMap.get(upperName); + Direction direction = STUPID_ENUM_MAP.get(upperName); if (direction == null) { for (Direction dir : Direction.values()) { diff --git a/processing/src/main/java/org/apache/druid/query/monomorphicprocessing/SpecializationService.java b/processing/src/main/java/org/apache/druid/query/monomorphicprocessing/SpecializationService.java index e9c844bcb6a..7f640cc8044 100644 --- a/processing/src/main/java/org/apache/druid/query/monomorphicprocessing/SpecializationService.java +++ b/processing/src/main/java/org/apache/druid/query/monomorphicprocessing/SpecializationService.java @@ -70,7 +70,7 @@ public final class SpecializationService * JITWatch shows only classes present in the loaded JAR (prototypeClass should be), not classes generated during * runtime. */ - private static final boolean fakeSpecialize = Boolean.getBoolean("fakeSpecialize"); + private static final boolean FAKE_SPECIALIZE = Boolean.getBoolean("fakeSpecialize"); /** * Number of loop iterations, accounted via {@link SpecializationState#accountLoopIterations(long)} in @@ -78,21 +78,21 @@ public final class SpecializationService * to specialize the class for the specific runtimeShape. The default value is chosen so that the specialized * class will likely be compiled with C2 HotSpot compiler with the default values of *BackEdgeThreshold options. */ - private static final int triggerSpecializationIterationsThreshold = + private static final int TRIGGER_SPECIALIZATION_ITERATIONS_THRESHOLD = Integer.getInteger("triggerSpecializationIterationsThreshold", 10_000); /** * The maximum number of specializations that this service is allowed to make. It's not unlimited because each * specialization takes some JVM memory (machine code cache, byte code, etc.)
*/ - private static final int maxSpecializations = Integer.getInteger("maxSpecializations", 1000); - private static final AtomicBoolean maxSpecializationsWarningEmitted = new AtomicBoolean(false); + private static final int MAX_SPECIALIZATIONS = Integer.getInteger("maxSpecializations", 1000); + private static final AtomicBoolean MAX_SPECIALIZATIONS_WARNING_EMITTED = new AtomicBoolean(false); - private static final ExecutorService classSpecializationExecutor = Execs.singleThreaded("class-specialization-%d"); + private static final ExecutorService CLASS_SPECIALIZATION_EXECUTOR = Execs.singleThreaded("class-specialization-%d"); - private static final AtomicLong specializedClassCounter = new AtomicLong(); + private static final AtomicLong SPECIALIZED_CLASS_COUNTER = new AtomicLong(); - private static final ClassValue perPrototypeClassState = + private static final ClassValue PER_PROTOTYPE_CLASS_STATE = new ClassValue() { @Override @@ -125,7 +125,7 @@ public final class SpecializationService ImmutableMap, Class> classRemapping ) { - return perPrototypeClassState.get(prototypeClass).getSpecializationState(runtimeShape, classRemapping); + return PER_PROTOTYPE_CLASS_STATE.get(prototypeClass).getSpecializationState(runtimeShape, classRemapping); } static class PerPrototypeClassState @@ -160,7 +160,7 @@ public final class SpecializationService T specialize(ImmutableMap, Class> classRemapping) { - String specializedClassName = specializedClassNamePrefix + specializedClassCounter.get(); + String specializedClassName = specializedClassNamePrefix + SPECIALIZED_CLASS_COUNTER.get(); ClassWriter specializedClassWriter = new ClassWriter(0); SimpleRemapper remapper = new SimpleRemapper(createRemapping(classRemapping, specializedClassName)); ClassVisitor classTransformer = new ClassRemapper(specializedClassWriter, remapper); @@ -174,7 +174,7 @@ public final class SpecializationService specializedClassBytecode, specializedClassName ); - specializedClassCounter.incrementAndGet(); + SPECIALIZED_CLASS_COUNTER.incrementAndGet(); return specializedClass.newInstance(); } catch (InstantiationException | IllegalAccessException | IOException e) { @@ -198,7 +198,7 @@ public final class SpecializationService } /** - * No synchronization, because {@link #specialize} is called only from {@link #classSpecializationExecutor}, i. e. + * No synchronization, because {@link #specialize} is called only from {@link #CLASS_SPECIALIZATION_EXECUTOR}, i. e. * from a single thread. */ byte[] getPrototypeClassBytecode() throws IOException @@ -284,10 +284,10 @@ public final class SpecializationService if (specializationScheduled.get()) { return; } - if (loopIterations > triggerSpecializationIterationsThreshold || - addAndGetTotalIterationsOverTheLastHour(loopIterations) > triggerSpecializationIterationsThreshold) { + if (loopIterations > TRIGGER_SPECIALIZATION_ITERATIONS_THRESHOLD || + addAndGetTotalIterationsOverTheLastHour(loopIterations) > TRIGGER_SPECIALIZATION_ITERATIONS_THRESHOLD) { if (specializationScheduled.compareAndSet(false, true)) { - classSpecializationExecutor.submit(this); + CLASS_SPECIALIZATION_EXECUTOR.submit(this); } } } @@ -323,23 +323,23 @@ public final class SpecializationService { try { T specialized; - if (specializedClassCounter.get() > maxSpecializations) { + if (SPECIALIZED_CLASS_COUNTER.get() > MAX_SPECIALIZATIONS) { // Don't specialize, just instantiate the prototype class and emit a warning. 
// The "better" approach is probably to implement some kind of cache eviction from // PerPrototypeClassState.specializationStates. But it might be that nobody ever hits even the current // maxSpecializations limit, so implementing cache eviction is an unnecessary complexity. specialized = perPrototypeClassState.prototypeClass.newInstance(); - if (!maxSpecializationsWarningEmitted.get() && maxSpecializationsWarningEmitted.compareAndSet(false, true)) { + if (!MAX_SPECIALIZATIONS_WARNING_EMITTED.get() && MAX_SPECIALIZATIONS_WARNING_EMITTED.compareAndSet(false, true)) { LOG.warn( "SpecializationService couldn't make more than [%d] specializations. " + "Not doing specialization for runtime shape[%s] and class remapping[%s], using the prototype class[%s]", - maxSpecializations, + MAX_SPECIALIZATIONS, specializationId.runtimeShape, specializationId.classRemapping, perPrototypeClassState.prototypeClass ); } - } else if (fakeSpecialize) { + } else if (FAKE_SPECIALIZE) { specialized = perPrototypeClassState.prototypeClass.newInstance(); LOG.info( "Not specializing prototype class[%s] for runtime shape[%s] and class remapping[%s] because " diff --git a/processing/src/main/java/org/apache/druid/query/select/EventHolder.java b/processing/src/main/java/org/apache/druid/query/select/EventHolder.java index 0146decb84d..ef5ee4400df 100644 --- a/processing/src/main/java/org/apache/druid/query/select/EventHolder.java +++ b/processing/src/main/java/org/apache/druid/query/select/EventHolder.java @@ -32,7 +32,7 @@ import java.util.Map; */ public class EventHolder { - public static final String timestampKey = "timestamp"; + public static final String TIMESTAMP_KEY = "timestamp"; private final String segmentId; private final int offset; @@ -52,7 +52,7 @@ public class EventHolder public DateTime getTimestamp() { - Object retVal = event.get(timestampKey); + Object retVal = event.get(TIMESTAMP_KEY); if (retVal instanceof Long) { return DateTimes.utc((Long) retVal); } else if (retVal instanceof String) { diff --git a/processing/src/main/java/org/apache/druid/query/select/SelectQueryEngine.java b/processing/src/main/java/org/apache/druid/query/select/SelectQueryEngine.java index 38d922905f4..848eace2a26 100644 --- a/processing/src/main/java/org/apache/druid/query/select/SelectQueryEngine.java +++ b/processing/src/main/java/org/apache/druid/query/select/SelectQueryEngine.java @@ -267,7 +267,7 @@ public class SelectQueryEngine int lastOffset = offset.startOffset(); for (; !cursor.isDone() && offset.hasNext(); cursor.advance(), offset.next()) { final Map theEvent = singleEvent( - EventHolder.timestampKey, + EventHolder.TIMESTAMP_KEY, timestampColumnSelector, selectorPlusList, metSelectors diff --git a/processing/src/main/java/org/apache/druid/query/select/SelectResultValueBuilder.java b/processing/src/main/java/org/apache/druid/query/select/SelectResultValueBuilder.java index 8824305af45..646349af01d 100644 --- a/processing/src/main/java/org/apache/druid/query/select/SelectResultValueBuilder.java +++ b/processing/src/main/java/org/apache/druid/query/select/SelectResultValueBuilder.java @@ -38,7 +38,7 @@ import java.util.Set; */ public class SelectResultValueBuilder { - private static final Comparator comparator = new Comparator() + private static final Comparator COMPARATOR = new Comparator() { @Override public int compare(EventHolder o1, EventHolder o2) @@ -133,7 +133,7 @@ public class SelectResultValueBuilder protected Queue instantiatePQueue() { int threshold = pagingSpec.getThreshold(); - return 
MinMaxPriorityQueue.orderedBy(descending ? comparator.reversed() : comparator) + return MinMaxPriorityQueue.orderedBy(descending ? COMPARATOR.reversed() : COMPARATOR) .maximumSize(threshold > 0 ? threshold : Integer.MAX_VALUE) .create(); } diff --git a/processing/src/main/java/org/apache/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java b/processing/src/main/java/org/apache/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java index c39b6f9ed8e..eabd80b65f7 100644 --- a/processing/src/main/java/org/apache/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java +++ b/processing/src/main/java/org/apache/druid/query/timeboundary/TimeBoundaryQueryRunnerFactory.java @@ -54,7 +54,7 @@ import java.util.concurrent.ExecutorService; public class TimeBoundaryQueryRunnerFactory implements QueryRunnerFactory, TimeBoundaryQuery> { - private static final TimeBoundaryQueryQueryToolChest toolChest = new TimeBoundaryQueryQueryToolChest(); + private static final TimeBoundaryQueryQueryToolChest TOOL_CHEST = new TimeBoundaryQueryQueryToolChest(); private final QueryWatcher queryWatcher; @Inject @@ -81,7 +81,7 @@ public class TimeBoundaryQueryRunnerFactory @Override public QueryToolChest, TimeBoundaryQuery> getToolchest() { - return toolChest; + return TOOL_CHEST; } private static class TimeBoundaryQueryRunner implements QueryRunner> diff --git a/processing/src/main/java/org/apache/druid/query/topn/AlphaNumericTopNMetricSpec.java b/processing/src/main/java/org/apache/druid/query/topn/AlphaNumericTopNMetricSpec.java index b281a96f50b..36f84618897 100644 --- a/processing/src/main/java/org/apache/druid/query/topn/AlphaNumericTopNMetricSpec.java +++ b/processing/src/main/java/org/apache/druid/query/topn/AlphaNumericTopNMetricSpec.java @@ -34,7 +34,7 @@ public class AlphaNumericTopNMetricSpec extends LexicographicTopNMetricSpec { private static final byte CACHE_TYPE_ID = 0x2; - protected static final Comparator comparator = StringComparators.ALPHANUMERIC; + protected static final Comparator COMPARATOR = StringComparators.ALPHANUMERIC; @JsonCreator public AlphaNumericTopNMetricSpec( @@ -47,7 +47,7 @@ public class AlphaNumericTopNMetricSpec extends LexicographicTopNMetricSpec @Override public Comparator getComparator(List aggregatorSpecs, List postAggregatorSpecs) { - return comparator; + return COMPARATOR; } @Override diff --git a/processing/src/main/java/org/apache/druid/query/topn/Generic1AggPooledTopNScannerPrototype.java b/processing/src/main/java/org/apache/druid/query/topn/Generic1AggPooledTopNScannerPrototype.java index e94b31e85e3..3200c625b2f 100644 --- a/processing/src/main/java/org/apache/druid/query/topn/Generic1AggPooledTopNScannerPrototype.java +++ b/processing/src/main/java/org/apache/druid/query/topn/Generic1AggPooledTopNScannerPrototype.java @@ -35,7 +35,7 @@ public final class Generic1AggPooledTopNScannerPrototype implements Generic1AggP * It should be checked with a tool like https://github.com/AdoptOpenJDK/jitwatch that C2 compiler output for this * method doesn't have any method calls in the while loop, i. e. all method calls are inlined. To be able to see * assembly of this method in JITWatch and other similar tools, {@link - * PooledTopNAlgorithm#specializeGeneric1AggPooledTopN} should be turned off. Note that in this case the benchmark + * PooledTopNAlgorithm#SPECIALIZE_GENERIC_ONE_AGG_POOLED_TOPN} should be turned off. Note that in this case the benchmark * should be "naturally monomorphic", i. e. execute this method always with the same runtime shape. 
* * If the while loop contains non-inlined method calls, it should be considered a performance bug. diff --git a/processing/src/main/java/org/apache/druid/query/topn/Generic2AggPooledTopNScannerPrototype.java b/processing/src/main/java/org/apache/druid/query/topn/Generic2AggPooledTopNScannerPrototype.java index 2f70fdd7380..d7536cb087f 100644 --- a/processing/src/main/java/org/apache/druid/query/topn/Generic2AggPooledTopNScannerPrototype.java +++ b/processing/src/main/java/org/apache/druid/query/topn/Generic2AggPooledTopNScannerPrototype.java @@ -35,7 +35,7 @@ public final class Generic2AggPooledTopNScannerPrototype implements Generic2AggP * It should be checked with a tool like https://github.com/AdoptOpenJDK/jitwatch that C2 compiler output for this * method doesn't have any method calls in the while loop, i. e. all method calls are inlined. To be able to see * assembly of this method in JITWatch and other similar tools, {@link - * PooledTopNAlgorithm#specializeGeneric2AggPooledTopN} should be turned off. Note that in this case the benchmark + * PooledTopNAlgorithm#SPECIALIZE_GENERIC_TWO_AGG_POOLED_TOPN} should be turned off. Note that in this case the benchmark * should be "naturally monomorphic", i. e. execute this method always with the same runtime shape. * * If the while loop contains non-inlined method calls, it should be considered a performance bug. diff --git a/processing/src/main/java/org/apache/druid/query/topn/Historical1SimpleDoubleAggPooledTopNScannerPrototype.java b/processing/src/main/java/org/apache/druid/query/topn/Historical1SimpleDoubleAggPooledTopNScannerPrototype.java index aee95f13471..c3a09dbacbe 100644 --- a/processing/src/main/java/org/apache/druid/query/topn/Historical1SimpleDoubleAggPooledTopNScannerPrototype.java +++ b/processing/src/main/java/org/apache/druid/query/topn/Historical1SimpleDoubleAggPooledTopNScannerPrototype.java @@ -42,7 +42,7 @@ public class Historical1SimpleDoubleAggPooledTopNScannerPrototype * It should be checked with a tool like https://github.com/AdoptOpenJDK/jitwatch that C2 compiler output for this * method doesn't have any method calls in the while loop, i. e. all method calls are inlined. To be able to see * assembly of this method in JITWatch and other similar tools, {@link - * PooledTopNAlgorithm#specializeHistorical1SimpleDoubleAggPooledTopN} should be turned off. Note that in this case + * PooledTopNAlgorithm#SPECIALIZE_HISTORICAL_ONE_SIMPLE_DOUBLE_AGG_POOLED_TOPN} should be turned off. Note that in this case * the benchmark should be "naturally monomorphic", i. e. execute this method always with the same runtime shape. * * If the while loop contains non-inlined method calls, it should be considered a performance bug.
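The prototype javadoc updates above and below all point at the specialization kill switches in PooledTopNAlgorithm. As a minimal sketch of that toggle pattern (the class name DemoAlgorithm and the property name dontSpecializeDemo are illustrative, not part of this patch), such a switch is read from a JVM system property once at class-load time and routes execution between the generated specialization and the plain prototype:

public class DemoAlgorithm
{
  // true unless the JVM was started with -DdontSpecializeDemo=true; the constant name
  // follows the UPPER_SNAKE_CASE convention this patch applies to static fields
  private static final boolean SPECIALIZE_DEMO = !Boolean.getBoolean("dontSpecializeDemo");

  static String scanAndAggregate()
  {
    if (SPECIALIZE_DEMO) {
      return "specialized implementation"; // hot production path
    }
    return "prototype implementation"; // monomorphic path, inspectable in JITWatch
  }
}

Turning such a flag off (i.e. setting the corresponding dontSpecialize* property) keeps the prototype method "naturally monomorphic" for benchmarking, which is what the renamed {@link} targets in these javadocs refer to.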
diff --git a/processing/src/main/java/org/apache/druid/query/topn/HistoricalSingleValueDimSelector1SimpleDoubleAggPooledTopNScannerPrototype.java b/processing/src/main/java/org/apache/druid/query/topn/HistoricalSingleValueDimSelector1SimpleDoubleAggPooledTopNScannerPrototype.java index 850a885a854..adedbd75fe6 100644 --- a/processing/src/main/java/org/apache/druid/query/topn/HistoricalSingleValueDimSelector1SimpleDoubleAggPooledTopNScannerPrototype.java +++ b/processing/src/main/java/org/apache/druid/query/topn/HistoricalSingleValueDimSelector1SimpleDoubleAggPooledTopNScannerPrototype.java @@ -41,7 +41,7 @@ public class HistoricalSingleValueDimSelector1SimpleDoubleAggPooledTopNScannerPr * It should be checked with a tool like https://github.com/AdoptOpenJDK/jitwatch that C2 compiler output for this * method doesn't have any method calls in the while loop, i. e. all method calls are inlined. To be able to see * assembly of this method in JITWatch and other similar tools, {@link - * PooledTopNAlgorithm#specializeHistoricalSingleValueDimSelector1SimpleDoubleAggPooledTopN} should be turned off. + * PooledTopNAlgorithm#SPECIALIZE_HISTORICAL_SINGLE_VALUE_DIM_SELECTOR_ONE_SIMPLE_DOUBLE_AGG_POOLED_TOPN} should be turned off. * Note that in this case the benchmark should be "naturally monomorphic", i. e. execute this method always with the * same runtime shape. * diff --git a/processing/src/main/java/org/apache/druid/query/topn/PooledTopNAlgorithm.java b/processing/src/main/java/org/apache/druid/query/topn/PooledTopNAlgorithm.java index 3234d852c5f..c546b6f97c6 100644 --- a/processing/src/main/java/org/apache/druid/query/topn/PooledTopNAlgorithm.java +++ b/processing/src/main/java/org/apache/druid/query/topn/PooledTopNAlgorithm.java @@ -53,13 +53,13 @@ import java.util.List; public class PooledTopNAlgorithm extends BaseTopNAlgorithm { - private static boolean specializeGeneric1AggPooledTopN = + private static boolean SPECIALIZE_GENERIC_ONE_AGG_POOLED_TOPN = !Boolean.getBoolean("dontSpecializeGeneric1AggPooledTopN"); - private static boolean specializeGeneric2AggPooledTopN = + private static boolean SPECIALIZE_GENERIC_TWO_AGG_POOLED_TOPN = !Boolean.getBoolean("dontSpecializeGeneric2AggPooledTopN"); - private static boolean specializeHistorical1SimpleDoubleAggPooledTopN = + private static boolean SPECIALIZE_HISTORICAL_ONE_SIMPLE_DOUBLE_AGG_POOLED_TOPN = !Boolean.getBoolean("dontSpecializeHistorical1SimpleDoubleAggPooledTopN"); - private static boolean specializeHistoricalSingleValueDimSelector1SimpleDoubleAggPooledTopN = + private static boolean SPECIALIZE_HISTORICAL_SINGLE_VALUE_DIM_SELECTOR_ONE_SIMPLE_DOUBLE_AGG_POOLED_TOPN = !Boolean.getBoolean("dontSpecializeHistoricalSingleValueDimSelector1SimpleDoubleAggPooledTopN"); /** @@ -68,38 +68,38 @@ public class PooledTopNAlgorithm @VisibleForTesting static void setSpecializeGeneric1AggPooledTopN(boolean value) { - PooledTopNAlgorithm.specializeGeneric1AggPooledTopN = value; + PooledTopNAlgorithm.SPECIALIZE_GENERIC_ONE_AGG_POOLED_TOPN = value; computeSpecializedScanAndAggregateImplementations(); } @VisibleForTesting static void setSpecializeGeneric2AggPooledTopN(boolean value) { - PooledTopNAlgorithm.specializeGeneric2AggPooledTopN = value; + PooledTopNAlgorithm.SPECIALIZE_GENERIC_TWO_AGG_POOLED_TOPN = value; computeSpecializedScanAndAggregateImplementations(); } @VisibleForTesting static void setSpecializeHistorical1SimpleDoubleAggPooledTopN(boolean value) { - PooledTopNAlgorithm.specializeHistorical1SimpleDoubleAggPooledTopN = value; + 
PooledTopNAlgorithm.SPECIALIZE_HISTORICAL_ONE_SIMPLE_DOUBLE_AGG_POOLED_TOPN = value; computeSpecializedScanAndAggregateImplementations(); } @VisibleForTesting static void setSpecializeHistoricalSingleValueDimSelector1SimpleDoubleAggPooledTopN(boolean value) { - PooledTopNAlgorithm.specializeHistoricalSingleValueDimSelector1SimpleDoubleAggPooledTopN = value; + PooledTopNAlgorithm.SPECIALIZE_HISTORICAL_SINGLE_VALUE_DIM_SELECTOR_ONE_SIMPLE_DOUBLE_AGG_POOLED_TOPN = value; computeSpecializedScanAndAggregateImplementations(); } - private static final Generic1AggPooledTopNScanner defaultGeneric1AggScanner = + private static final Generic1AggPooledTopNScanner DEFAULT_GENERIC_ONE_AGG_SCANNER = new Generic1AggPooledTopNScannerPrototype(); - private static final Generic2AggPooledTopNScanner defaultGeneric2AggScanner = + private static final Generic2AggPooledTopNScanner DEFAULT_GENERIC_TWO_AGG_SCANNER = new Generic2AggPooledTopNScannerPrototype(); - private static final Historical1AggPooledTopNScanner defaultHistorical1SimpleDoubleAggScanner = + private static final Historical1AggPooledTopNScanner DEFAULT_HISTORICAL_ONE_SIMPLE_DOUBLE_AGG_SCANNER = new Historical1SimpleDoubleAggPooledTopNScannerPrototype(); - private static final Historical1AggPooledTopNScanner defaultHistoricalSingleValueDimSelector1SimpleDoubleAggScanner = + private static final Historical1AggPooledTopNScanner DEFAULT_HISTORICAL_SINGLE_VALUE_DIM_SELECTOR_ONE_SIMPLE_DOUBLE_AGG_SCANNER = new HistoricalSingleValueDimSelector1SimpleDoubleAggPooledTopNScannerPrototype(); private interface ScanAndAggregate @@ -115,7 +115,7 @@ public class PooledTopNAlgorithm ); } - private static final List specializedScanAndAggregateImplementations = new ArrayList<>(); + private static final List SPECIALIZED_SCAN_AND_AGGREGATE_IMPLEMENTATIONS = new ArrayList<>(); static { computeSpecializedScanAndAggregateImplementations(); @@ -123,10 +123,10 @@ public class PooledTopNAlgorithm private static void computeSpecializedScanAndAggregateImplementations() { - specializedScanAndAggregateImplementations.clear(); + SPECIALIZED_SCAN_AND_AGGREGATE_IMPLEMENTATIONS.clear(); // The order of the following `if` blocks matters, "more specialized" implementations go first - if (specializeHistoricalSingleValueDimSelector1SimpleDoubleAggPooledTopN) { - specializedScanAndAggregateImplementations.add((params, positions, theAggregators) -> { + if (SPECIALIZE_HISTORICAL_SINGLE_VALUE_DIM_SELECTOR_ONE_SIMPLE_DOUBLE_AGG_POOLED_TOPN) { + SPECIALIZED_SCAN_AND_AGGREGATE_IMPLEMENTATIONS.add((params, positions, theAggregators) -> { if (theAggregators.length == 1) { BufferAggregator aggregator = theAggregators[0]; final Cursor cursor = params.getCursor(); @@ -143,15 +143,15 @@ public class PooledTopNAlgorithm positions, (SimpleDoubleBufferAggregator) aggregator, (HistoricalCursor) cursor, - defaultHistoricalSingleValueDimSelector1SimpleDoubleAggScanner + DEFAULT_HISTORICAL_SINGLE_VALUE_DIM_SELECTOR_ONE_SIMPLE_DOUBLE_AGG_SCANNER ); } } return -1; }); } - if (specializeHistorical1SimpleDoubleAggPooledTopN) { - specializedScanAndAggregateImplementations.add((params, positions, theAggregators) -> { + if (SPECIALIZE_HISTORICAL_ONE_SIMPLE_DOUBLE_AGG_POOLED_TOPN) { + SPECIALIZED_SCAN_AND_AGGREGATE_IMPLEMENTATIONS.add((params, positions, theAggregators) -> { if (theAggregators.length == 1) { BufferAggregator aggregator = theAggregators[0]; final Cursor cursor = params.getCursor(); @@ -168,23 +168,23 @@ public class PooledTopNAlgorithm positions, (SimpleDoubleBufferAggregator) aggregator, 
(HistoricalCursor) cursor, - defaultHistorical1SimpleDoubleAggScanner + DEFAULT_HISTORICAL_ONE_SIMPLE_DOUBLE_AGG_SCANNER ); } } return -1; }); } - if (specializeGeneric1AggPooledTopN) { - specializedScanAndAggregateImplementations.add((params, positions, theAggregators) -> { + if (SPECIALIZE_GENERIC_ONE_AGG_POOLED_TOPN) { + SPECIALIZED_SCAN_AND_AGGREGATE_IMPLEMENTATIONS.add((params, positions, theAggregators) -> { if (theAggregators.length == 1) { return scanAndAggregateGeneric1Agg(params, positions, theAggregators[0], params.getCursor()); } return -1; }); } - if (specializeGeneric2AggPooledTopN) { - specializedScanAndAggregateImplementations.add((params, positions, theAggregators) -> { + if (SPECIALIZE_GENERIC_TWO_AGG_POOLED_TOPN) { + SPECIALIZED_SCAN_AND_AGGREGATE_IMPLEMENTATIONS.add((params, positions, theAggregators) -> { if (theAggregators.length == 2) { return scanAndAggregateGeneric2Agg(params, positions, theAggregators, params.getCursor()); } @@ -322,7 +322,7 @@ public class PooledTopNAlgorithm final BufferAggregator[] theAggregators ) { - for (ScanAndAggregate specializedScanAndAggregate : specializedScanAndAggregateImplementations) { + for (ScanAndAggregate specializedScanAndAggregate : SPECIALIZED_SCAN_AND_AGGREGATE_IMPLEMENTATIONS) { long processedRows = specializedScanAndAggregate.scanAndAggregate(params, positions, theAggregators); if (processedRows >= 0) { BaseQuery.checkInterrupted(); @@ -375,7 +375,7 @@ public class PooledTopNAlgorithm Class prototypeClass = Generic1AggPooledTopNScannerPrototype.class; SpecializationState specializationState = SpecializationService .getSpecializationState(prototypeClass, runtimeShape); - Generic1AggPooledTopNScanner scanner = specializationState.getSpecializedOrDefault(defaultGeneric1AggScanner); + Generic1AggPooledTopNScanner scanner = specializationState.getSpecializedOrDefault(DEFAULT_GENERIC_ONE_AGG_SCANNER); long processedRows = scanner.scanAndAggregate( params.getDimSelector(), aggregator, @@ -399,7 +399,7 @@ public class PooledTopNAlgorithm Class prototypeClass = Generic2AggPooledTopNScannerPrototype.class; SpecializationState specializationState = SpecializationService .getSpecializationState(prototypeClass, runtimeShape); - Generic2AggPooledTopNScanner scanner = specializationState.getSpecializedOrDefault(defaultGeneric2AggScanner); + Generic2AggPooledTopNScanner scanner = specializationState.getSpecializedOrDefault(DEFAULT_GENERIC_TWO_AGG_SCANNER); int[] aggregatorSizes = params.getAggregatorSizes(); long processedRows = scanner.scanAndAggregate( params.getDimSelector(), diff --git a/processing/src/main/java/org/apache/druid/query/topn/TopNNumericResultBuilder.java b/processing/src/main/java/org/apache/druid/query/topn/TopNNumericResultBuilder.java index 09f41599fe2..561c104a6a1 100644 --- a/processing/src/main/java/org/apache/druid/query/topn/TopNNumericResultBuilder.java +++ b/processing/src/main/java/org/apache/druid/query/topn/TopNNumericResultBuilder.java @@ -48,7 +48,7 @@ public class TopNNumericResultBuilder implements TopNResultBuilder private final List postAggs; private final PriorityQueue pQueue; private final String[] aggFactoryNames; - private static final Comparator dimValueComparator = new Comparator() + private static final Comparator DIM_VALUE_COMPARATOR = new Comparator() { @Override public int compare(Comparable o1, Comparable o2) @@ -96,7 +96,7 @@ public class TopNNumericResultBuilder implements TopNResultBuilder int retVal = metricComparator.compare(d1.getTopNMetricVal(), d2.getTopNMetricVal()); if (retVal == 
0) { - retVal = dimValueComparator.compare(d1.getDimValue(), d2.getDimValue()); + retVal = DIM_VALUE_COMPARATOR.compare(d1.getDimValue(), d2.getDimValue()); } return retVal; @@ -228,7 +228,7 @@ public class TopNNumericResultBuilder implements TopNResultBuilder int retVal = metricComparator.compare(d2.getTopNMetricVal(), d1.getTopNMetricVal()); if (retVal == 0) { - retVal = dimValueComparator.compare(d1.getDimValue(), d2.getDimValue()); + retVal = DIM_VALUE_COMPARATOR.compare(d1.getDimValue(), d2.getDimValue()); } return retVal; diff --git a/processing/src/main/java/org/apache/druid/segment/CompressedPools.java b/processing/src/main/java/org/apache/druid/segment/CompressedPools.java index 19dfa4458df..7137a1c3be9 100644 --- a/processing/src/main/java/org/apache/druid/segment/CompressedPools.java +++ b/processing/src/main/java/org/apache/druid/segment/CompressedPools.java @@ -37,7 +37,7 @@ public class CompressedPools private static final Logger log = new Logger(CompressedPools.class); public static final int BUFFER_SIZE = 0x10000; - private static final NonBlockingPool bufferRecyclerPool = new StupidPool<>( + private static final NonBlockingPool BUFFER_RECYCLER_POOL = new StupidPool<>( "bufferRecyclerPool", new Supplier() { @@ -54,10 +54,10 @@ public class CompressedPools public static ResourceHolder getBufferRecycler() { - return bufferRecyclerPool.take(); + return BUFFER_RECYCLER_POOL.take(); } - private static final NonBlockingPool outputBytesPool = new StupidPool( + private static final NonBlockingPool OUTPUT_BYTES_POOL = new StupidPool( "outputBytesPool", new Supplier() { @@ -74,10 +74,10 @@ public class CompressedPools public static ResourceHolder getOutputBytes() { - return outputBytesPool.take(); + return OUTPUT_BYTES_POOL.take(); } - private static final NonBlockingPool bigEndByteBufPool = new StupidPool( + private static final NonBlockingPool BIG_ENDIAN_BYTE_BUF_POOL = new StupidPool( "bigEndByteBufPool", new Supplier() { @@ -92,7 +92,7 @@ public class CompressedPools } ); - private static final NonBlockingPool littleEndByteBufPool = new StupidPool( + private static final NonBlockingPool LITTLE_ENDIAN_BYTE_BUF_POOL = new StupidPool( "littleEndByteBufPool", new Supplier() { @@ -110,8 +110,8 @@ public class CompressedPools public static ResourceHolder getByteBuf(ByteOrder order) { if (order == ByteOrder.LITTLE_ENDIAN) { - return littleEndByteBufPool.take(); + return LITTLE_ENDIAN_BYTE_BUF_POOL.take(); } - return bigEndByteBufPool.take(); + return BIG_ENDIAN_BYTE_BUF_POOL.take(); } } diff --git a/processing/src/main/java/org/apache/druid/segment/IndexIO.java b/processing/src/main/java/org/apache/druid/segment/IndexIO.java index a6257134539..f577fe6fd61 100644 --- a/processing/src/main/java/org/apache/druid/segment/IndexIO.java +++ b/processing/src/main/java/org/apache/druid/segment/IndexIO.java @@ -91,7 +91,7 @@ public class IndexIO private final Map indexLoaders; private static final EmittingLogger log = new EmittingLogger(IndexIO.class); - private static final SerializerUtils serializerUtils = new SerializerUtils(); + private static final SerializerUtils SERIALIZER_UTILS = new SerializerUtils(); private final ObjectMapper mapper; @@ -329,7 +329,7 @@ public class IndexIO GenericIndexed.STRING_STRATEGY, smooshedFiles ); - final Interval dataInterval = Intervals.of(serializerUtils.readString(indexBuffer)); + final Interval dataInterval = Intervals.of(SERIALIZER_UTILS.readString(indexBuffer)); final BitmapSerdeFactory bitmapSerdeFactory = new BitmapSerde.LegacyBitmapSerdeFactory(); 
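The legacy loader above pulls the segment's data interval back out of index.drd with SERIALIZER_UTILS.readString on the mapped buffer. A minimal sketch of the length-prefixed string convention readString is assumed to follow here (a 4-byte length, then that many UTF-8 bytes; LengthPrefixedStrings is an illustrative name, not Druid's API):

    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;

    final class LengthPrefixedStrings
    {
      // Assumed wire format: a 4-byte int length, then `length` UTF-8 bytes.
      static String readString(ByteBuffer buf)
      {
        int length = buf.getInt();
        byte[] bytes = new byte[length];
        buf.get(bytes); // advances the buffer position past the payload
        return new String(bytes, StandardCharsets.UTF_8);
      }
    }

Because each read advances the ByteBuffer position, the loader can pull several length-prefixed values out of the same mapped file in sequence, as the dimension and inverted-index loops below do.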
CompressedColumnarLongsSupplier timestamps = CompressedColumnarLongsSupplier.fromByteBuffer( @@ -354,7 +354,7 @@ public class IndexIO for (String dimension : IndexedIterable.create(availableDimensions)) { ByteBuffer dimBuffer = smooshedFiles.mapFile(makeDimFile(inDir, dimension).getName()); - String fileDimensionName = serializerUtils.readString(dimBuffer); + String fileDimensionName = SERIALIZER_UTILS.readString(dimBuffer); Preconditions.checkState( dimension.equals(fileDimensionName), "Dimension file[%s] has dimension[%s] in it!?", @@ -369,7 +369,7 @@ public class IndexIO ByteBuffer invertedBuffer = smooshedFiles.mapFile("inverted.drd"); for (int i = 0; i < availableDimensions.size(); ++i) { bitmaps.put( - serializerUtils.readString(invertedBuffer), + SERIALIZER_UTILS.readString(invertedBuffer), GenericIndexed.read(invertedBuffer, bitmapSerdeFactory.getObjectStrategy()) ); } @@ -378,7 +378,7 @@ public class IndexIO ByteBuffer spatialBuffer = smooshedFiles.mapFile("spatial.drd"); while (spatialBuffer != null && spatialBuffer.hasRemaining()) { spatialIndexed.put( - serializerUtils.readString(spatialBuffer), + SERIALIZER_UTILS.readString(spatialBuffer), new ImmutableRTreeObjectStrategy(bitmapSerdeFactory.getBitmapFactory()).fromByteBufferWithSize( spatialBuffer ) @@ -552,7 +552,7 @@ public class IndexIO * this information is appended to the end of index.drd. */ if (indexBuffer.hasRemaining()) { - segmentBitmapSerdeFactory = mapper.readValue(serializerUtils.readString(indexBuffer), BitmapSerdeFactory.class); + segmentBitmapSerdeFactory = mapper.readValue(SERIALIZER_UTILS.readString(indexBuffer), BitmapSerdeFactory.class); } else { segmentBitmapSerdeFactory = new BitmapSerde.LegacyBitmapSerdeFactory(); } @@ -562,7 +562,7 @@ public class IndexIO if (metadataBB != null) { try { metadata = mapper.readValue( - serializerUtils.readBytes(metadataBB, metadataBB.remaining()), + SERIALIZER_UTILS.readBytes(metadataBB, metadataBB.remaining()), Metadata.class ); } @@ -606,7 +606,7 @@ public class IndexIO throws IOException { ColumnDescriptor serde = mapper.readValue( - serializerUtils.readString(byteBuffer), ColumnDescriptor.class + SERIALIZER_UTILS.readString(byteBuffer), ColumnDescriptor.class ); return serde.read(byteBuffer, columnConfig, smooshedFiles); } diff --git a/processing/src/main/java/org/apache/druid/segment/IndexMerger.java b/processing/src/main/java/org/apache/druid/segment/IndexMerger.java index b26e9caa849..c1d42eac159 100644 --- a/processing/src/main/java/org/apache/druid/segment/IndexMerger.java +++ b/processing/src/main/java/org/apache/druid/segment/IndexMerger.java @@ -61,7 +61,7 @@ public interface IndexMerger { Logger log = new Logger(IndexMerger.class); - SerializerUtils serializerUtils = new SerializerUtils(); + SerializerUtils SERIALIZER_UTILS = new SerializerUtils(); int INVALID_ROW = -1; static List getMergedDimensionsFromQueryableIndexes(List indexes) diff --git a/processing/src/main/java/org/apache/druid/segment/IndexMergerV9.java b/processing/src/main/java/org/apache/druid/segment/IndexMergerV9.java index de6284926fa..aeb8fbb7eca 100644 --- a/processing/src/main/java/org/apache/druid/segment/IndexMergerV9.java +++ b/processing/src/main/java/org/apache/druid/segment/IndexMergerV9.java @@ -289,7 +289,7 @@ public class IndexMergerV9 implements IndexMerger final long numBytes = cols.getSerializedSize() + dims.getSerializedSize() + 16 - + serializerUtils.getSerializedStringByteSize(bitmapSerdeFactoryType); + + SERIALIZER_UTILS.getSerializedStringByteSize(bitmapSerdeFactoryType); 
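The numBytes computed above has to agree byte for byte with what the subsequent writes emit, because addWithSmooshedWriter pre-allocates exactly that much space in the smoosh file. A hedged sketch of the accounting, assuming the literal 16 covers the interval's two longs (the start and end millis written just below) and that getSerializedStringByteSize is a 4-byte length prefix plus the UTF-8 payload (IndexDrdSizing is an illustrative name, not Druid's API):

    import java.nio.charset.StandardCharsets;

    final class IndexDrdSizing
    {
      // Assumed index.drd tail layout: interval start/end millis, then the
      // bitmap serde factory type as a length-prefixed UTF-8 string.
      static long headerSize(long colsSize, long dimsSize, String bitmapSerdeFactoryType)
      {
        long intervalBytes = 2L * Long.BYTES; // the literal 16 above
        long stringBytes = Integer.BYTES
                           + bitmapSerdeFactoryType.getBytes(StandardCharsets.UTF_8).length;
        return colsSize + dimsSize + intervalBytes + stringBytes;
      }
    }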
final SmooshedWriter writer = v9Smoosher.addWithSmooshedWriter("index.drd", numBytes); cols.writeTo(writer, v9Smoosher); @@ -304,10 +304,10 @@ public class IndexMergerV9 implements IndexMerger } final Interval dataInterval = new Interval(minTime, maxTime); - serializerUtils.writeLong(writer, dataInterval.getStartMillis()); - serializerUtils.writeLong(writer, dataInterval.getEndMillis()); + SERIALIZER_UTILS.writeLong(writer, dataInterval.getStartMillis()); + SERIALIZER_UTILS.writeLong(writer, dataInterval.getEndMillis()); - serializerUtils.writeString(writer, bitmapSerdeFactoryType); + SERIALIZER_UTILS.writeString(writer, bitmapSerdeFactoryType); writer.close(); IndexIO.checkFileSize(new File(outDir, "index.drd")); @@ -450,7 +450,7 @@ public class IndexMergerV9 implements IndexMerger ) throws IOException { ZeroCopyByteArrayOutputStream specBytes = new ZeroCopyByteArrayOutputStream(); - serializerUtils.writeString(specBytes, mapper.writeValueAsString(serdeficator)); + SERIALIZER_UTILS.writeString(specBytes, mapper.writeValueAsString(serdeficator)); try (SmooshedWriter channel = v9Smoosher.addWithSmooshedWriter( columnName, specBytes.size() + serdeficator.getSerializedSize() diff --git a/processing/src/main/java/org/apache/druid/segment/MetricHolder.java b/processing/src/main/java/org/apache/druid/segment/MetricHolder.java index 8f4ee20ab21..11d4b688712 100644 --- a/processing/src/main/java/org/apache/druid/segment/MetricHolder.java +++ b/processing/src/main/java/org/apache/druid/segment/MetricHolder.java @@ -35,18 +35,18 @@ import java.nio.ByteOrder; */ public class MetricHolder { - private static final byte[] version = new byte[]{0x0}; - private static final SerializerUtils serializerUtils = new SerializerUtils(); + private static final byte[] VERSION = new byte[]{0x0}; + private static final SerializerUtils SERIALIZER_UTILS = new SerializerUtils(); public static MetricHolder fromByteBuffer(ByteBuffer buf) { final byte ver = buf.get(); - if (version[0] != ver) { + if (VERSION[0] != ver) { throw new ISE("Unknown version[%s] of MetricHolder", ver); } - final String metricName = serializerUtils.readString(buf); - final String typeName = serializerUtils.readString(buf); + final String metricName = SERIALIZER_UTILS.readString(buf); + final String typeName = SERIALIZER_UTILS.readString(buf); MetricHolder holder = new MetricHolder(metricName, typeName); switch (holder.type) { diff --git a/processing/src/main/java/org/apache/druid/segment/data/BlockLayoutColumnarDoublesSerializer.java b/processing/src/main/java/org/apache/druid/segment/data/BlockLayoutColumnarDoublesSerializer.java index 3558473a931..c2247ac18d9 100644 --- a/processing/src/main/java/org/apache/druid/segment/data/BlockLayoutColumnarDoublesSerializer.java +++ b/processing/src/main/java/org/apache/druid/segment/data/BlockLayoutColumnarDoublesSerializer.java @@ -37,7 +37,7 @@ import java.nio.channels.WritableByteChannel; */ public class BlockLayoutColumnarDoublesSerializer implements ColumnarDoublesSerializer { - private static final MetaSerdeHelper metaSerdeHelper = MetaSerdeHelper + private static final MetaSerdeHelper META_SERDE_HELPER = MetaSerdeHelper .firstWriteByte((BlockLayoutColumnarDoublesSerializer x) -> CompressedColumnarDoublesSuppliers.VERSION) .writeInt(x -> x.numInserted) .writeInt(x -> CompressedPools.BUFFER_SIZE / Double.BYTES) @@ -95,14 +95,14 @@ public class BlockLayoutColumnarDoublesSerializer implements ColumnarDoublesSeri public long getSerializedSize() throws IOException { writeEndBuffer(); - return 
metaSerdeHelper.size(this) + flattener.getSerializedSize(); + return META_SERDE_HELPER.size(this) + flattener.getSerializedSize(); } @Override public void writeTo(WritableByteChannel channel, FileSmoosher smoosher) throws IOException { writeEndBuffer(); - metaSerdeHelper.writeTo(channel, this); + META_SERDE_HELPER.writeTo(channel, this); flattener.writeTo(channel, smoosher); } diff --git a/processing/src/main/java/org/apache/druid/segment/data/BlockLayoutColumnarFloatsSerializer.java b/processing/src/main/java/org/apache/druid/segment/data/BlockLayoutColumnarFloatsSerializer.java index aa225b80efe..e247252f6b3 100644 --- a/processing/src/main/java/org/apache/druid/segment/data/BlockLayoutColumnarFloatsSerializer.java +++ b/processing/src/main/java/org/apache/druid/segment/data/BlockLayoutColumnarFloatsSerializer.java @@ -37,7 +37,7 @@ import java.nio.channels.WritableByteChannel; */ public class BlockLayoutColumnarFloatsSerializer implements ColumnarFloatsSerializer { - private static final MetaSerdeHelper metaSerdeHelper = MetaSerdeHelper + private static final MetaSerdeHelper META_SERDE_HELPER = MetaSerdeHelper .firstWriteByte((BlockLayoutColumnarFloatsSerializer x) -> CompressedColumnarFloatsSupplier.VERSION) .writeInt(x -> x.numInserted) .writeInt(x -> CompressedPools.BUFFER_SIZE / Float.BYTES) @@ -100,14 +100,14 @@ public class BlockLayoutColumnarFloatsSerializer implements ColumnarFloatsSerial public long getSerializedSize() throws IOException { writeEndBuffer(); - return metaSerdeHelper.size(this) + flattener.getSerializedSize(); + return META_SERDE_HELPER.size(this) + flattener.getSerializedSize(); } @Override public void writeTo(WritableByteChannel channel, FileSmoosher smoosher) throws IOException { writeEndBuffer(); - metaSerdeHelper.writeTo(channel, this); + META_SERDE_HELPER.writeTo(channel, this); flattener.writeTo(channel, smoosher); } diff --git a/processing/src/main/java/org/apache/druid/segment/data/BlockLayoutColumnarLongsSerializer.java b/processing/src/main/java/org/apache/druid/segment/data/BlockLayoutColumnarLongsSerializer.java index 778a06140de..cb404025cc8 100644 --- a/processing/src/main/java/org/apache/druid/segment/data/BlockLayoutColumnarLongsSerializer.java +++ b/processing/src/main/java/org/apache/druid/segment/data/BlockLayoutColumnarLongsSerializer.java @@ -36,7 +36,7 @@ import java.nio.channels.WritableByteChannel; */ public class BlockLayoutColumnarLongsSerializer implements ColumnarLongsSerializer { - private static final MetaSerdeHelper metaSerdeHelper = MetaSerdeHelper + private static final MetaSerdeHelper META_SERDE_HELPER = MetaSerdeHelper .firstWriteByte((BlockLayoutColumnarLongsSerializer x) -> CompressedColumnarLongsSupplier.VERSION) .writeInt(x -> x.numInserted) .writeInt(x -> x.sizePer) @@ -106,14 +106,14 @@ public class BlockLayoutColumnarLongsSerializer implements ColumnarLongsSerializ public long getSerializedSize() throws IOException { writeEndBuffer(); - return metaSerdeHelper.size(this) + flattener.getSerializedSize(); + return META_SERDE_HELPER.size(this) + flattener.getSerializedSize(); } @Override public void writeTo(WritableByteChannel channel, FileSmoosher smoosher) throws IOException { writeEndBuffer(); - metaSerdeHelper.writeTo(channel, this); + META_SERDE_HELPER.writeTo(channel, this); flattener.writeTo(channel, smoosher); } diff --git a/processing/src/main/java/org/apache/druid/segment/data/CompressedColumnarFloatsSupplier.java b/processing/src/main/java/org/apache/druid/segment/data/CompressedColumnarFloatsSupplier.java index 
17b8af1c57c..64b77f07aed 100644 --- a/processing/src/main/java/org/apache/druid/segment/data/CompressedColumnarFloatsSupplier.java +++ b/processing/src/main/java/org/apache/druid/segment/data/CompressedColumnarFloatsSupplier.java @@ -36,7 +36,7 @@ public class CompressedColumnarFloatsSupplier implements Supplier metaSerdeHelper = MetaSerdeHelper + private static final MetaSerdeHelper META_SERDE_HELPER = MetaSerdeHelper .firstWriteByte((CompressedColumnarFloatsSupplier x) -> VERSION) .writeInt(x -> x.totalSize) .writeInt(x -> x.sizePer) @@ -72,13 +72,13 @@ public class CompressedColumnarFloatsSupplier implements Supplier metaSerdeHelper = MetaSerdeHelper + private static final MetaSerdeHelper META_SERDE_HELPER = MetaSerdeHelper .firstWriteByte((CompressedColumnarIntsSerializer x) -> VERSION) .writeInt(x -> x.numInserted) .writeInt(x -> x.chunkFactor) @@ -116,14 +116,14 @@ public class CompressedColumnarIntsSerializer extends SingleValueColumnarIntsSer public long getSerializedSize() throws IOException { writeEndBuffer(); - return metaSerdeHelper.size(this) + flattener.getSerializedSize(); + return META_SERDE_HELPER.size(this) + flattener.getSerializedSize(); } @Override public void writeTo(WritableByteChannel channel, FileSmoosher smoosher) throws IOException { writeEndBuffer(); - metaSerdeHelper.writeTo(channel, this); + META_SERDE_HELPER.writeTo(channel, this); flattener.writeTo(channel, smoosher); } diff --git a/processing/src/main/java/org/apache/druid/segment/data/CompressedColumnarIntsSupplier.java b/processing/src/main/java/org/apache/druid/segment/data/CompressedColumnarIntsSupplier.java index ca154a93667..f58ea592c7e 100644 --- a/processing/src/main/java/org/apache/druid/segment/data/CompressedColumnarIntsSupplier.java +++ b/processing/src/main/java/org/apache/druid/segment/data/CompressedColumnarIntsSupplier.java @@ -43,7 +43,7 @@ public class CompressedColumnarIntsSupplier implements WritableSupplier metaSerdeHelper = MetaSerdeHelper + private static final MetaSerdeHelper META_SERDE_HELPER = MetaSerdeHelper .firstWriteByte((CompressedColumnarIntsSupplier x) -> VERSION) .writeInt(x -> x.totalSize) .writeInt(x -> x.sizePer) @@ -98,13 +98,13 @@ public class CompressedColumnarIntsSupplier implements WritableSupplier, public static final byte LZF_VERSION = 0x1; public static final byte VERSION = 0x2; - private static final MetaSerdeHelper metaSerdeHelper = MetaSerdeHelper + private static final MetaSerdeHelper META_SERDE_HELPER = MetaSerdeHelper .firstWriteByte((CompressedColumnarLongsSupplier x) -> VERSION) .writeInt(x -> x.totalSize) .writeInt(x -> x.sizePer) @@ -87,13 +87,13 @@ public class CompressedColumnarLongsSupplier implements Supplier, @Override public long getSerializedSize() { - return metaSerdeHelper.size(this) + (long) buffer.remaining(); + return META_SERDE_HELPER.size(this) + (long) buffer.remaining(); } @Override public void writeTo(WritableByteChannel channel, FileSmoosher smoosher) throws IOException { - metaSerdeHelper.writeTo(channel, this); + META_SERDE_HELPER.writeTo(channel, this); Channels.writeFully(channel, buffer.asReadOnlyBuffer()); } diff --git a/processing/src/main/java/org/apache/druid/segment/data/CompressedVSizeColumnarIntsSerializer.java b/processing/src/main/java/org/apache/druid/segment/data/CompressedVSizeColumnarIntsSerializer.java index 72f9fc988b3..0f6c54a6006 100644 --- a/processing/src/main/java/org/apache/druid/segment/data/CompressedVSizeColumnarIntsSerializer.java +++ 
b/processing/src/main/java/org/apache/druid/segment/data/CompressedVSizeColumnarIntsSerializer.java @@ -39,7 +39,7 @@ public class CompressedVSizeColumnarIntsSerializer extends SingleValueColumnarIn { private static final byte VERSION = CompressedVSizeColumnarIntsSupplier.VERSION; - private static final MetaSerdeHelper metaSerdeHelper = MetaSerdeHelper + private static final MetaSerdeHelper META_SERDE_HELPER = MetaSerdeHelper .firstWriteByte((CompressedVSizeColumnarIntsSerializer x) -> VERSION) .writeByte(x -> ByteUtils.checkedCast(x.numBytes)) .writeInt(x -> x.numInserted) @@ -155,14 +155,14 @@ public class CompressedVSizeColumnarIntsSerializer extends SingleValueColumnarIn public long getSerializedSize() throws IOException { writeEndBuffer(); - return metaSerdeHelper.size(this) + flattener.getSerializedSize(); + return META_SERDE_HELPER.size(this) + flattener.getSerializedSize(); } @Override public void writeTo(WritableByteChannel channel, FileSmoosher smoosher) throws IOException { writeEndBuffer(); - metaSerdeHelper.writeTo(channel, this); + META_SERDE_HELPER.writeTo(channel, this); flattener.writeTo(channel, smoosher); } diff --git a/processing/src/main/java/org/apache/druid/segment/data/CompressedVSizeColumnarIntsSupplier.java b/processing/src/main/java/org/apache/druid/segment/data/CompressedVSizeColumnarIntsSupplier.java index 7f10c103b29..323945c523c 100644 --- a/processing/src/main/java/org/apache/druid/segment/data/CompressedVSizeColumnarIntsSupplier.java +++ b/processing/src/main/java/org/apache/druid/segment/data/CompressedVSizeColumnarIntsSupplier.java @@ -42,7 +42,7 @@ public class CompressedVSizeColumnarIntsSupplier implements WritableSupplier metaSerdeHelper = MetaSerdeHelper + private static final MetaSerdeHelper META_SERDE_HELPER = MetaSerdeHelper .firstWriteByte((CompressedVSizeColumnarIntsSupplier x) -> VERSION) .writeByte(x -> ByteUtils.checkedCast(x.numBytes)) .writeInt(x -> x.totalSize) @@ -124,13 +124,13 @@ public class CompressedVSizeColumnarIntsSupplier implements WritableSupplier { - private static final byte version = 0x2; + private static final byte VERSION = 0x2; /** * See class-level comment @@ -73,7 +73,7 @@ public class CompressedVSizeColumnarMultiIntsSupplier implements WritableSupplie @Override public void writeTo(WritableByteChannel channel, FileSmoosher smoosher) throws IOException { - Channels.writeFully(channel, ByteBuffer.wrap(new byte[]{version})); + Channels.writeFully(channel, ByteBuffer.wrap(new byte[]{VERSION})); offsetSupplier.writeTo(channel, smoosher); valueSupplier.writeTo(channel, smoosher); } @@ -82,7 +82,7 @@ public class CompressedVSizeColumnarMultiIntsSupplier implements WritableSupplie { byte versionFromBuffer = buffer.get(); - if (versionFromBuffer == version) { + if (versionFromBuffer == VERSION) { CompressedVSizeColumnarIntsSupplier offsetSupplier = CompressedVSizeColumnarIntsSupplier.fromByteBuffer( buffer, order diff --git a/processing/src/main/java/org/apache/druid/segment/data/CompressionFactory.java b/processing/src/main/java/org/apache/druid/segment/data/CompressionFactory.java index b7e7c96b26b..ddacb9095a9 100644 --- a/processing/src/main/java/org/apache/druid/segment/data/CompressionFactory.java +++ b/processing/src/main/java/org/apache/druid/segment/data/CompressionFactory.java @@ -199,11 +199,11 @@ public class CompressionFactory return id; } - static final Map idMap = new HashMap<>(); + static final Map ID_MAP = new HashMap<>(); static { for (LongEncodingFormat format : LongEncodingFormat.values()) { - 
idMap.put(format.getId(), format); + ID_MAP.put(format.getId(), format); } } @@ -211,7 +211,7 @@ public static LongEncodingFormat forId(byte id) { - return idMap.get(id); + return ID_MAP.get(id); } } diff --git a/processing/src/main/java/org/apache/druid/segment/data/CompressionStrategy.java b/processing/src/main/java/org/apache/druid/segment/data/CompressionStrategy.java index b5347c9a8d4..70c72e5bed9 100644 --- a/processing/src/main/java/org/apache/druid/segment/data/CompressionStrategy.java +++ b/processing/src/main/java/org/apache/druid/segment/data/CompressionStrategy.java @@ -52,13 +52,13 @@ public enum CompressionStrategy @Override public Decompressor getDecompressor() { - return LZFDecompressor.defaultDecompressor; + return LZFDecompressor.DEFAULT_DECOMPRESSOR; } @Override public Compressor getCompressor() { - return LZFCompressor.defaultCompressor; + return LZFCompressor.DEFAULT_COMPRESSOR; } }, @@ -66,26 +66,26 @@ @Override public Decompressor getDecompressor() { - return LZ4Decompressor.defaultDecompressor; + return LZ4Decompressor.DEFAULT_DECOMPRESSOR; } @Override public Compressor getCompressor() { - return LZ4Compressor.defaultCompressor; + return LZ4Compressor.DEFAULT_COMPRESSOR; } }, UNCOMPRESSED((byte) 0xFF) { @Override public Decompressor getDecompressor() { - return UncompressedDecompressor.defaultDecompressor; + return UncompressedDecompressor.DEFAULT_DECOMPRESSOR; } @Override public Compressor getCompressor() { - return UncompressedCompressor.defaultCompressor; + return UncompressedCompressor.DEFAULT_COMPRESSOR; } }, /** @@ -139,17 +139,17 @@ return valueOf(StringUtils.toUpperCase(name)); } - static final Map idMap = new HashMap<>(); + static final Map ID_MAP = new HashMap<>(); static { for (CompressionStrategy strategy : CompressionStrategy.values()) { - idMap.put(strategy.getId(), strategy); + ID_MAP.put(strategy.getId(), strategy); } } public static CompressionStrategy forId(byte id) { - return idMap.get(id); + return ID_MAP.get(id); } // TODO remove this method and change all its callers to use all CompressionStrategy values when NONE type is supported by all types @@ -202,7 +202,7 @@ public enum CompressionStrategy public static class UncompressedCompressor extends Compressor { - private static final UncompressedCompressor defaultCompressor = new UncompressedCompressor(); + private static final UncompressedCompressor DEFAULT_COMPRESSOR = new UncompressedCompressor(); @Override ByteBuffer allocateOutBuffer(int inputSize, Closer closer) @@ -219,7 +219,7 @@ public static class UncompressedDecompressor implements Decompressor { - private static final UncompressedDecompressor defaultDecompressor = new UncompressedDecompressor(); + private static final UncompressedDecompressor DEFAULT_DECOMPRESSOR = new UncompressedDecompressor(); @Override public void decompress(ByteBuffer in, int numBytes, ByteBuffer out) @@ -234,7 +234,7 @@ public static class LZFDecompressor implements Decompressor { - private static final LZFDecompressor defaultDecompressor = new LZFDecompressor(); + private static final LZFDecompressor DEFAULT_DECOMPRESSOR = new LZFDecompressor(); @Override public void decompress(ByteBuffer in, int numBytes, ByteBuffer out) @@ -257,7 +257,7 @@ public static class LZFCompressor extends Compressor { - private static final LZFCompressor defaultCompressor = new LZFCompressor(); +
private static final LZFCompressor DEFAULT_COMPRESSOR = new LZFCompressor(); @Override public ByteBuffer allocateOutBuffer(int inputSize, Closer closer) @@ -286,15 +286,15 @@ public enum CompressionStrategy public static class LZ4Decompressor implements Decompressor { - private static final LZ4SafeDecompressor lz4Safe = LZ4Factory.fastestInstance().safeDecompressor(); - private static final LZ4Decompressor defaultDecompressor = new LZ4Decompressor(); + private static final LZ4SafeDecompressor LZ4_SAFE = LZ4Factory.fastestInstance().safeDecompressor(); + private static final LZ4Decompressor DEFAULT_DECOMPRESSOR = new LZ4Decompressor(); @Override public void decompress(ByteBuffer in, int numBytes, ByteBuffer out) { // Since decompressed size is NOT known, must use lz4Safe // lz4Safe.decompress does not modify buffer positions - final int numDecompressedBytes = lz4Safe.decompress( + final int numDecompressedBytes = LZ4_SAFE.decompress( in, in.position(), numBytes, @@ -309,8 +309,8 @@ public enum CompressionStrategy public static class LZ4Compressor extends Compressor { - private static final LZ4Compressor defaultCompressor = new LZ4Compressor(); - private static final net.jpountz.lz4.LZ4Compressor lz4High = LZ4Factory.fastestInstance().highCompressor(); + private static final LZ4Compressor DEFAULT_COMPRESSOR = new LZ4Compressor(); + private static final net.jpountz.lz4.LZ4Compressor LZ4_HIGH = LZ4Factory.fastestInstance().highCompressor(); static { logLZ4State(); @@ -327,7 +327,7 @@ public enum CompressionStrategy @Override ByteBuffer allocateOutBuffer(int inputSize, Closer closer) { - ByteBuffer outBuffer = ByteBuffer.allocateDirect(lz4High.maxCompressedLength(inputSize)); + ByteBuffer outBuffer = ByteBuffer.allocateDirect(LZ4_HIGH.maxCompressedLength(inputSize)); closer.register(() -> ByteBufferUtils.free(outBuffer)); return outBuffer; } @@ -337,7 +337,7 @@ public enum CompressionStrategy { out.clear(); int position = in.position(); - lz4High.compress(in, out); + LZ4_HIGH.compress(in, out); in.position(position); out.flip(); return out; diff --git a/processing/src/main/java/org/apache/druid/segment/data/ConciseBitmapSerdeFactory.java b/processing/src/main/java/org/apache/druid/segment/data/ConciseBitmapSerdeFactory.java index 3788b9247a3..b81c971b370 100644 --- a/processing/src/main/java/org/apache/druid/segment/data/ConciseBitmapSerdeFactory.java +++ b/processing/src/main/java/org/apache/druid/segment/data/ConciseBitmapSerdeFactory.java @@ -31,19 +31,19 @@ import java.nio.ByteBuffer; */ public class ConciseBitmapSerdeFactory implements BitmapSerdeFactory { - private static final ObjectStrategy objectStrategy = new ImmutableConciseSetObjectStrategy(); - private static final BitmapFactory bitmapFactory = new ConciseBitmapFactory(); + private static final ObjectStrategy OBJECT_STRATEGY = new ImmutableConciseSetObjectStrategy(); + private static final BitmapFactory BITMAP_FACTORY = new ConciseBitmapFactory(); @Override public ObjectStrategy getObjectStrategy() { - return objectStrategy; + return OBJECT_STRATEGY; } @Override public BitmapFactory getBitmapFactory() { - return bitmapFactory; + return BITMAP_FACTORY; } private static class ImmutableConciseSetObjectStrategy implements ObjectStrategy diff --git a/processing/src/main/java/org/apache/druid/segment/data/EntireLayoutColumnarDoublesSerializer.java b/processing/src/main/java/org/apache/druid/segment/data/EntireLayoutColumnarDoublesSerializer.java index ed178638e18..540b0b71bb1 100644 ---
a/processing/src/main/java/org/apache/druid/segment/data/EntireLayoutColumnarDoublesSerializer.java +++ b/processing/src/main/java/org/apache/druid/segment/data/EntireLayoutColumnarDoublesSerializer.java @@ -34,7 +34,7 @@ import java.nio.channels.WritableByteChannel; */ public class EntireLayoutColumnarDoublesSerializer implements ColumnarDoublesSerializer { - private static final MetaSerdeHelper metaSerdeHelper = MetaSerdeHelper + private static final MetaSerdeHelper META_SERDE_HELPER = MetaSerdeHelper .firstWriteByte((EntireLayoutColumnarDoublesSerializer x) -> CompressedColumnarDoublesSuppliers.VERSION) .writeInt(x -> x.numInserted) .writeInt(x -> 0) @@ -71,13 +71,13 @@ public class EntireLayoutColumnarDoublesSerializer implements ColumnarDoublesSer @Override public long getSerializedSize() throws IOException { - return metaSerdeHelper.size(this) + valuesOut.size(); + return META_SERDE_HELPER.size(this) + valuesOut.size(); } @Override public void writeTo(WritableByteChannel channel, FileSmoosher smoosher) throws IOException { - metaSerdeHelper.writeTo(channel, this); + META_SERDE_HELPER.writeTo(channel, this); valuesOut.writeTo(channel); } } diff --git a/processing/src/main/java/org/apache/druid/segment/data/EntireLayoutColumnarFloatsSerializer.java b/processing/src/main/java/org/apache/druid/segment/data/EntireLayoutColumnarFloatsSerializer.java index dc7e6063afe..1209b76217f 100644 --- a/processing/src/main/java/org/apache/druid/segment/data/EntireLayoutColumnarFloatsSerializer.java +++ b/processing/src/main/java/org/apache/druid/segment/data/EntireLayoutColumnarFloatsSerializer.java @@ -33,7 +33,7 @@ import java.nio.channels.WritableByteChannel; */ public class EntireLayoutColumnarFloatsSerializer implements ColumnarFloatsSerializer { - private static final MetaSerdeHelper metaSerdeHelper = MetaSerdeHelper + private static final MetaSerdeHelper META_SERDE_HELPER = MetaSerdeHelper .firstWriteByte((EntireLayoutColumnarFloatsSerializer x) -> CompressedColumnarFloatsSupplier.VERSION) .writeInt(x -> x.numInserted) .writeInt(x -> 0) @@ -78,13 +78,13 @@ public class EntireLayoutColumnarFloatsSerializer implements ColumnarFloatsSeria @Override public long getSerializedSize() throws IOException { - return metaSerdeHelper.size(this) + valuesOut.size(); + return META_SERDE_HELPER.size(this) + valuesOut.size(); } @Override public void writeTo(WritableByteChannel channel, FileSmoosher smoosher) throws IOException { - metaSerdeHelper.writeTo(channel, this); + META_SERDE_HELPER.writeTo(channel, this); valuesOut.writeTo(channel); } } diff --git a/processing/src/main/java/org/apache/druid/segment/data/EntireLayoutColumnarLongsSerializer.java b/processing/src/main/java/org/apache/druid/segment/data/EntireLayoutColumnarLongsSerializer.java index 4a3c0126ba4..b29081bd0a5 100644 --- a/processing/src/main/java/org/apache/druid/segment/data/EntireLayoutColumnarLongsSerializer.java +++ b/processing/src/main/java/org/apache/druid/segment/data/EntireLayoutColumnarLongsSerializer.java @@ -32,7 +32,7 @@ import java.nio.channels.WritableByteChannel; */ public class EntireLayoutColumnarLongsSerializer implements ColumnarLongsSerializer { - private static final MetaSerdeHelper metaSerdeHelper = MetaSerdeHelper + private static final MetaSerdeHelper META_SERDE_HELPER = MetaSerdeHelper .firstWriteByte((EntireLayoutColumnarLongsSerializer x) -> CompressedColumnarLongsSupplier.VERSION) .writeInt(x -> x.numInserted) .writeInt(x -> 0) @@ -77,14 +77,14 @@ public class EntireLayoutColumnarLongsSerializer implements 
ColumnarLongsSeriali public long getSerializedSize() throws IOException { writer.flush(); - return metaSerdeHelper.size(this) + valuesOut.size(); + return META_SERDE_HELPER.size(this) + valuesOut.size(); } @Override public void writeTo(WritableByteChannel channel, FileSmoosher smoosher) throws IOException { writer.flush(); - metaSerdeHelper.writeTo(channel, this); + META_SERDE_HELPER.writeTo(channel, this); valuesOut.writeTo(channel); } } diff --git a/processing/src/main/java/org/apache/druid/segment/data/GenericIndexed.java b/processing/src/main/java/org/apache/druid/segment/data/GenericIndexed.java index 8a2bc5773e2..35c9a3fc8ae 100644 --- a/processing/src/main/java/org/apache/druid/segment/data/GenericIndexed.java +++ b/processing/src/main/java/org/apache/druid/segment/data/GenericIndexed.java @@ -85,7 +85,7 @@ public class GenericIndexed implements CloseableIndexed, Serializer static final int NULL_VALUE_SIZE_MARKER = -1; - private static final MetaSerdeHelper metaSerdeHelper = MetaSerdeHelper + private static final MetaSerdeHelper META_SERDE_HELPER = MetaSerdeHelper .firstWriteByte((GenericIndexed x) -> VERSION_ONE) .writeByte(x -> x.allowReverseLookup ? REVERSE_LOOKUP_ALLOWED : REVERSE_LOOKUP_DISALLOWED) .writeInt(x -> Ints.checkedCast(x.theBuffer.remaining() + (long) Integer.BYTES)) @@ -554,7 +554,7 @@ public class GenericIndexed implements CloseableIndexed, Serializer private long getSerializedSizeVersionOne() { - return metaSerdeHelper.size(this) + (long) theBuffer.remaining(); + return META_SERDE_HELPER.size(this) + (long) theBuffer.remaining(); } @Nullable @@ -612,7 +612,7 @@ public class GenericIndexed implements CloseableIndexed, Serializer private void writeToVersionOne(WritableByteChannel channel) throws IOException { - metaSerdeHelper.writeTo(channel, this); + META_SERDE_HELPER.writeTo(channel, this); Channels.writeFully(channel, theBuffer.asReadOnlyBuffer()); } diff --git a/processing/src/main/java/org/apache/druid/segment/data/GenericIndexedWriter.java b/processing/src/main/java/org/apache/druid/segment/data/GenericIndexedWriter.java index fd5c481f6cc..976c88f3c41 100644 --- a/processing/src/main/java/org/apache/druid/segment/data/GenericIndexedWriter.java +++ b/processing/src/main/java/org/apache/druid/segment/data/GenericIndexedWriter.java @@ -50,7 +50,7 @@ public class GenericIndexedWriter implements Serializer { private static int PAGE_SIZE = 4096; - private static final MetaSerdeHelper singleFileMetaSerdeHelper = MetaSerdeHelper + private static final MetaSerdeHelper SINGLE_FILE_META_SERDE_HELPER = MetaSerdeHelper .firstWriteByte((GenericIndexedWriter x) -> GenericIndexed.VERSION_ONE) .writeByte( x -> x.objectsSorted ? GenericIndexed.REVERSE_LOOKUP_ALLOWED : GenericIndexed.REVERSE_LOOKUP_DISALLOWED @@ -58,7 +58,7 @@ public class GenericIndexedWriter implements Serializer .writeInt(x -> Ints.checkedCast(x.headerOut.size() + x.valuesOut.size() + Integer.BYTES)) .writeInt(x -> x.numWritten); - private static final MetaSerdeHelper multiFileMetaSerdeHelper = MetaSerdeHelper + private static final MetaSerdeHelper MULTI_FILE_META_SERDE_HELPER = MetaSerdeHelper .firstWriteByte((GenericIndexedWriter x) -> GenericIndexed.VERSION_TWO) .writeByte( x -> x.objectsSorted ? GenericIndexed.REVERSE_LOOKUP_ALLOWED : GenericIndexed.REVERSE_LOOKUP_DISALLOWED @@ -276,9 +276,9 @@ public class GenericIndexedWriter implements Serializer { if (requireMultipleFiles) { // for multi-file version (version 2), getSerializedSize() returns number of bytes in meta file. 
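Nearly every serializer in this patch leans on the same MetaSerdeHelper idiom: a fluent chain of per-field writers, so that size() and writeTo() derive from one field list and can never disagree about the header layout. A minimal sketch of that pattern under assumed names (MiniMetaSerdeHelper is illustrative, not Druid's class):

    import java.io.IOException;
    import java.nio.ByteBuffer;
    import java.nio.channels.WritableByteChannel;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.function.ToIntFunction;

    final class MiniMetaSerdeHelper<T>
    {
      interface ByteGetter<T>
      {
        byte get(T x);
      }

      private interface Field<T>
      {
        int size(T x);

        void write(ByteBuffer buf, T x);
      }

      private final List<Field<T>> fields = new ArrayList<>();

      static <T> MiniMetaSerdeHelper<T> firstWriteByte(ByteGetter<T> getter)
      {
        return new MiniMetaSerdeHelper<T>().writeByte(getter);
      }

      MiniMetaSerdeHelper<T> writeByte(ByteGetter<T> getter)
      {
        fields.add(new Field<T>()
        {
          @Override
          public int size(T x)
          {
            return Byte.BYTES;
          }

          @Override
          public void write(ByteBuffer buf, T x)
          {
            buf.put(getter.get(x));
          }
        });
        return this;
      }

      MiniMetaSerdeHelper<T> writeInt(ToIntFunction<T> getter)
      {
        fields.add(new Field<T>()
        {
          @Override
          public int size(T x)
          {
            return Integer.BYTES;
          }

          @Override
          public void write(ByteBuffer buf, T x)
          {
            buf.putInt(getter.applyAsInt(x));
          }
        });
        return this;
      }

      // Header size comes from the same field list that writeTo consumes.
      int size(T x)
      {
        int total = 0;
        for (Field<T> field : fields) {
          total += field.size(x);
        }
        return total;
      }

      void writeTo(WritableByteChannel channel, T x) throws IOException
      {
        ByteBuffer buf = ByteBuffer.allocate(size(x));
        for (Field<T> field : fields) {
          field.write(buf, x);
        }
        buf.flip();
        while (buf.hasRemaining()) {
          channel.write(buf);
        }
      }
    }

Call sites then mirror the ones in this patch: the helper's size(this) inside getSerializedSize(), and writeTo(channel, this) at the start of writeTo().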
- return multiFileMetaSerdeHelper.size(this); + return MULTI_FILE_META_SERDE_HELPER.size(this); } else { - return singleFileMetaSerdeHelper.size(this) + headerOut.size() + valuesOut.size(); + return SINGLE_FILE_META_SERDE_HELPER.size(this) + headerOut.size() + valuesOut.size(); } } @@ -308,7 +308,7 @@ public class GenericIndexedWriter implements Serializer numBytesWritten ); - singleFileMetaSerdeHelper.writeTo(channel, this); + SINGLE_FILE_META_SERDE_HELPER.writeTo(channel, this); headerOut.writeTo(channel); valuesOut.writeTo(channel); } @@ -332,7 +332,7 @@ public class GenericIndexedWriter implements Serializer } int bagSizePower = bagSizePower(); - multiFileMetaSerdeHelper.writeTo(channel, this); + MULTI_FILE_META_SERDE_HELPER.writeTo(channel, this); long previousValuePosition = 0; int bagSize = 1 << bagSizePower; diff --git a/processing/src/main/java/org/apache/druid/segment/data/RoaringBitmapSerdeFactory.java b/processing/src/main/java/org/apache/druid/segment/data/RoaringBitmapSerdeFactory.java index 5f34d0bc674..0a923ab0b26 100644 --- a/processing/src/main/java/org/apache/druid/segment/data/RoaringBitmapSerdeFactory.java +++ b/processing/src/main/java/org/apache/druid/segment/data/RoaringBitmapSerdeFactory.java @@ -35,7 +35,7 @@ import java.nio.ByteBuffer; public class RoaringBitmapSerdeFactory implements BitmapSerdeFactory { private static final boolean DEFAULT_COMPRESS_RUN_ON_SERIALIZATION = true; - private static final ObjectStrategy objectStrategy = new ImmutableRoaringBitmapObjectStrategy(); + private static final ObjectStrategy OBJECT_STRATEGY = new ImmutableRoaringBitmapObjectStrategy(); private final boolean compressRunOnSerialization; private final BitmapFactory bitmapFactory; @@ -60,7 +60,7 @@ public class RoaringBitmapSerdeFactory implements BitmapSerdeFactory @Override public ObjectStrategy getObjectStrategy() { - return objectStrategy; + return OBJECT_STRATEGY; } @Override diff --git a/processing/src/main/java/org/apache/druid/segment/data/VSizeColumnarInts.java b/processing/src/main/java/org/apache/druid/segment/data/VSizeColumnarInts.java index d949e3c62e7..2523c51fad1 100644 --- a/processing/src/main/java/org/apache/druid/segment/data/VSizeColumnarInts.java +++ b/processing/src/main/java/org/apache/druid/segment/data/VSizeColumnarInts.java @@ -38,7 +38,7 @@ public class VSizeColumnarInts implements ColumnarInts, Comparable metaSerdeHelper = MetaSerdeHelper + private static final MetaSerdeHelper META_SERDE_HELPER = MetaSerdeHelper .firstWriteByte((VSizeColumnarInts x) -> VERSION) .writeByte(x -> ByteUtils.checkedCast(x.numBytes)) .writeInt(x -> x.buffer.remaining()); @@ -158,13 +158,13 @@ public class VSizeColumnarInts implements ColumnarInts, Comparable metaSerdeHelper = MetaSerdeHelper + private static final MetaSerdeHelper META_SERDE_HELPER = MetaSerdeHelper .firstWriteByte((VSizeColumnarIntsSerializer x) -> VERSION) .writeByte(x -> ByteUtils.checkedCast(x.numBytes)) .writeInt(x -> Ints.checkedCast(x.valuesOut.size())); @@ -79,14 +79,14 @@ public class VSizeColumnarIntsSerializer extends SingleValueColumnarIntsSerializ public long getSerializedSize() throws IOException { writeBufPadding(); - return metaSerdeHelper.size(this) + valuesOut.size(); + return META_SERDE_HELPER.size(this) + valuesOut.size(); } @Override public void writeTo(WritableByteChannel channel, FileSmoosher smoosher) throws IOException { writeBufPadding(); - metaSerdeHelper.writeTo(channel, this); + META_SERDE_HELPER.writeTo(channel, this); valuesOut.writeTo(channel); } diff --git 
a/processing/src/main/java/org/apache/druid/segment/data/VSizeColumnarMultiInts.java b/processing/src/main/java/org/apache/druid/segment/data/VSizeColumnarMultiInts.java index 7a21817feea..a783294cea8 100644 --- a/processing/src/main/java/org/apache/druid/segment/data/VSizeColumnarMultiInts.java +++ b/processing/src/main/java/org/apache/druid/segment/data/VSizeColumnarMultiInts.java @@ -40,7 +40,7 @@ public class VSizeColumnarMultiInts implements ColumnarMultiInts, WritableSuppli { private static final byte VERSION = 0x1; - private static final MetaSerdeHelper metaSerdeHelper = MetaSerdeHelper + private static final MetaSerdeHelper META_SERDE_HELPER = MetaSerdeHelper .firstWriteByte((VSizeColumnarMultiInts x) -> VERSION) .writeByte(x -> ByteUtils.checkedCast(x.numBytes)) .writeInt(x -> Ints.checkedCast(x.theBuffer.remaining() + (long) Integer.BYTES)) @@ -153,13 +153,13 @@ public class VSizeColumnarMultiInts implements ColumnarMultiInts, WritableSuppli @Override public long getSerializedSize() { - return metaSerdeHelper.size(this) + (long) theBuffer.remaining(); + return META_SERDE_HELPER.size(this) + (long) theBuffer.remaining(); } @Override public void writeTo(WritableByteChannel channel, FileSmoosher smoosher) throws IOException { - metaSerdeHelper.writeTo(channel, this); + META_SERDE_HELPER.writeTo(channel, this); Channels.writeFully(channel, theBuffer.asReadOnlyBuffer()); } diff --git a/processing/src/main/java/org/apache/druid/segment/data/VSizeColumnarMultiIntsSerializer.java b/processing/src/main/java/org/apache/druid/segment/data/VSizeColumnarMultiIntsSerializer.java index 6a85a46d56d..16fe48ddf68 100644 --- a/processing/src/main/java/org/apache/druid/segment/data/VSizeColumnarMultiIntsSerializer.java +++ b/processing/src/main/java/org/apache/druid/segment/data/VSizeColumnarMultiIntsSerializer.java @@ -38,7 +38,7 @@ public class VSizeColumnarMultiIntsSerializer extends ColumnarMultiIntsSerialize { private static final byte VERSION = 0x1; - private static final MetaSerdeHelper metaSerdeHelper = MetaSerdeHelper + private static final MetaSerdeHelper META_SERDE_HELPER = MetaSerdeHelper .firstWriteByte((VSizeColumnarMultiIntsSerializer x) -> VERSION) .writeByte(x -> VSizeColumnarInts.getNumBytesForMax(x.maxId)) .writeInt(x -> Ints.checkedCast(x.headerOut.size() + x.valuesOut.size() + Integer.BYTES)) @@ -126,7 +126,7 @@ public class VSizeColumnarMultiIntsSerializer extends ColumnarMultiIntsSerialize public long getSerializedSize() throws IOException { writeNumBytesForMax(); - return metaSerdeHelper.size(this) + headerOut.size() + valuesOut.size(); + return META_SERDE_HELPER.size(this) + headerOut.size() + valuesOut.size(); } @Override @@ -149,7 +149,7 @@ public class VSizeColumnarMultiIntsSerializer extends ColumnarMultiIntsSerialize numBytesWritten ); - metaSerdeHelper.writeTo(channel, this); + META_SERDE_HELPER.writeTo(channel, this); headerOut.writeTo(channel); valuesOut.writeTo(channel); } diff --git a/processing/src/main/java/org/apache/druid/segment/serde/ComplexMetrics.java b/processing/src/main/java/org/apache/druid/segment/serde/ComplexMetrics.java index 4d1331af848..74dfcfe1e42 100644 --- a/processing/src/main/java/org/apache/druid/segment/serde/ComplexMetrics.java +++ b/processing/src/main/java/org/apache/druid/segment/serde/ComplexMetrics.java @@ -29,27 +29,27 @@ import java.util.Map; */ public class ComplexMetrics { - private static final Map complexSerializers = new HashMap<>(); + private static final Map COMPLEX_SERIALIZERS = new HashMap<>(); @Nullable public static 
ComplexMetricSerde getSerdeForType(String type) { - return complexSerializers.get(type); + return COMPLEX_SERIALIZERS.get(type); } public static void registerSerde(String type, ComplexMetricSerde serde) { - if (complexSerializers.containsKey(type)) { - if (!complexSerializers.get(type).getClass().getName().equals(serde.getClass().getName())) { + if (COMPLEX_SERIALIZERS.containsKey(type)) { + if (!COMPLEX_SERIALIZERS.get(type).getClass().getName().equals(serde.getClass().getName())) { throw new ISE( "Incompatible serializer for type[%s] already exists. Expected [%s], found [%s].", type, serde.getClass().getName(), - complexSerializers.get(type).getClass().getName() + COMPLEX_SERIALIZERS.get(type).getClass().getName() ); } } else { - complexSerializers.put(type, serde); + COMPLEX_SERIALIZERS.put(type, serde); } } } diff --git a/processing/src/test/java/org/apache/druid/collections/bitmap/BitmapBenchmark.java b/processing/src/test/java/org/apache/druid/collections/bitmap/BitmapBenchmark.java index 52d550e2b1b..b12158bdbc1 100644 --- a/processing/src/test/java/org/apache/druid/collections/bitmap/BitmapBenchmark.java +++ b/processing/src/test/java/org/apache/druid/collections/bitmap/BitmapBenchmark.java @@ -47,15 +47,15 @@ public class BitmapBenchmark { public static final int LENGTH = 500_000; public static final int SIZE = 10_000; - static final ImmutableConciseSet concise[] = new ImmutableConciseSet[SIZE]; - static final ImmutableConciseSet offheapConcise[] = new ImmutableConciseSet[SIZE]; - static final ImmutableRoaringBitmap roaring[] = new ImmutableRoaringBitmap[SIZE]; - static final ImmutableRoaringBitmap immutableRoaring[] = new ImmutableRoaringBitmap[SIZE]; - static final ImmutableRoaringBitmap offheapRoaring[] = new ImmutableRoaringBitmap[SIZE]; - static final ImmutableBitmap genericConcise[] = new ImmutableBitmap[SIZE]; - static final ImmutableBitmap genericRoaring[] = new ImmutableBitmap[SIZE]; - static final ConciseBitmapFactory conciseFactory = new ConciseBitmapFactory(); - static final RoaringBitmapFactory roaringFactory = new RoaringBitmapFactory(); + static final ImmutableConciseSet CONCISE[] = new ImmutableConciseSet[SIZE]; + static final ImmutableConciseSet OFF_HEAP_CONCISE[] = new ImmutableConciseSet[SIZE]; + static final ImmutableRoaringBitmap ROARING[] = new ImmutableRoaringBitmap[SIZE]; + static final ImmutableRoaringBitmap IMMUTABLE_ROARING[] = new ImmutableRoaringBitmap[SIZE]; + static final ImmutableRoaringBitmap OFF_HEAP_ROARING[] = new ImmutableRoaringBitmap[SIZE]; + static final ImmutableBitmap GENERIC_CONCISE[] = new ImmutableBitmap[SIZE]; + static final ImmutableBitmap GENERIC_ROARING[] = new ImmutableBitmap[SIZE]; + static final ConciseBitmapFactory CONCISE_FACTORY = new ConciseBitmapFactory(); + static final RoaringBitmapFactory ROARING_FACTORY = new RoaringBitmapFactory(); static Random rand = new Random(0); static long totalConciseBytes = 0; static long totalRoaringBytes = 0; @@ -136,7 +136,7 @@ public class BitmapBenchmark @BenchmarkOptions(warmupRounds = 1, benchmarkRounds = 2) public void timeConciseUnion() { - ImmutableConciseSet union = ImmutableConciseSet.union(concise); + ImmutableConciseSet union = ImmutableConciseSet.union(CONCISE); Assert.assertEquals(unionCount, union.size()); } @@ -144,7 +144,7 @@ public class BitmapBenchmark @BenchmarkOptions(warmupRounds = 1, benchmarkRounds = 2) public void timeOffheapConciseUnion() { - ImmutableConciseSet union = ImmutableConciseSet.union(offheapConcise); + ImmutableConciseSet union = 
ImmutableConciseSet.union(OFF_HEAP_CONCISE); Assert.assertEquals(unionCount, union.size()); } @@ -152,7 +152,7 @@ public class BitmapBenchmark @BenchmarkOptions(warmupRounds = 1, benchmarkRounds = 2) public void timeGenericConciseUnion() { - ImmutableBitmap union = conciseFactory.union(Arrays.asList(genericConcise)); + ImmutableBitmap union = CONCISE_FACTORY.union(Arrays.asList(GENERIC_CONCISE)); Assert.assertEquals(unionCount, union.size()); } @@ -160,42 +160,42 @@ public class BitmapBenchmark @BenchmarkOptions(warmupRounds = 1, benchmarkRounds = 5) public void timeGenericConciseIntersection() { - ImmutableBitmap intersection = conciseFactory.intersection(Arrays.asList(genericConcise)); + ImmutableBitmap intersection = CONCISE_FACTORY.intersection(Arrays.asList(GENERIC_CONCISE)); Assert.assertTrue(intersection.size() >= minIntersection); } @Test public void timeRoaringUnion() { - ImmutableRoaringBitmap union = BufferFastAggregation.horizontal_or(Arrays.asList(roaring).iterator()); + ImmutableRoaringBitmap union = BufferFastAggregation.horizontal_or(Arrays.asList(ROARING).iterator()); Assert.assertEquals(unionCount, union.getCardinality()); } @Test public void timeImmutableRoaringUnion() { - ImmutableRoaringBitmap union = BufferFastAggregation.horizontal_or(Arrays.asList(immutableRoaring).iterator()); + ImmutableRoaringBitmap union = BufferFastAggregation.horizontal_or(Arrays.asList(IMMUTABLE_ROARING).iterator()); Assert.assertEquals(unionCount, union.getCardinality()); } @Test public void timeOffheapRoaringUnion() { - ImmutableRoaringBitmap union = BufferFastAggregation.horizontal_or(Arrays.asList(offheapRoaring).iterator()); + ImmutableRoaringBitmap union = BufferFastAggregation.horizontal_or(Arrays.asList(OFF_HEAP_ROARING).iterator()); Assert.assertEquals(unionCount, union.getCardinality()); } @Test public void timeGenericRoaringUnion() { - ImmutableBitmap union = roaringFactory.union(Arrays.asList(genericRoaring)); + ImmutableBitmap union = ROARING_FACTORY.union(Arrays.asList(GENERIC_ROARING)); Assert.assertEquals(unionCount, union.size()); } @Test public void timeGenericRoaringIntersection() { - ImmutableBitmap intersection = roaringFactory.intersection(Arrays.asList(genericRoaring)); + ImmutableBitmap intersection = ROARING_FACTORY.intersection(Arrays.asList(GENERIC_ROARING)); Assert.assertTrue(intersection.size() >= minIntersection); } } diff --git a/processing/src/test/java/org/apache/druid/collections/bitmap/RangeBitmapBenchmarkTest.java b/processing/src/test/java/org/apache/druid/collections/bitmap/RangeBitmapBenchmarkTest.java index f84941297f8..67e949312d9 100644 --- a/processing/src/test/java/org/apache/druid/collections/bitmap/RangeBitmapBenchmarkTest.java +++ b/processing/src/test/java/org/apache/druid/collections/bitmap/RangeBitmapBenchmarkTest.java @@ -72,13 +72,13 @@ public class RangeBitmapBenchmarkTest extends BitmapBenchmark r.add(k); expectedUnion.set(k); } - concise[i] = ImmutableConciseSet.newImmutableFromMutable(c); - offheapConcise[i] = makeOffheapConcise(concise[i]); - roaring[i] = r; - immutableRoaring[i] = makeImmutableRoaring(r); - offheapRoaring[i] = makeOffheapRoaring(r); - genericConcise[i] = new WrappedImmutableConciseBitmap(offheapConcise[i]); - genericRoaring[i] = new WrappedImmutableRoaringBitmap(offheapRoaring[i]); + CONCISE[i] = ImmutableConciseSet.newImmutableFromMutable(c); + OFF_HEAP_CONCISE[i] = makeOffheapConcise(CONCISE[i]); + ROARING[i] = r; + IMMUTABLE_ROARING[i] = makeImmutableRoaring(r); + OFF_HEAP_ROARING[i] = makeOffheapRoaring(r); + 
GENERIC_CONCISE[i] = new WrappedImmutableConciseBitmap(OFF_HEAP_CONCISE[i]); + GENERIC_ROARING[i] = new WrappedImmutableRoaringBitmap(OFF_HEAP_ROARING[i]); } unionCount = expectedUnion.cardinality(); printSizeStats(DENSITY, "Random Alternating Bitmap"); diff --git a/processing/src/test/java/org/apache/druid/collections/bitmap/UniformBitmapBenchmarkTest.java b/processing/src/test/java/org/apache/druid/collections/bitmap/UniformBitmapBenchmarkTest.java index 74ecdaa32c1..9f3eeef5004 100644 --- a/processing/src/test/java/org/apache/druid/collections/bitmap/UniformBitmapBenchmarkTest.java +++ b/processing/src/test/java/org/apache/druid/collections/bitmap/UniformBitmapBenchmarkTest.java @@ -68,13 +68,13 @@ public class UniformBitmapBenchmarkTest extends BitmapBenchmark r.add(k); expectedUnion.set(k); } - concise[i] = ImmutableConciseSet.newImmutableFromMutable(c); - offheapConcise[i] = makeOffheapConcise(concise[i]); - roaring[i] = r; - immutableRoaring[i] = makeImmutableRoaring(r); - offheapRoaring[i] = makeOffheapRoaring(r); - genericConcise[i] = new WrappedImmutableConciseBitmap(offheapConcise[i]); - genericRoaring[i] = new WrappedImmutableRoaringBitmap(offheapRoaring[i]); + CONCISE[i] = ImmutableConciseSet.newImmutableFromMutable(c); + OFF_HEAP_CONCISE[i] = makeOffheapConcise(CONCISE[i]); + ROARING[i] = r; + IMMUTABLE_ROARING[i] = makeImmutableRoaring(r); + OFF_HEAP_ROARING[i] = makeOffheapRoaring(r); + GENERIC_CONCISE[i] = new WrappedImmutableConciseBitmap(OFF_HEAP_CONCISE[i]); + GENERIC_ROARING[i] = new WrappedImmutableRoaringBitmap(OFF_HEAP_ROARING[i]); } unionCount = expectedUnion.cardinality(); minIntersection = knownTrue.length; diff --git a/processing/src/test/java/org/apache/druid/query/DataSourceTest.java b/processing/src/test/java/org/apache/druid/query/DataSourceTest.java index 750331b8557..537650881b1 100644 --- a/processing/src/test/java/org/apache/druid/query/DataSourceTest.java +++ b/processing/src/test/java/org/apache/druid/query/DataSourceTest.java @@ -32,28 +32,28 @@ import java.io.IOException; public class DataSourceTest { - private static final ObjectMapper jsonMapper = TestHelper.makeJsonMapper(); + private static final ObjectMapper JSON_MAPPER = TestHelper.makeJsonMapper(); @Test public void testSerialization() throws IOException { DataSource dataSource = new TableDataSource("somedatasource"); - String json = jsonMapper.writeValueAsString(dataSource); - DataSource serdeDataSource = jsonMapper.readValue(json, DataSource.class); + String json = JSON_MAPPER.writeValueAsString(dataSource); + DataSource serdeDataSource = JSON_MAPPER.readValue(json, DataSource.class); Assert.assertEquals(dataSource, serdeDataSource); } @Test public void testLegacyDataSource() throws IOException { - DataSource dataSource = jsonMapper.readValue("\"somedatasource\"", DataSource.class); + DataSource dataSource = JSON_MAPPER.readValue("\"somedatasource\"", DataSource.class); Assert.assertEquals(new TableDataSource("somedatasource"), dataSource); } @Test public void testTableDataSource() throws IOException { - DataSource dataSource = jsonMapper.readValue("{\"type\":\"table\", \"name\":\"somedatasource\"}", DataSource.class); + DataSource dataSource = JSON_MAPPER.readValue("{\"type\":\"table\", \"name\":\"somedatasource\"}", DataSource.class); Assert.assertEquals(new TableDataSource("somedatasource"), dataSource); } @@ -62,23 +62,23 @@ public class DataSourceTest { GroupByQuery query = GroupByQuery .builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - 
.setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); - String dataSourceJSON = "{\"type\":\"query\", \"query\":" + jsonMapper.writeValueAsString(query) + "}"; + String dataSourceJSON = "{\"type\":\"query\", \"query\":" + JSON_MAPPER.writeValueAsString(query) + "}"; - DataSource dataSource = jsonMapper.readValue(dataSourceJSON, DataSource.class); + DataSource dataSource = JSON_MAPPER.readValue(dataSourceJSON, DataSource.class); Assert.assertEquals(new QueryDataSource(query), dataSource); } @Test public void testUnionDataSource() throws Exception { - DataSource dataSource = jsonMapper.readValue( + DataSource dataSource = JSON_MAPPER.readValue( "{\"type\":\"union\", \"dataSources\":[\"ds1\", \"ds2\"]}", DataSource.class ); @@ -92,7 +92,7 @@ public class DataSourceTest Lists.newArrayList(dataSource.getNames()) ); - final DataSource serde = jsonMapper.readValue(jsonMapper.writeValueAsString(dataSource), DataSource.class); + final DataSource serde = JSON_MAPPER.readValue(JSON_MAPPER.writeValueAsString(dataSource), DataSource.class); Assert.assertEquals(dataSource, serde); } diff --git a/processing/src/test/java/org/apache/druid/query/DefaultQueryMetricsTest.java b/processing/src/test/java/org/apache/druid/query/DefaultQueryMetricsTest.java index fad50e3c970..871f64546bd 100644 --- a/processing/src/test/java/org/apache/druid/query/DefaultQueryMetricsTest.java +++ b/processing/src/test/java/org/apache/druid/query/DefaultQueryMetricsTest.java @@ -59,7 +59,7 @@ public class DefaultQueryMetricsTest null )) .metric("count") - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators(new CountAggregatorFactory("count")) .threshold(5) .filters(new SelectorDimFilter("tags", "t3", null)) @@ -74,7 +74,7 @@ public class DefaultQueryMetricsTest Assert.assertEquals("", actualEvent.get("service")); Assert.assertEquals("xx", actualEvent.get(DruidMetrics.DATASOURCE)); Assert.assertEquals(query.getType(), actualEvent.get(DruidMetrics.TYPE)); - List expectedIntervals = QueryRunnerTestHelper.fullOnIntervalSpec.getIntervals(); + List expectedIntervals = QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC.getIntervals(); List expectedStringIntervals = expectedIntervals.stream().map(Interval::toString).collect(Collectors.toList()); Assert.assertEquals(expectedStringIntervals, actualEvent.get(DruidMetrics.INTERVAL)); diff --git a/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java b/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java index 690461aa4fe..bfb0503f047 100644 --- a/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java +++ b/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java @@ -85,13 +85,13 @@ public class DoubleStorageTest QueryRunnerTestHelper.NOOP_QUERYWATCHER ); - private static final ScanQueryQueryToolChest scanQueryQueryToolChest = new ScanQueryQueryToolChest( + private static final ScanQueryQueryToolChest SCAN_QUERY_QUERY_TOOL_CHEST = new ScanQueryQueryToolChest( new ScanQueryConfig(), 
DefaultGenericQueryMetricsFactory.instance() ); private static final ScanQueryRunnerFactory SCAN_QUERY_RUNNER_FACTORY = new ScanQueryRunnerFactory( - scanQueryQueryToolChest, + SCAN_QUERY_QUERY_TOOL_CHEST, new ScanQueryEngine(), new ScanQueryConfig() ); @@ -99,9 +99,9 @@ public class DoubleStorageTest private Druids.ScanQueryBuilder newTestQuery() { return Druids.newScanQueryBuilder() - .dataSource(new TableDataSource(QueryRunnerTestHelper.dataSource)) + .dataSource(new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE)) .columns(Collections.emptyList()) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .limit(Integer.MAX_VALUE) .legacy(false); } diff --git a/processing/src/test/java/org/apache/druid/query/DruidProcessingConfigTest.java b/processing/src/test/java/org/apache/druid/query/DruidProcessingConfigTest.java index 4a08f78009f..aaee806ed5e 100644 --- a/processing/src/test/java/org/apache/druid/query/DruidProcessingConfigTest.java +++ b/processing/src/test/java/org/apache/druid/query/DruidProcessingConfigTest.java @@ -36,10 +36,10 @@ import java.util.Properties; */ public class DruidProcessingConfigTest { - private static final long bufferSize = 1024L * 1024L * 1024L; - private static final int numProcessors = 4; - private static final long directSize = bufferSize * (3L + 2L + 1L); - private static final long heapSize = bufferSize * 2L; + private static final long BUFFER_SIZE = 1024L * 1024L * 1024L; + private static final int NUM_PROCESSORS = 4; + private static final long DIRECT_SIZE = BUFFER_SIZE * (3L + 2L + 1L); + private static final long HEAP_SIZE = BUFFER_SIZE * 2L; private static Injector makeInjector(int numProcessors, long directMemorySize, long heapSize) { @@ -77,22 +77,22 @@ public class DruidProcessingConfigTest @Test public void testDefaultsMultiProcessor() { - Injector injector = makeInjector(numProcessors, directSize, heapSize); + Injector injector = makeInjector(NUM_PROCESSORS, DIRECT_SIZE, HEAP_SIZE); DruidProcessingConfig config = injector.getInstance(DruidProcessingConfig.class); Assert.assertEquals(Integer.MAX_VALUE, config.poolCacheMaxCount()); - Assert.assertEquals(numProcessors - 1, config.getNumThreads()); + Assert.assertEquals(NUM_PROCESSORS - 1, config.getNumThreads()); Assert.assertEquals(Math.max(2, config.getNumThreads() / 4), config.getNumMergeBuffers()); Assert.assertEquals(0, config.columnCacheSizeBytes()); Assert.assertFalse(config.isFifo()); Assert.assertEquals(System.getProperty("java.io.tmpdir"), config.getTmpDir()); - Assert.assertEquals(bufferSize, config.intermediateComputeSizeBytes()); + Assert.assertEquals(BUFFER_SIZE, config.intermediateComputeSizeBytes()); } @Test public void testDefaultsSingleProcessor() { - Injector injector = makeInjector(1, bufferSize * 4L, heapSize); + Injector injector = makeInjector(1, BUFFER_SIZE * 4L, HEAP_SIZE); DruidProcessingConfig config = injector.getInstance(DruidProcessingConfig.class); Assert.assertEquals(Integer.MAX_VALUE, config.poolCacheMaxCount()); @@ -101,14 +101,14 @@ public class DruidProcessingConfigTest Assert.assertEquals(0, config.columnCacheSizeBytes()); Assert.assertFalse(config.isFifo()); Assert.assertEquals(System.getProperty("java.io.tmpdir"), config.getTmpDir()); - Assert.assertEquals(bufferSize, config.intermediateComputeSizeBytes()); + Assert.assertEquals(BUFFER_SIZE, config.intermediateComputeSizeBytes()); } @Test public void testDefaultsLargeDirect() { // test that auto sized buffer is no larger than 1 - Injector injector = 
makeInjector(1, bufferSize * 100L, heapSize); + Injector injector = makeInjector(1, BUFFER_SIZE * 100L, HEAP_SIZE); DruidProcessingConfig config = injector.getInstance(DruidProcessingConfig.class); Assert.assertEquals( @@ -129,9 +129,9 @@ public class DruidProcessingConfigTest props.setProperty("druid.processing.tmpDir", "/test/path"); Injector injector = makeInjector( - numProcessors, - directSize, - heapSize, + NUM_PROCESSORS, + DIRECT_SIZE, + HEAP_SIZE, props, ImmutableMap.of("base_path", "druid.processing") ); diff --git a/processing/src/test/java/org/apache/druid/query/MultiValuedDimensionTest.java b/processing/src/test/java/org/apache/druid/query/MultiValuedDimensionTest.java index 7ec9ed91307..b570677a0ed 100644 --- a/processing/src/test/java/org/apache/druid/query/MultiValuedDimensionTest.java +++ b/processing/src/test/java/org/apache/druid/query/MultiValuedDimensionTest.java @@ -1014,7 +1014,7 @@ public class MultiValuedDimensionTest null )) .metric("count") - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators(new CountAggregatorFactory("count")) .threshold(5) .filters(new SelectorDimFilter("tags", "t3", null)) @@ -1068,7 +1068,7 @@ public class MultiValuedDimensionTest ) ) .metric("count") - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators(new CountAggregatorFactory("count")) .threshold(15) .build(); @@ -1128,7 +1128,7 @@ public class MultiValuedDimensionTest ) ) .metric("count") - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators(new CountAggregatorFactory("count")) .threshold(15) .build(); diff --git a/processing/src/test/java/org/apache/druid/query/QueryRunnerTestHelper.java b/processing/src/test/java/org/apache/druid/query/QueryRunnerTestHelper.java index f0ff59f83a7..bb7ddd4b7df 100644 --- a/processing/src/test/java/org/apache/druid/query/QueryRunnerTestHelper.java +++ b/processing/src/test/java/org/apache/druid/query/QueryRunnerTestHelper.java @@ -88,40 +88,40 @@ public class QueryRunnerTestHelper public static final QueryWatcher NOOP_QUERYWATCHER = (query, future) -> { }; - public static final String dataSource = "testing"; - public static final Interval fullOnInterval = Intervals.of("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z"); - public static final SegmentId segmentId = SegmentId.of(dataSource, fullOnInterval, "dummy_version", 0); - public static final UnionDataSource unionDataSource = new UnionDataSource( - Stream.of(dataSource, dataSource, dataSource, dataSource).map(TableDataSource::new).collect(Collectors.toList()) + public static final String DATA_SOURCE = "testing"; + public static final Interval FULL_ON_INTERVAL = Intervals.of("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z"); + public static final SegmentId SEGMENT_ID = SegmentId.of(DATA_SOURCE, FULL_ON_INTERVAL, "dummy_version", 0); + public static final UnionDataSource UNION_DATA_SOURCE = new UnionDataSource( + Stream.of(DATA_SOURCE, DATA_SOURCE, DATA_SOURCE, DATA_SOURCE).map(TableDataSource::new).collect(Collectors.toList()) ); - public static final Granularity dayGran = Granularities.DAY; - public static final Granularity allGran = Granularities.ALL; - public static final Granularity monthGran = Granularities.MONTH; - public static final String timeDimension = "__time"; - public static final String marketDimension = "market"; - public static final String qualityDimension = 
"quality"; - public static final String placementDimension = "placement"; - public static final String placementishDimension = "placementish"; - public static final String partialNullDimension = "partial_null_column"; + public static final Granularity DAY_GRAN = Granularities.DAY; + public static final Granularity ALL_GRAN = Granularities.ALL; + public static final Granularity MONTH_GRAN = Granularities.MONTH; + public static final String TIME_DIMENSION = "__time"; + public static final String MARKET_DIMENSION = "market"; + public static final String QUALITY_DIMENSION = "quality"; + public static final String PLACEMENT_DIMENSION = "placement"; + public static final String PLACEMENTISH_DIMENSION = "placementish"; + public static final String PARTIAL_NULL_DIMENSION = "partial_null_column"; - public static final List dimensions = Lists.newArrayList( - marketDimension, - qualityDimension, - placementDimension, - placementishDimension + public static final List DIMENSIONS = Lists.newArrayList( + MARKET_DIMENSION, + QUALITY_DIMENSION, + PLACEMENT_DIMENSION, + PLACEMENTISH_DIMENSION ); - public static final String indexMetric = "index"; - public static final String uniqueMetric = "uniques"; - public static final String addRowsIndexConstantMetric = "addRowsIndexConstant"; + public static final String INDEX_METRIC = "index"; + public static final String UNIQUE_METRIC = "uniques"; + public static final String ADD_ROWS_INDEX_CONSTANT_METRIC = "addRowsIndexConstant"; public static String dependentPostAggMetric = "dependentPostAgg"; - public static final CountAggregatorFactory rowsCount = new CountAggregatorFactory("rows"); - public static final LongSumAggregatorFactory indexLongSum = new LongSumAggregatorFactory("index", indexMetric); - public static final LongSumAggregatorFactory __timeLongSum = new LongSumAggregatorFactory("sumtime", timeDimension); - public static final DoubleSumAggregatorFactory indexDoubleSum = new DoubleSumAggregatorFactory("index", indexMetric); + public static final CountAggregatorFactory ROWS_COUNT = new CountAggregatorFactory("rows"); + public static final LongSumAggregatorFactory INDEX_LONG_SUM = new LongSumAggregatorFactory("index", INDEX_METRIC); + public static final LongSumAggregatorFactory TIME_LONG_SUM = new LongSumAggregatorFactory("sumtime", TIME_DIMENSION); + public static final DoubleSumAggregatorFactory INDEX_DOUBLE_SUM = new DoubleSumAggregatorFactory("index", INDEX_METRIC); public static final String JS_COMBINE_A_PLUS_B = "function combine(a, b) { return a + b; }"; public static final String JS_RESET_0 = "function reset() { return 0; }"; - public static final JavaScriptAggregatorFactory jsIndexSumIfPlacementishA = new JavaScriptAggregatorFactory( + public static final JavaScriptAggregatorFactory JS_INDEX_SUM_IF_PLACEMENTISH_A = new JavaScriptAggregatorFactory( "nindex", Arrays.asList("placementish", "index"), "function aggregate(current, a, b) { if ((Array.isArray(a) && a.indexOf('a') > -1) || a === 'a') { return current + b; } else { return current; } }", @@ -129,7 +129,7 @@ public class QueryRunnerTestHelper JS_COMBINE_A_PLUS_B, JavaScriptConfig.getEnabledInstance() ); - public static final JavaScriptAggregatorFactory jsCountIfTimeGreaterThan = new JavaScriptAggregatorFactory( + public static final JavaScriptAggregatorFactory JS_COUNT_IF_TIME_GREATER_THAN = new JavaScriptAggregatorFactory( "ntimestamps", Collections.singletonList("__time"), "function aggregate(current, t) { if (t > " + @@ -139,7 +139,7 @@ public class QueryRunnerTestHelper 
JS_COMBINE_A_PLUS_B, JavaScriptConfig.getEnabledInstance() ); - public static final JavaScriptAggregatorFactory jsPlacementishCount = new JavaScriptAggregatorFactory( + public static final JavaScriptAggregatorFactory JS_PLACEMENTISH_COUNT = new JavaScriptAggregatorFactory( "pishcount", Arrays.asList("placementish", "index"), "function aggregate(current, a) { if (Array.isArray(a)) { return current + a.length; } else if (typeof a === 'string') { return current + 1; } else { return current; } }", @@ -147,57 +147,57 @@ JS_COMBINE_A_PLUS_B, JavaScriptConfig.getEnabledInstance() ); - public static final HyperUniquesAggregatorFactory qualityUniques = new HyperUniquesAggregatorFactory( + public static final HyperUniquesAggregatorFactory QUALITY_UNIQUES = new HyperUniquesAggregatorFactory( "uniques", "quality_uniques" ); - public static final HyperUniquesAggregatorFactory qualityUniquesRounded = new HyperUniquesAggregatorFactory( + public static final HyperUniquesAggregatorFactory QUALITY_UNIQUES_ROUNDED = new HyperUniquesAggregatorFactory( "uniques", "quality_uniques", false, true ); - public static final CardinalityAggregatorFactory qualityCardinality = new CardinalityAggregatorFactory( + public static final CardinalityAggregatorFactory QUALITY_CARDINALITY = new CardinalityAggregatorFactory( "cardinality", Collections.singletonList(new DefaultDimensionSpec("quality", "quality")), false ); - public static final ConstantPostAggregator constant = new ConstantPostAggregator("const", 1L); - public static final FieldAccessPostAggregator rowsPostAgg = new FieldAccessPostAggregator("rows", "rows"); - public static final FieldAccessPostAggregator indexPostAgg = new FieldAccessPostAggregator("index", "index"); - public static final ArithmeticPostAggregator addRowsIndexConstant = new ArithmeticPostAggregator( - addRowsIndexConstantMetric, + public static final ConstantPostAggregator CONSTANT = new ConstantPostAggregator("const", 1L); + public static final FieldAccessPostAggregator ROWS_POST_AGG = new FieldAccessPostAggregator("rows", "rows"); + public static final FieldAccessPostAggregator INDEX_POST_AGG = new FieldAccessPostAggregator("index", "index"); + public static final ArithmeticPostAggregator ADD_ROWS_INDEX_CONSTANT = new ArithmeticPostAggregator( + ADD_ROWS_INDEX_CONSTANT_METRIC, "+", - Lists.newArrayList(constant, rowsPostAgg, indexPostAgg) + Lists.newArrayList(CONSTANT, ROWS_POST_AGG, INDEX_POST_AGG) ); // dependent on the addRowsIndexConstant postAgg - public static final ArithmeticPostAggregator dependentPostAgg = new ArithmeticPostAggregator( + public static final ArithmeticPostAggregator DEPENDENT_POST_AGG = new ArithmeticPostAggregator( dependentPostAggMetric, "+", Lists.newArrayList( - constant, - new FieldAccessPostAggregator(addRowsIndexConstantMetric, addRowsIndexConstantMetric), + CONSTANT, + new FieldAccessPostAggregator(ADD_ROWS_INDEX_CONSTANT_METRIC, ADD_ROWS_INDEX_CONSTANT_METRIC), new FieldAccessPostAggregator("rows", "rows") ) ); - public static final String hyperUniqueFinalizingPostAggMetric = "hyperUniqueFinalizingPostAggMetric"; + public static final String HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC = "hyperUniqueFinalizingPostAggMetric"; public static ArithmeticPostAggregator hyperUniqueFinalizingPostAgg = new ArithmeticPostAggregator( - hyperUniqueFinalizingPostAggMetric, + HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, "+", Lists.newArrayList( - new HyperUniqueFinalizingPostAggregator(uniqueMetric, uniqueMetric), + new 
HyperUniqueFinalizingPostAggregator(UNIQUE_METRIC, UNIQUE_METRIC), new ConstantPostAggregator(null, 1) ) ); - public static final List commonDoubleAggregators = Arrays.asList( - rowsCount, - indexDoubleSum, - qualityUniques + public static final List COMMON_DOUBLE_AGGREGATORS = Arrays.asList( + ROWS_COUNT, + INDEX_DOUBLE_SUM, + QUALITY_UNIQUES ); - public static final List commonFloatAggregators = Arrays.asList( + public static final List COMMON_FLOAT_AGGREGATORS = Arrays.asList( new FloatSumAggregatorFactory("index", "indexFloat"), new CountAggregatorFactory("rows"), new HyperUniquesAggregatorFactory( @@ -210,7 +210,7 @@ public class QueryRunnerTestHelper public static final double UNIQUES_2 = 2.000977198748901d; public static final double UNIQUES_1 = 1.0002442201269182d; - public static final String[] expectedFullOnIndexValues = new String[]{ + public static final String[] EXPECTED_FULL_ON_INDEX_VALUES = new String[]{ "4500.0", "6077.949111938477", "4922.488838195801", "5726.140853881836", "4698.468170166016", "4651.030891418457", "4398.145851135254", "4596.068244934082", "4434.630561828613", "0.0", "6162.801361083984", "5590.292701721191", "4994.298484802246", "5179.679672241211", "6288.556800842285", @@ -232,30 +232,30 @@ public class QueryRunnerTestHelper "5506.567192077637", "4743.144546508789", "4913.282669067383", "4723.869743347168" }; - public static final String[] expectedFullOnIndexValuesDesc; + public static final String[] EXPECTED_FULL_ON_INDEX_VALUES_DESC; static { - List list = new ArrayList<>(Arrays.asList(expectedFullOnIndexValues)); + List list = new ArrayList<>(Arrays.asList(EXPECTED_FULL_ON_INDEX_VALUES)); Collections.reverse(list); - expectedFullOnIndexValuesDesc = list.toArray(new String[0]); + EXPECTED_FULL_ON_INDEX_VALUES_DESC = list.toArray(new String[0]); } - public static final DateTime earliest = DateTimes.of("2011-01-12"); - public static final DateTime last = DateTimes.of("2011-04-15"); + public static final DateTime EARLIEST = DateTimes.of("2011-01-12"); + public static final DateTime LAST = DateTimes.of("2011-04-15"); - public static final DateTime skippedDay = DateTimes.of("2011-01-21T00:00:00.000Z"); + public static final DateTime SKIPPED_DAY = DateTimes.of("2011-01-21T00:00:00.000Z"); - public static final QuerySegmentSpec firstToThird = new MultipleIntervalSegmentSpec( + public static final QuerySegmentSpec FIRST_TO_THIRD = new MultipleIntervalSegmentSpec( Collections.singletonList(Intervals.of("2011-04-01T00:00:00.000Z/2011-04-03T00:00:00.000Z")) ); - public static final QuerySegmentSpec secondOnly = new MultipleIntervalSegmentSpec( + public static final QuerySegmentSpec SECOND_ONLY = new MultipleIntervalSegmentSpec( Collections.singletonList(Intervals.of("2011-04-02T00:00:00.000Z/P1D")) ); - public static final QuerySegmentSpec fullOnIntervalSpec = new MultipleIntervalSegmentSpec( - Collections.singletonList(fullOnInterval) + public static final QuerySegmentSpec FULL_ON_INTERVAL_SPEC = new MultipleIntervalSegmentSpec( + Collections.singletonList(FULL_ON_INTERVAL) ); - public static final QuerySegmentSpec emptyInterval = new MultipleIntervalSegmentSpec( + public static final QuerySegmentSpec EMPTY_INTERVAL = new MultipleIntervalSegmentSpec( Collections.singletonList(Intervals.of("2020-04-02T00:00:00.000Z/P1D")) ); @@ -343,15 +343,15 @@ public class QueryRunnerTestHelper final QueryableIndex noRollupMMappedTestIndex = TestIndex.getNoRollupMMappedTestIndex(); final QueryableIndex mergedRealtimeIndex = TestIndex.mergedRealtimeIndex(); return 
ImmutableList.of( - makeQueryRunner(factory, new IncrementalIndexSegment(rtIndex, segmentId), ("rtIndex")), - makeQueryRunner(factory, new IncrementalIndexSegment(noRollupRtIndex, segmentId), "noRollupRtIndex"), - makeQueryRunner(factory, new QueryableIndexSegment(mMappedTestIndex, segmentId), "mMappedTestIndex"), + makeQueryRunner(factory, new IncrementalIndexSegment(rtIndex, SEGMENT_ID), ("rtIndex")), + makeQueryRunner(factory, new IncrementalIndexSegment(noRollupRtIndex, SEGMENT_ID), "noRollupRtIndex"), + makeQueryRunner(factory, new QueryableIndexSegment(mMappedTestIndex, SEGMENT_ID), "mMappedTestIndex"), makeQueryRunner( factory, - new QueryableIndexSegment(noRollupMMappedTestIndex, segmentId), + new QueryableIndexSegment(noRollupMMappedTestIndex, SEGMENT_ID), "noRollupMMappedTestIndex" ), - makeQueryRunner(factory, new QueryableIndexSegment(mergedRealtimeIndex, segmentId), "mergedRealtimeIndex") + makeQueryRunner(factory, new QueryableIndexSegment(mergedRealtimeIndex, SEGMENT_ID), "mergedRealtimeIndex") ); } @@ -363,11 +363,11 @@ public class QueryRunnerTestHelper final QueryableIndex mergedRealtimeIndex = TestIndex.mergedRealtimeIndex(); return Arrays.asList( - makeUnionQueryRunner(factory, new IncrementalIndexSegment(rtIndex, segmentId), "rtIndex"), - makeUnionQueryRunner(factory, new QueryableIndexSegment(mMappedTestIndex, segmentId), "mMappedTestIndex"), + makeUnionQueryRunner(factory, new IncrementalIndexSegment(rtIndex, SEGMENT_ID), "rtIndex"), + makeUnionQueryRunner(factory, new QueryableIndexSegment(mMappedTestIndex, SEGMENT_ID), "mMappedTestIndex"), makeUnionQueryRunner( factory, - new QueryableIndexSegment(mergedRealtimeIndex, segmentId), + new QueryableIndexSegment(mergedRealtimeIndex, SEGMENT_ID), "mergedRealtimeIndex" ) ); @@ -381,8 +381,8 @@ public class QueryRunnerTestHelper { return makeQueryRunner( factory, - segmentId, - new IncrementalIndexSegment(TestIndex.makeRealtimeIndex(resourceFileName), segmentId), + SEGMENT_ID, + new IncrementalIndexSegment(TestIndex.makeRealtimeIndex(resourceFileName), SEGMENT_ID), runnerName ); } @@ -393,7 +393,7 @@ public class QueryRunnerTestHelper final String runnerName ) { - return makeQueryRunner(factory, segmentId, adapter, runnerName); + return makeQueryRunner(factory, SEGMENT_ID, adapter, runnerName); } public static > QueryRunner makeQueryRunner( @@ -423,7 +423,7 @@ public class QueryRunnerTestHelper ) { BySegmentQueryRunner bySegmentQueryRunner = - new BySegmentQueryRunner<>(segmentId, adapter.getDataInterval().getStart(), factory.createRunner(adapter)); + new BySegmentQueryRunner<>(SEGMENT_ID, adapter.getDataInterval().getStart(), factory.createRunner(adapter)); final QueryRunner runner = new FluentQueryRunnerBuilder(factory.getToolchest()) .create(new UnionQueryRunner<>(bySegmentQueryRunner)) .mergeResults() diff --git a/processing/src/test/java/org/apache/druid/query/RetryQueryRunnerTest.java b/processing/src/test/java/org/apache/druid/query/RetryQueryRunnerTest.java index 9826c75afef..6379b6a6bc4 100644 --- a/processing/src/test/java/org/apache/druid/query/RetryQueryRunnerTest.java +++ b/processing/src/test/java/org/apache/druid/query/RetryQueryRunnerTest.java @@ -70,17 +70,17 @@ public class RetryQueryRunnerTest private final ObjectMapper jsonMapper = TestHelper.makeJsonMapper(); final TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) - .intervals(QueryRunnerTestHelper.firstToThird) + 
.dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators( Arrays.asList( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory( "idx", "index" ), - QueryRunnerTestHelper.qualityUniques + QueryRunnerTestHelper.QUALITY_UNIQUES ) ) .build(); diff --git a/processing/src/test/java/org/apache/druid/query/TestQueryRunners.java b/processing/src/test/java/org/apache/druid/query/TestQueryRunners.java index ecba0055ef2..4b4b4d3e086 100644 --- a/processing/src/test/java/org/apache/druid/query/TestQueryRunners.java +++ b/processing/src/test/java/org/apache/druid/query/TestQueryRunners.java @@ -41,7 +41,7 @@ import java.nio.ByteBuffer; */ public class TestQueryRunners { - private static final TopNQueryConfig topNConfig = new TopNQueryConfig(); + private static final TopNQueryConfig TOPN_CONFIG = new TopNQueryConfig(); public static CloseableStupidPool createDefaultNonBlockingPool() { @@ -56,7 +56,7 @@ public class TestQueryRunners QueryRunnerFactory factory = new TopNQueryRunnerFactory( pool, new TopNQueryQueryToolChest( - topNConfig, + TOPN_CONFIG, QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator() ), QueryRunnerTestHelper.NOOP_QUERYWATCHER diff --git a/processing/src/test/java/org/apache/druid/query/UnionQueryRunnerTest.java b/processing/src/test/java/org/apache/druid/query/UnionQueryRunnerTest.java index a64c31301f8..3b9e5e8971f 100644 --- a/processing/src/test/java/org/apache/druid/query/UnionQueryRunnerTest.java +++ b/processing/src/test/java/org/apache/druid/query/UnionQueryRunnerTest.java @@ -68,7 +68,7 @@ public class UnionQueryRunnerTest ) ) .intervals("2014-01-01T00:00:00Z/2015-01-01T00:00:00Z") - .aggregators(QueryRunnerTestHelper.commonDoubleAggregators) + .aggregators(QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS) .build(); ResponseContext responseContext = ResponseContext.createEmpty(); Sequence result = runner.run(QueryPlus.wrap(q), responseContext); diff --git a/processing/src/test/java/org/apache/druid/query/aggregation/AggregatorUtilTest.java b/processing/src/test/java/org/apache/druid/query/aggregation/AggregatorUtilTest.java index 49f92b75a4b..be97e65abd0 100644 --- a/processing/src/test/java/org/apache/druid/query/aggregation/AggregatorUtilTest.java +++ b/processing/src/test/java/org/apache/druid/query/aggregation/AggregatorUtilTest.java @@ -113,7 +113,7 @@ public class AggregatorUtilTest ArrayList aggregatorFactories = Lists.newArrayList( Iterables.concat( - QueryRunnerTestHelper.commonDoubleAggregators, + QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS, Lists.newArrayList( new DoubleMaxAggregatorFactory("maxIndex", "index"), new DoubleMinAggregatorFactory("minIndex", "index") @@ -122,8 +122,8 @@ public class AggregatorUtilTest ); List postAggregatorList = Arrays.asList( - QueryRunnerTestHelper.addRowsIndexConstant, - QueryRunnerTestHelper.dependentPostAgg + QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT, + QueryRunnerTestHelper.DEPENDENT_POST_AGG ); Pair, List> aggregatorsPair = AggregatorUtil.condensedAggregators( aggregatorFactories, @@ -132,13 +132,13 @@ public class AggregatorUtilTest ); // verify aggregators Assert.assertEquals( - Lists.newArrayList(QueryRunnerTestHelper.rowsCount, QueryRunnerTestHelper.indexDoubleSum), + Lists.newArrayList(QueryRunnerTestHelper.ROWS_COUNT, QueryRunnerTestHelper.INDEX_DOUBLE_SUM), aggregatorsPair.lhs ); Assert.assertEquals( Lists.newArrayList( - 
QueryRunnerTestHelper.addRowsIndexConstant, - QueryRunnerTestHelper.dependentPostAgg + QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT, + QueryRunnerTestHelper.DEPENDENT_POST_AGG ), aggregatorsPair.rhs ); diff --git a/processing/src/test/java/org/apache/druid/query/aggregation/JavaScriptAggregatorBenchmark.java b/processing/src/test/java/org/apache/druid/query/aggregation/JavaScriptAggregatorBenchmark.java index e3024782f7b..7d84d95e7bc 100644 --- a/processing/src/test/java/org/apache/druid/query/aggregation/JavaScriptAggregatorBenchmark.java +++ b/processing/src/test/java/org/apache/druid/query/aggregation/JavaScriptAggregatorBenchmark.java @@ -32,12 +32,12 @@ import java.util.Map; public class JavaScriptAggregatorBenchmark extends SimpleBenchmark { - protected static final Map scriptDoubleSum = new HashMap<>(); + protected static final Map SCRIPT_DOUBLE_SUM = new HashMap<>(); static { - scriptDoubleSum.put("fnAggregate", "function aggregate(current, a) { return current + a }"); - scriptDoubleSum.put("fnReset", "function reset() { return 0 }"); - scriptDoubleSum.put("fnCombine", "function combine(a,b) { return a + b }"); + SCRIPT_DOUBLE_SUM.put("fnAggregate", "function aggregate(current, a) { return current + a }"); + SCRIPT_DOUBLE_SUM.put("fnReset", "function reset() { return 0 }"); + SCRIPT_DOUBLE_SUM.put("fnCombine", "function combine(a,b) { return a + b }"); } private static void aggregate(TestDoubleColumnSelectorImpl selector, Aggregator agg) @@ -53,7 +53,7 @@ public class JavaScriptAggregatorBenchmark extends SimpleBenchmark @Override protected void setUp() { - Map script = scriptDoubleSum; + Map script = SCRIPT_DOUBLE_SUM; jsAggregator = new JavaScriptAggregator( Collections.singletonList(selector), diff --git a/processing/src/test/java/org/apache/druid/query/aggregation/JavaScriptAggregatorTest.java b/processing/src/test/java/org/apache/druid/query/aggregation/JavaScriptAggregatorTest.java index 757001b0ed3..808e64dcf21 100644 --- a/processing/src/test/java/org/apache/druid/query/aggregation/JavaScriptAggregatorTest.java +++ b/processing/src/test/java/org/apache/druid/query/aggregation/JavaScriptAggregatorTest.java @@ -40,8 +40,8 @@ import java.util.Map; public class JavaScriptAggregatorTest { - protected static final Map sumLogATimesBPlusTen = new HashMap<>(); - protected static final Map scriptDoubleSum = new HashMap<>(); + protected static final Map SUM_LOG_A_TIMES_B_PLUS_TEN = new HashMap<>(); + protected static final Map SCRIPT_DOUBLE_SUM = new HashMap<>(); final ColumnSelectorFactory DUMMY_COLUMN_SELECTOR_FACTORY = new ColumnSelectorFactory() { @@ -65,13 +65,13 @@ public class JavaScriptAggregatorTest }; static { - sumLogATimesBPlusTen.put("fnAggregate", "function aggregate(current, a, b) { return current + (Math.log(a) * b) }"); - sumLogATimesBPlusTen.put("fnReset", "function reset() { return 10 }"); - sumLogATimesBPlusTen.put("fnCombine", "function combine(a,b) { return a + b }"); + SUM_LOG_A_TIMES_B_PLUS_TEN.put("fnAggregate", "function aggregate(current, a, b) { return current + (Math.log(a) * b) }"); + SUM_LOG_A_TIMES_B_PLUS_TEN.put("fnReset", "function reset() { return 10 }"); + SUM_LOG_A_TIMES_B_PLUS_TEN.put("fnCombine", "function combine(a,b) { return a + b }"); - scriptDoubleSum.put("fnAggregate", "function aggregate(current, a) { return current + a }"); - scriptDoubleSum.put("fnReset", "function reset() { return 0 }"); - scriptDoubleSum.put("fnCombine", "function combine(a,b) { return a + b }"); + SCRIPT_DOUBLE_SUM.put("fnAggregate", "function aggregate(current, a) { 
return current + a }"); + SCRIPT_DOUBLE_SUM.put("fnReset", "function reset() { return 0 }"); + SCRIPT_DOUBLE_SUM.put("fnCombine", "function combine(a,b) { return a + b }"); } @Rule @@ -114,7 +114,7 @@ public class JavaScriptAggregatorTest final TestDoubleColumnSelectorImpl selector1 = new TestDoubleColumnSelectorImpl(new double[]{42.12d, 9d}); final TestDoubleColumnSelectorImpl selector2 = new TestDoubleColumnSelectorImpl(new double[]{2d, 3d}); - Map script = sumLogATimesBPlusTen; + Map script = SUM_LOG_A_TIMES_B_PLUS_TEN; JavaScriptAggregator agg = new JavaScriptAggregator( Arrays.asList(selector1, selector2), @@ -149,7 +149,7 @@ public class JavaScriptAggregatorTest final TestFloatColumnSelector selector1 = new TestFloatColumnSelector(new float[]{42.12f, 9f}); final TestFloatColumnSelector selector2 = new TestFloatColumnSelector(new float[]{2f, 3f}); - Map script = sumLogATimesBPlusTen; + Map script = SUM_LOG_A_TIMES_B_PLUS_TEN; JavaScriptBufferAggregator agg = new JavaScriptBufferAggregator( Arrays.asList(selector1, selector2), JavaScriptAggregatorFactory.compileScript( @@ -184,7 +184,7 @@ public class JavaScriptAggregatorTest @Test public void testAggregateMissingColumn() { - Map script = scriptDoubleSum; + Map script = SCRIPT_DOUBLE_SUM; JavaScriptAggregator agg = new JavaScriptAggregator( Collections.singletonList(null), @@ -222,8 +222,8 @@ public class JavaScriptAggregatorTest Collections.singletonList(ocs), JavaScriptAggregatorFactory.compileScript( "function aggregate(current, a) { if (Array.isArray(a)) { return current + a.length; } else if (typeof a === 'string') { return current + 1; } else { return current; } }", - scriptDoubleSum.get("fnReset"), - scriptDoubleSum.get("fnCombine") + SCRIPT_DOUBLE_SUM.get("fnReset"), + SCRIPT_DOUBLE_SUM.get("fnCombine") ) ); @@ -256,9 +256,9 @@ public class JavaScriptAggregatorTest final JavaScriptAggregatorFactory factory = new JavaScriptAggregatorFactory( "foo", ImmutableList.of("foo"), - scriptDoubleSum.get("fnAggregate"), - scriptDoubleSum.get("fnReset"), - scriptDoubleSum.get("fnCombine"), + SCRIPT_DOUBLE_SUM.get("fnAggregate"), + SCRIPT_DOUBLE_SUM.get("fnReset"), + SCRIPT_DOUBLE_SUM.get("fnCombine"), new JavaScriptConfig(false) ); @@ -274,9 +274,9 @@ public class JavaScriptAggregatorTest final JavaScriptAggregatorFactory factory = new JavaScriptAggregatorFactory( "foo", ImmutableList.of("foo"), - scriptDoubleSum.get("fnAggregate"), - scriptDoubleSum.get("fnReset"), - scriptDoubleSum.get("fnCombine"), + SCRIPT_DOUBLE_SUM.get("fnAggregate"), + SCRIPT_DOUBLE_SUM.get("fnReset"), + SCRIPT_DOUBLE_SUM.get("fnCombine"), new JavaScriptConfig(false) ); @@ -308,7 +308,7 @@ public class JavaScriptAggregatorTest } */ - Map script = scriptDoubleSum; + Map script = SCRIPT_DOUBLE_SUM; JavaScriptAggregator aggRhino = new JavaScriptAggregator( Collections.singletonList(selector), JavaScriptAggregatorFactory.compileScript( diff --git a/processing/src/test/java/org/apache/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java b/processing/src/test/java/org/apache/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java index c17e645aa8b..65631c5cced 100644 --- a/processing/src/test/java/org/apache/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java +++ b/processing/src/test/java/org/apache/druid/query/aggregation/cardinality/CardinalityAggregatorTest.java @@ -207,10 +207,10 @@ public class CardinalityAggregatorTest combine(values1, values2): 8 distinct rows combine(values1, values2): 7 distinct values */ - private static 
final List values1 = dimensionValues( + private static final List VALUES1 = dimensionValues( "a", "b", "c", "a", "a", null, "b", "b", "b", "b", "a", "a" ); - private static final List values2 = dimensionValues( + private static final List VALUES2 = dimensionValues( "a", "b", "c", @@ -290,8 +290,8 @@ public class CardinalityAggregatorTest public CardinalityAggregatorTest() { - dim1 = new TestDimensionSelector(values1, null); - dim2 = new TestDimensionSelector(values2, null); + dim1 = new TestDimensionSelector(VALUES1, null); + dim2 = new TestDimensionSelector(VALUES2, null); dimInfoList = Lists.newArrayList( new ColumnSelectorPlus( @@ -342,8 +342,8 @@ public class CardinalityAggregatorTest String superJsFn = "function(str) { return 'super-' + str; }"; ExtractionFn superFn = new JavaScriptExtractionFn(superJsFn, false, JavaScriptConfig.getEnabledInstance()); - dim1WithExtraction = new TestDimensionSelector(values1, superFn); - dim2WithExtraction = new TestDimensionSelector(values2, superFn); + dim1WithExtraction = new TestDimensionSelector(VALUES1, superFn); + dim2WithExtraction = new TestDimensionSelector(VALUES2, superFn); selectorListWithExtraction = Lists.newArrayList(dim1WithExtraction, dim2WithExtraction); dimInfoListWithExtraction = Lists.newArrayList( new ColumnSelectorPlus<>( @@ -360,8 +360,8 @@ public class CardinalityAggregatorTest String helloJsFn = "function(str) { return 'hello' }"; ExtractionFn helloFn = new JavaScriptExtractionFn(helloJsFn, false, JavaScriptConfig.getEnabledInstance()); - dim1ConstantVal = new TestDimensionSelector(values1, helloFn); - dim2ConstantVal = new TestDimensionSelector(values2, helloFn); + dim1ConstantVal = new TestDimensionSelector(VALUES1, helloFn); + dim2ConstantVal = new TestDimensionSelector(VALUES2, helloFn); selectorListConstantVal = Lists.newArrayList(dim1ConstantVal, dim2ConstantVal); dimInfoListConstantVal = Lists.newArrayList( new ColumnSelectorPlus<>( @@ -387,7 +387,7 @@ public class CardinalityAggregatorTest ); - for (int i = 0; i < values1.size(); ++i) { + for (int i = 0; i < VALUES1.size(); ++i) { aggregate(selectorList, agg); } Assert.assertEquals(9.0, (Double) rowAggregatorFactory.finalizeComputation(agg.get()), 0.05); @@ -402,7 +402,7 @@ public class CardinalityAggregatorTest false ); - for (int i = 0; i < values1.size(); ++i) { + for (int i = 0; i < VALUES1.size(); ++i) { aggregate(selectorList, agg); } Assert.assertEquals(NullHandling.replaceWithDefault() ? 7.0 : 6.0, (Double) valueAggregatorFactory.finalizeComputation(agg.get()), 0.05); @@ -424,7 +424,7 @@ public class CardinalityAggregatorTest agg.init(buf, pos); - for (int i = 0; i < values1.size(); ++i) { + for (int i = 0; i < VALUES1.size(); ++i) { bufferAggregate(selectorList, agg, buf, pos); } Assert.assertEquals(9.0, (Double) rowAggregatorFactory.finalizeComputation(agg.get(buf, pos)), 0.05); @@ -446,7 +446,7 @@ public class CardinalityAggregatorTest agg.init(buf, pos); - for (int i = 0; i < values1.size(); ++i) { + for (int i = 0; i < VALUES1.size(); ++i) { bufferAggregate(selectorList, agg, buf, pos); } Assert.assertEquals(NullHandling.replaceWithDefault() ? 
7.0 : 6.0, (Double) valueAggregatorFactory.finalizeComputation(agg.get(buf, pos)), 0.05); @@ -476,10 +476,10 @@ public class CardinalityAggregatorTest CardinalityAggregator agg1 = new CardinalityAggregator(dimInfo1, true); CardinalityAggregator agg2 = new CardinalityAggregator(dimInfo2, true); - for (int i = 0; i < values1.size(); ++i) { + for (int i = 0; i < VALUES1.size(); ++i) { aggregate(selector1, agg1); } - for (int i = 0; i < values2.size(); ++i) { + for (int i = 0; i < VALUES2.size(); ++i) { aggregate(selector2, agg2); } @@ -522,10 +522,10 @@ public class CardinalityAggregatorTest CardinalityAggregator agg1 = new CardinalityAggregator(dimInfo1, false); CardinalityAggregator agg2 = new CardinalityAggregator(dimInfo2, false); - for (int i = 0; i < values1.size(); ++i) { + for (int i = 0; i < VALUES1.size(); ++i) { aggregate(selector1, agg1); } - for (int i = 0; i < values2.size(); ++i) { + for (int i = 0; i < VALUES2.size(); ++i) { aggregate(selector2, agg2); } @@ -551,7 +551,7 @@ public class CardinalityAggregatorTest dimInfoListWithExtraction, true ); - for (int i = 0; i < values1.size(); ++i) { + for (int i = 0; i < VALUES1.size(); ++i) { aggregate(selectorListWithExtraction, agg); } Assert.assertEquals(9.0, (Double) rowAggregatorFactory.finalizeComputation(agg.get()), 0.05); @@ -560,7 +560,7 @@ public class CardinalityAggregatorTest dimInfoListConstantVal, true ); - for (int i = 0; i < values1.size(); ++i) { + for (int i = 0; i < VALUES1.size(); ++i) { aggregate(selectorListConstantVal, agg2); } Assert.assertEquals(3.0, (Double) rowAggregatorFactory.finalizeComputation(agg2.get()), 0.05); @@ -573,7 +573,7 @@ public class CardinalityAggregatorTest dimInfoListWithExtraction, false ); - for (int i = 0; i < values1.size(); ++i) { + for (int i = 0; i < VALUES1.size(); ++i) { aggregate(selectorListWithExtraction, agg); } Assert.assertEquals(7.0, (Double) valueAggregatorFactory.finalizeComputation(agg.get()), 0.05); @@ -582,7 +582,7 @@ public class CardinalityAggregatorTest dimInfoListConstantVal, false ); - for (int i = 0; i < values1.size(); ++i) { + for (int i = 0; i < VALUES1.size(); ++i) { aggregate(selectorListConstantVal, agg2); } Assert.assertEquals(1.0, (Double) valueAggregatorFactory.finalizeComputation(agg2.get()), 0.05); diff --git a/processing/src/test/java/org/apache/druid/query/aggregation/first/StringFirstTimeseriesQueryTest.java b/processing/src/test/java/org/apache/druid/query/aggregation/first/StringFirstTimeseriesQueryTest.java index df1bce84988..6442002f971 100644 --- a/processing/src/test/java/org/apache/druid/query/aggregation/first/StringFirstTimeseriesQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/aggregation/first/StringFirstTimeseriesQueryTest.java @@ -95,9 +95,9 @@ public class StringFirstTimeseriesQueryTest ); TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Collections.singletonList( new StringFirstAggregatorFactory( diff --git a/processing/src/test/java/org/apache/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactoryTest.java b/processing/src/test/java/org/apache/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactoryTest.java index 0813fd2825b..f9151923413 100644 --- 
a/processing/src/test/java/org/apache/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactoryTest.java +++ b/processing/src/test/java/org/apache/druid/query/aggregation/hyperloglog/HyperUniquesAggregatorFactoryTest.java @@ -35,7 +35,7 @@ import java.util.Random; public class HyperUniquesAggregatorFactoryTest { - static final HyperUniquesAggregatorFactory aggregatorFactory = new HyperUniquesAggregatorFactory( + static final HyperUniquesAggregatorFactory AGGREGATOR_FACTORY = new HyperUniquesAggregatorFactory( "hyperUnique", "uniques" ); @@ -46,7 +46,7 @@ public class HyperUniquesAggregatorFactoryTest @Test public void testDeserializeV0() { - Object v0 = aggregatorFactory.deserialize(V0_BASE64); + Object v0 = AGGREGATOR_FACTORY.deserialize(V0_BASE64); Assert.assertEquals("deserialized value is VersionZeroHyperLogLogCollector", VersionZeroHyperLogLogCollector.class, v0.getClass()); } diff --git a/processing/src/test/java/org/apache/druid/query/aggregation/last/StringLastTimeseriesQueryTest.java b/processing/src/test/java/org/apache/druid/query/aggregation/last/StringLastTimeseriesQueryTest.java index aa31c0ade4b..87ec83ce0e5 100644 --- a/processing/src/test/java/org/apache/druid/query/aggregation/last/StringLastTimeseriesQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/aggregation/last/StringLastTimeseriesQueryTest.java @@ -95,9 +95,9 @@ public class StringLastTimeseriesQueryTest ); TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Collections.singletonList( new StringLastAggregatorFactory( diff --git a/processing/src/test/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java b/processing/src/test/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java index bb71f8e023e..d22ba99efe9 100644 --- a/processing/src/test/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/datasourcemetadata/DataSourceMetadataQueryTest.java @@ -56,7 +56,7 @@ import java.util.Map; public class DataSourceMetadataQueryTest { - private static final ObjectMapper jsonMapper = new DefaultObjectMapper(); + private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper(); @Test public void testQuerySerialization() throws IOException @@ -65,8 +65,8 @@ public class DataSourceMetadataQueryTest .dataSource("testing") .build(); - String json = jsonMapper.writeValueAsString(query); - Query serdeQuery = jsonMapper.readValue(json, Query.class); + String json = JSON_MAPPER.writeValueAsString(query); + Query serdeQuery = JSON_MAPPER.readValue(json, Query.class); Assert.assertEquals(query, serdeQuery); } diff --git a/processing/src/test/java/org/apache/druid/query/dimension/ListFilteredDimensionSpecTest.java b/processing/src/test/java/org/apache/druid/query/dimension/ListFilteredDimensionSpecTest.java index 75bdba360cd..636da053212 100644 --- a/processing/src/test/java/org/apache/druid/query/dimension/ListFilteredDimensionSpecTest.java +++ b/processing/src/test/java/org/apache/druid/query/dimension/ListFilteredDimensionSpecTest.java @@ -122,7 +122,7 @@ public class ListFilteredDimensionSpecTest true ); - DimensionSelector selector = 
spec.decorate(TestDimensionSelector.instance); + DimensionSelector selector = spec.decorate(TestDimensionSelector.INSTANCE); Assert.assertEquals(2, selector.getValueCardinality()); @@ -147,7 +147,7 @@ public class ListFilteredDimensionSpecTest false ); - DimensionSelector selector = spec.decorate(TestDimensionSelector.instance); + DimensionSelector selector = spec.decorate(TestDimensionSelector.INSTANCE); Assert.assertEquals(24, selector.getValueCardinality()); @@ -171,7 +171,7 @@ public class ListFilteredDimensionSpecTest false ); - DimensionSelector selector = spec.decorate(TestDimensionSelector.instance); + DimensionSelector selector = spec.decorate(TestDimensionSelector.INSTANCE); Assert.assertEquals(25, selector.getValueCardinality()); diff --git a/processing/src/test/java/org/apache/druid/query/dimension/PrefixFilteredDimensionSpecTest.java b/processing/src/test/java/org/apache/druid/query/dimension/PrefixFilteredDimensionSpecTest.java index 9538a538156..bf744f4d4e5 100644 --- a/processing/src/test/java/org/apache/druid/query/dimension/PrefixFilteredDimensionSpecTest.java +++ b/processing/src/test/java/org/apache/druid/query/dimension/PrefixFilteredDimensionSpecTest.java @@ -84,7 +84,7 @@ public class PrefixFilteredDimensionSpecTest "c" ); - DimensionSelector selector = spec.decorate(TestDimensionSelector.instance); + DimensionSelector selector = spec.decorate(TestDimensionSelector.INSTANCE); Assert.assertEquals(1, selector.getValueCardinality()); diff --git a/processing/src/test/java/org/apache/druid/query/dimension/RegexFilteredDimensionSpecTest.java b/processing/src/test/java/org/apache/druid/query/dimension/RegexFilteredDimensionSpecTest.java index 147c124a9c3..fba5ab9e6a7 100644 --- a/processing/src/test/java/org/apache/druid/query/dimension/RegexFilteredDimensionSpecTest.java +++ b/processing/src/test/java/org/apache/druid/query/dimension/RegexFilteredDimensionSpecTest.java @@ -84,7 +84,7 @@ public class RegexFilteredDimensionSpecTest "[c,g]" ); - DimensionSelector selector = spec.decorate(TestDimensionSelector.instance); + DimensionSelector selector = spec.decorate(TestDimensionSelector.INSTANCE); Assert.assertEquals(2, selector.getValueCardinality()); diff --git a/processing/src/test/java/org/apache/druid/query/dimension/TestDimensionSelector.java b/processing/src/test/java/org/apache/druid/query/dimension/TestDimensionSelector.java index 1a70662e8ab..e7733974a39 100644 --- a/processing/src/test/java/org/apache/druid/query/dimension/TestDimensionSelector.java +++ b/processing/src/test/java/org/apache/druid/query/dimension/TestDimensionSelector.java @@ -37,7 +37,7 @@ import javax.annotation.Nullable; */ class TestDimensionSelector extends AbstractDimensionSelector { - public static final TestDimensionSelector instance = new TestDimensionSelector(); + public static final TestDimensionSelector INSTANCE = new TestDimensionSelector(); private TestDimensionSelector() { diff --git a/processing/src/test/java/org/apache/druid/query/extraction/CascadeExtractionFnTest.java b/processing/src/test/java/org/apache/druid/query/extraction/CascadeExtractionFnTest.java index 1987c5f1381..15a1d15ac04 100644 --- a/processing/src/test/java/org/apache/druid/query/extraction/CascadeExtractionFnTest.java +++ b/processing/src/test/java/org/apache/druid/query/extraction/CascadeExtractionFnTest.java @@ -34,7 +34,7 @@ import java.util.Set; public class CascadeExtractionFnTest { - private static final String[] paths = { + private static final String[] PATHS = { "/druid/prod/historical", 
"/druid/prod/broker", "/druid/prod/coordinator", @@ -71,7 +71,7 @@ public class CascadeExtractionFnTest CascadeExtractionFn cascadeExtractionFn = new CascadeExtractionFn(fns); Set extracted = new LinkedHashSet<>(); - for (String path : paths) { + for (String path : PATHS) { extracted.add(cascadeExtractionFn.apply(path)); } diff --git a/processing/src/test/java/org/apache/druid/query/extraction/JavaScriptExtractionFnTest.java b/processing/src/test/java/org/apache/druid/query/extraction/JavaScriptExtractionFnTest.java index 2402cc288f2..818c1fe9c85 100644 --- a/processing/src/test/java/org/apache/druid/query/extraction/JavaScriptExtractionFnTest.java +++ b/processing/src/test/java/org/apache/druid/query/extraction/JavaScriptExtractionFnTest.java @@ -39,7 +39,7 @@ public class JavaScriptExtractionFnTest @Rule public ExpectedException expectedException = ExpectedException.none(); - private static final String[] testStrings = { + private static final String[] TEST_STRINGS = { "Quito", "Calgary", "Tokyo", @@ -56,7 +56,7 @@ public class JavaScriptExtractionFnTest String function = "function(str) { return str.substring(0,3); }"; ExtractionFn extractionFn = new JavaScriptExtractionFn(function, false, JavaScriptConfig.getEnabledInstance()); - for (String str : testStrings) { + for (String str : TEST_STRINGS) { String res = extractionFn.apply(str); Assert.assertEquals(str.substring(0, 3), res); } @@ -118,7 +118,7 @@ public class JavaScriptExtractionFnTest ExtractionFn extractionFn = new JavaScriptExtractionFn(function, false, JavaScriptConfig.getEnabledInstance()); Iterator it = Iterators.forArray("Qt", "Clgry", "Tky", "Stckhlm", "Vncvr", "Prtr", "Wllngtn", "Ontr"); - for (String str : testStrings) { + for (String str : TEST_STRINGS) { String res = extractionFn.apply(str); Assert.assertEquals(it.next(), res); } diff --git a/processing/src/test/java/org/apache/druid/query/extraction/MapLookupExtractionFnSerDeTest.java b/processing/src/test/java/org/apache/druid/query/extraction/MapLookupExtractionFnSerDeTest.java index 9d30db7ce2f..92e5ef77889 100644 --- a/processing/src/test/java/org/apache/druid/query/extraction/MapLookupExtractionFnSerDeTest.java +++ b/processing/src/test/java/org/apache/druid/query/extraction/MapLookupExtractionFnSerDeTest.java @@ -40,7 +40,7 @@ import java.util.UUID; public class MapLookupExtractionFnSerDeTest { private static ObjectMapper mapper; - private static final Map renames = ImmutableMap.of( + private static final Map RENAMES = ImmutableMap.of( "foo", "bar", "bar", "baz" ); @@ -58,11 +58,11 @@ public class MapLookupExtractionFnSerDeTest final DimExtractionFn fn = mapper.readerFor(DimExtractionFn.class).readValue( StringUtils.format( "{\"type\":\"lookup\",\"lookup\":{\"type\":\"map\", \"map\":%s}}", - mapper.writeValueAsString(renames) + mapper.writeValueAsString(RENAMES) ) ); - for (String key : renames.keySet()) { - Assert.assertEquals(renames.get(key), fn.apply(key)); + for (String key : RENAMES.keySet()) { + Assert.assertEquals(RENAMES.get(key), fn.apply(key)); } final String crazyString = UUID.randomUUID().toString(); Assert.assertEquals(null, fn.apply(crazyString)); @@ -74,7 +74,7 @@ public class MapLookupExtractionFnSerDeTest .readValue( StringUtils.format( "{\"type\":\"lookup\",\"lookup\":{\"type\":\"map\", \"map\":%s}, \"retainMissingValue\":true}", - mapper.writeValueAsString(renames) + mapper.writeValueAsString(RENAMES) ) ) .apply(crazyString) diff --git a/processing/src/test/java/org/apache/druid/query/extraction/MatchingDimExtractionFnTest.java 
b/processing/src/test/java/org/apache/druid/query/extraction/MatchingDimExtractionFnTest.java index 97efc985c4c..0db848bc95c 100644 --- a/processing/src/test/java/org/apache/druid/query/extraction/MatchingDimExtractionFnTest.java +++ b/processing/src/test/java/org/apache/druid/query/extraction/MatchingDimExtractionFnTest.java @@ -34,7 +34,7 @@ import java.util.Set; */ public class MatchingDimExtractionFnTest { - private static final String[] testStrings = { + private static final String[] TEST_STRINGS = { "Quito", "Calgary", "Tokyo", @@ -54,7 +54,7 @@ public class MatchingDimExtractionFnTest List expected = Arrays.asList("Quito", "Tokyo", "Stockholm", "Pretoria", "Wellington"); Set extracted = new HashSet<>(); - for (String str : testStrings) { + for (String str : TEST_STRINGS) { String res = extractionFn.apply(str); if (res != null) { extracted.add(res); diff --git a/processing/src/test/java/org/apache/druid/query/extraction/RegexDimExtractionFnTest.java b/processing/src/test/java/org/apache/druid/query/extraction/RegexDimExtractionFnTest.java index 42757983f53..7b2dcc001be 100644 --- a/processing/src/test/java/org/apache/druid/query/extraction/RegexDimExtractionFnTest.java +++ b/processing/src/test/java/org/apache/druid/query/extraction/RegexDimExtractionFnTest.java @@ -34,7 +34,7 @@ import java.util.Set; */ public class RegexDimExtractionFnTest { - private static final String[] paths = { + private static final String[] PATHS = { "/druid/prod/historical", "/druid/prod/broker", "/druid/prod/coordinator", @@ -45,7 +45,7 @@ public class RegexDimExtractionFnTest "/dash/baloo" }; - private static final String[] testStrings = { + private static final String[] TEST_STRINGS = { "apple", "awesome", "asylum", @@ -61,7 +61,7 @@ public class RegexDimExtractionFnTest ExtractionFn extractionFn = new RegexDimExtractionFn(regex, false, null); Set extracted = new LinkedHashSet<>(); - for (String path : paths) { + for (String path : PATHS) { extracted.add(extractionFn.apply(path)); } @@ -76,7 +76,7 @@ public class RegexDimExtractionFnTest ExtractionFn extractionFn = new RegexDimExtractionFn(regex, false, null); Set extracted = new LinkedHashSet<>(); - for (String path : paths) { + for (String path : PATHS) { extracted.add(extractionFn.apply(path)); } @@ -96,7 +96,7 @@ public class RegexDimExtractionFnTest ExtractionFn extractionFn = new RegexDimExtractionFn(regex, 0, true, null); Set extracted = new LinkedHashSet<>(); - for (String path : paths) { + for (String path : PATHS) { extracted.add(extractionFn.apply(path)); } @@ -113,7 +113,7 @@ public class RegexDimExtractionFnTest ExtractionFn extractionFn = new RegexDimExtractionFn(regex, 2, true, null); Set extracted = new LinkedHashSet<>(); - for (String path : paths) { + for (String path : PATHS) { extracted.add(extractionFn.apply(path)); } @@ -133,7 +133,7 @@ public class RegexDimExtractionFnTest ExtractionFn extractionFn = new RegexDimExtractionFn(regex, false, null); Set extracted = new LinkedHashSet<>(); - for (String testString : testStrings) { + for (String testString : TEST_STRINGS) { extracted.add(extractionFn.apply(testString)); } @@ -194,7 +194,7 @@ public class RegexDimExtractionFnTest ExtractionFn extractionFn = new RegexDimExtractionFn(regex, true, "foobar"); Set extracted = new LinkedHashSet<>(); - for (String testString : testStrings) { + for (String testString : TEST_STRINGS) { extracted.add(extractionFn.apply(testString)); } @@ -213,7 +213,7 @@ public class RegexDimExtractionFnTest ExtractionFn nullExtractionFn = new 
RegexDimExtractionFn(regex, true, null); Set extracted2 = new LinkedHashSet<>(); - for (String testString : testStrings) { + for (String testString : TEST_STRINGS) { extracted2.add(nullExtractionFn.apply(testString)); } diff --git a/processing/src/test/java/org/apache/druid/query/extraction/SearchQuerySpecDimExtractionFnTest.java b/processing/src/test/java/org/apache/druid/query/extraction/SearchQuerySpecDimExtractionFnTest.java index 39047bc5245..5ad437b4112 100644 --- a/processing/src/test/java/org/apache/druid/query/extraction/SearchQuerySpecDimExtractionFnTest.java +++ b/processing/src/test/java/org/apache/druid/query/extraction/SearchQuerySpecDimExtractionFnTest.java @@ -35,7 +35,7 @@ import java.util.List; */ public class SearchQuerySpecDimExtractionFnTest { - private static final String[] testStrings = { + private static final String[] TEST_STRINGS = { "Kyoto", "Calgary", "Tokyo", @@ -56,7 +56,7 @@ public class SearchQuerySpecDimExtractionFnTest List expected = ImmutableList.of("Kyoto", "Tokyo", "Toyokawa", "Yorktown"); List extracted = new ArrayList<>(); - for (String str : testStrings) { + for (String str : TEST_STRINGS) { String res = extractionFn.apply(str); if (res != null) { extracted.add(res); @@ -77,7 +77,7 @@ public class SearchQuerySpecDimExtractionFnTest List expected = ImmutableList.of("Kyoto"); List extracted = new ArrayList<>(); - for (String str : testStrings) { + for (String str : TEST_STRINGS) { String res = extractionFn.apply(str); if (res != null) { extracted.add(res); @@ -98,7 +98,7 @@ public class SearchQuerySpecDimExtractionFnTest List expected = ImmutableList.of("Tokyo", "Toyokawa"); List extracted = new ArrayList<>(); - for (String str : testStrings) { + for (String str : TEST_STRINGS) { String res = extractionFn.apply(str); if (res != null) { extracted.add(res); diff --git a/processing/src/test/java/org/apache/druid/query/extraction/TimeDimExtractionFnTest.java b/processing/src/test/java/org/apache/druid/query/extraction/TimeDimExtractionFnTest.java index 3b0720667c0..c77727f1fab 100644 --- a/processing/src/test/java/org/apache/druid/query/extraction/TimeDimExtractionFnTest.java +++ b/processing/src/test/java/org/apache/druid/query/extraction/TimeDimExtractionFnTest.java @@ -33,7 +33,7 @@ import java.util.Set; */ public class TimeDimExtractionFnTest { - private static final String[] dims = { + private static final String[] DIMS = { "01/01/2012", "01/02/2012", "03/03/2012", @@ -64,7 +64,7 @@ public class TimeDimExtractionFnTest Set months = new HashSet<>(); ExtractionFn extractionFn = new TimeDimExtractionFn("MM/dd/yyyy", "MM/yyyy", false); - for (String dim : dims) { + for (String dim : DIMS) { months.add(extractionFn.apply(dim)); } @@ -81,7 +81,7 @@ public class TimeDimExtractionFnTest Set months = new HashSet<>(); ExtractionFn extractionFn = new TimeDimExtractionFn("MM/dd/yyyy", "MM/yyyy", true); - for (String dim : dims) { + for (String dim : DIMS) { months.add(extractionFn.apply(dim)); } @@ -98,7 +98,7 @@ public class TimeDimExtractionFnTest Set quarters = new HashSet<>(); ExtractionFn extractionFn = new TimeDimExtractionFn("MM/dd/yyyy", "QQQ/yyyy", false); - for (String dim : dims) { + for (String dim : DIMS) { quarters.add(extractionFn.apply(dim)); } diff --git a/processing/src/test/java/org/apache/druid/query/extraction/TimeFormatExtractionFnTest.java b/processing/src/test/java/org/apache/druid/query/extraction/TimeFormatExtractionFnTest.java index 70035f72d4c..f0245f53368 100644 --- 
a/processing/src/test/java/org/apache/druid/query/extraction/TimeFormatExtractionFnTest.java +++ b/processing/src/test/java/org/apache/druid/query/extraction/TimeFormatExtractionFnTest.java @@ -33,7 +33,7 @@ import java.util.Arrays; public class TimeFormatExtractionFnTest { - private static final long[] timestamps = { + private static final long[] TIMESTAMPS = { DateTimes.of("2015-01-01T23:00:00Z").getMillis(), DateTimes.of("2015-01-02T23:00:00Z").getMillis(), DateTimes.of("2015-03-03T23:00:00Z").getMillis(), @@ -46,12 +46,12 @@ public class TimeFormatExtractionFnTest public void testDayOfWeekExtraction() throws Exception { TimeFormatExtractionFn fn = new TimeFormatExtractionFn("EEEE", null, null, null, false); - Assert.assertEquals("Thursday", fn.apply(timestamps[0])); - Assert.assertEquals("Friday", fn.apply(timestamps[1])); - Assert.assertEquals("Tuesday", fn.apply(timestamps[2])); - Assert.assertEquals("Wednesday", fn.apply(timestamps[3])); - Assert.assertEquals("Saturday", fn.apply(timestamps[4])); - Assert.assertEquals("Monday", fn.apply(timestamps[5])); + Assert.assertEquals("Thursday", fn.apply(TIMESTAMPS[0])); + Assert.assertEquals("Friday", fn.apply(TIMESTAMPS[1])); + Assert.assertEquals("Tuesday", fn.apply(TIMESTAMPS[2])); + Assert.assertEquals("Wednesday", fn.apply(TIMESTAMPS[3])); + Assert.assertEquals("Saturday", fn.apply(TIMESTAMPS[4])); + Assert.assertEquals("Monday", fn.apply(TIMESTAMPS[5])); testSerde(fn, "EEEE", null, null, Granularities.NONE); } @@ -60,12 +60,12 @@ public class TimeFormatExtractionFnTest public void testLocalizedExtraction() throws Exception { TimeFormatExtractionFn fn = new TimeFormatExtractionFn("EEEE", null, "is", null, false); - Assert.assertEquals("fimmtudagur", fn.apply(timestamps[0])); - Assert.assertEquals("föstudagur", fn.apply(timestamps[1])); - Assert.assertEquals("þriðjudagur", fn.apply(timestamps[2])); - Assert.assertEquals("miðvikudagur", fn.apply(timestamps[3])); - Assert.assertEquals("laugardagur", fn.apply(timestamps[4])); - Assert.assertEquals("mánudagur", fn.apply(timestamps[5])); + Assert.assertEquals("fimmtudagur", fn.apply(TIMESTAMPS[0])); + Assert.assertEquals("föstudagur", fn.apply(TIMESTAMPS[1])); + Assert.assertEquals("þriðjudagur", fn.apply(TIMESTAMPS[2])); + Assert.assertEquals("miðvikudagur", fn.apply(TIMESTAMPS[3])); + Assert.assertEquals("laugardagur", fn.apply(TIMESTAMPS[4])); + Assert.assertEquals("mánudagur", fn.apply(TIMESTAMPS[5])); testSerde(fn, "EEEE", null, "is", Granularities.NONE); } @@ -74,12 +74,12 @@ public class TimeFormatExtractionFnTest public void testGranularExtractionWithNullPattern() throws Exception { TimeFormatExtractionFn fn = new TimeFormatExtractionFn(null, null, null, Granularities.DAY, false); - Assert.assertEquals("2015-01-01T00:00:00.000Z", fn.apply(timestamps[0])); - Assert.assertEquals("2015-01-02T00:00:00.000Z", fn.apply(timestamps[1])); - Assert.assertEquals("2015-03-03T00:00:00.000Z", fn.apply(timestamps[2])); - Assert.assertEquals("2015-03-04T00:00:00.000Z", fn.apply(timestamps[3])); - Assert.assertEquals("2015-05-02T00:00:00.000Z", fn.apply(timestamps[4])); - Assert.assertEquals("2015-12-21T00:00:00.000Z", fn.apply(timestamps[5])); + Assert.assertEquals("2015-01-01T00:00:00.000Z", fn.apply(TIMESTAMPS[0])); + Assert.assertEquals("2015-01-02T00:00:00.000Z", fn.apply(TIMESTAMPS[1])); + Assert.assertEquals("2015-03-03T00:00:00.000Z", fn.apply(TIMESTAMPS[2])); + Assert.assertEquals("2015-03-04T00:00:00.000Z", fn.apply(TIMESTAMPS[3])); + Assert.assertEquals("2015-05-02T00:00:00.000Z", 
fn.apply(TIMESTAMPS[4])); + Assert.assertEquals("2015-12-21T00:00:00.000Z", fn.apply(TIMESTAMPS[5])); testSerde(fn, null, null, null, Granularities.DAY); } @@ -94,12 +94,12 @@ public class TimeFormatExtractionFnTest null, false ); - Assert.assertEquals("In Berlin ist es schon Freitag", fn.apply(timestamps[0])); - Assert.assertEquals("In Berlin ist es schon Samstag", fn.apply(timestamps[1])); - Assert.assertEquals("In Berlin ist es schon Mittwoch", fn.apply(timestamps[2])); - Assert.assertEquals("In Berlin ist es schon Donnerstag", fn.apply(timestamps[3])); - Assert.assertEquals("In Berlin ist es schon Sonntag", fn.apply(timestamps[4])); - Assert.assertEquals("In Berlin ist es schon Dienstag", fn.apply(timestamps[5])); + Assert.assertEquals("In Berlin ist es schon Freitag", fn.apply(TIMESTAMPS[0])); + Assert.assertEquals("In Berlin ist es schon Samstag", fn.apply(TIMESTAMPS[1])); + Assert.assertEquals("In Berlin ist es schon Mittwoch", fn.apply(TIMESTAMPS[2])); + Assert.assertEquals("In Berlin ist es schon Donnerstag", fn.apply(TIMESTAMPS[3])); + Assert.assertEquals("In Berlin ist es schon Sonntag", fn.apply(TIMESTAMPS[4])); + Assert.assertEquals("In Berlin ist es schon Dienstag", fn.apply(TIMESTAMPS[5])); testSerde(fn, "'In Berlin ist es schon 'EEEE", DateTimes.inferTzFromString("Europe/Berlin"), "de", Granularities.NONE); } diff --git a/processing/src/test/java/org/apache/druid/query/filter/BoundDimFilterTest.java b/processing/src/test/java/org/apache/druid/query/filter/BoundDimFilterTest.java index c9e18de4bee..f3778658fd6 100644 --- a/processing/src/test/java/org/apache/druid/query/filter/BoundDimFilterTest.java +++ b/processing/src/test/java/org/apache/druid/query/filter/BoundDimFilterTest.java @@ -41,7 +41,7 @@ import java.util.Arrays; @RunWith(Parameterized.class) public class BoundDimFilterTest { - private static final ExtractionFn extractionFn = new RegexDimExtractionFn(".*", false, null); + private static final ExtractionFn EXTRACTION_FN = new RegexDimExtractionFn(".*", false, null); @Parameterized.Parameters public static Iterable constructorFeeder() @@ -64,7 +64,7 @@ public class BoundDimFilterTest StringComparators.ALPHANUMERIC)}, new Object[]{new BoundDimFilter("dimension", "12", "15", true, true, false, null, StringComparators.LEXICOGRAPHIC)}, - new Object[]{new BoundDimFilter("dimension", null, "15", null, true, true, extractionFn, + new Object[]{new BoundDimFilter("dimension", null, "15", null, true, true, EXTRACTION_FN, StringComparators.ALPHANUMERIC)} ); } @@ -95,8 +95,8 @@ public class BoundDimFilterTest BoundDimFilter anotherBoundDimFilter = new BoundDimFilter("dimension", "12", "15", true, null, false, null, StringComparators.LEXICOGRAPHIC); Assert.assertFalse(Arrays.equals(anotherBoundDimFilter.getCacheKey(), boundDimFilter.getCacheKey())); - BoundDimFilter boundDimFilterWithExtract = new BoundDimFilter("dimension", "12", "15", null, null, true, extractionFn, StringComparators.ALPHANUMERIC); - BoundDimFilter boundDimFilterWithExtractCopy = new BoundDimFilter("dimension", "12", "15", false, false, true, extractionFn, StringComparators.ALPHANUMERIC); + BoundDimFilter boundDimFilterWithExtract = new BoundDimFilter("dimension", "12", "15", null, null, true, EXTRACTION_FN, StringComparators.ALPHANUMERIC); + BoundDimFilter boundDimFilterWithExtractCopy = new BoundDimFilter("dimension", "12", "15", false, false, true, EXTRACTION_FN, StringComparators.ALPHANUMERIC); Assert.assertFalse(Arrays.equals(boundDimFilter.getCacheKey(), boundDimFilterWithExtract.getCacheKey())); 
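Note: the mechanical rule applied throughout these hunks is the standard Java constant convention: a field that is both static and final is a constant and takes UPPER_SNAKE_CASE, while mutable static fields (such as hyperUniqueFinalizingPostAgg above) and instance fields keep camelCase. A minimal, self-contained sketch of the distinction; the class and member names here are illustrative, not taken from this patch:

import java.util.regex.Pattern;

class ConstantNamingExample
{
  // static final => constant => UPPER_SNAKE_CASE, matching renames such as
  // extractionFn -> EXTRACTION_FN in BoundDimFilterTest above.
  private static final Pattern MATCH_ALL = Pattern.compile(".*");

  // static but mutable => not a constant => camelCase is kept.
  private static int callCount;

  // instance field => camelCase is kept.
  private final String dimension;

  ConstantNamingExample(String dimension)
  {
    this.dimension = dimension;
  }

  boolean matches()
  {
    callCount++;
    return MATCH_ALL.matcher(dimension).matches();
  }
}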
Assert.assertArrayEquals(boundDimFilterWithExtract.getCacheKey(), boundDimFilterWithExtractCopy.getCacheKey()); } @@ -105,7 +105,7 @@ public class BoundDimFilterTest public void testHashCode() { BoundDimFilter boundDimFilter = new BoundDimFilter("dimension", "12", "15", null, null, true, null, StringComparators.ALPHANUMERIC); - BoundDimFilter boundDimFilterWithExtract = new BoundDimFilter("dimension", "12", "15", null, null, true, extractionFn, StringComparators.ALPHANUMERIC); + BoundDimFilter boundDimFilterWithExtract = new BoundDimFilter("dimension", "12", "15", null, null, true, EXTRACTION_FN, StringComparators.ALPHANUMERIC); Assert.assertNotEquals(boundDimFilter.hashCode(), boundDimFilterWithExtract.hashCode()); } diff --git a/processing/src/test/java/org/apache/druid/query/filter/GetDimensionRangeSetTest.java b/processing/src/test/java/org/apache/druid/query/filter/GetDimensionRangeSetTest.java index 63e672fa63f..6057f96bb0a 100644 --- a/processing/src/test/java/org/apache/druid/query/filter/GetDimensionRangeSetTest.java +++ b/processing/src/test/java/org/apache/druid/query/filter/GetDimensionRangeSetTest.java @@ -83,8 +83,8 @@ public class GetDimensionRangeSetTest null ); - private static final RangeSet all = rangeSet(ImmutableList.of(Range.all())); - private static final RangeSet empty = rangeSet(ImmutableList.of()); + private static final RangeSet ALL = rangeSet(ImmutableList.of(Range.all())); + private static final RangeSet EMPTY = rangeSet(ImmutableList.of()); @Test public void testSimpleFilter() @@ -123,7 +123,7 @@ public class GetDimensionRangeSetTest public void testAndFilter() { DimFilter and1 = new AndDimFilter(ImmutableList.of(selector1, selector2, in1)); - Assert.assertEquals(empty, and1.getDimensionRangeSet("dim1")); + Assert.assertEquals(EMPTY, and1.getDimensionRangeSet("dim1")); Assert.assertNull(and1.getDimensionRangeSet("dim2")); DimFilter and2 = new AndDimFilter(ImmutableList.of(selector3, bound1, other1)); @@ -133,7 +133,7 @@ public class GetDimensionRangeSetTest DimFilter and3 = new AndDimFilter(ImmutableList.of(in2, bound1, bound2, bound3, bound4)); RangeSet expected3 = rangeSet(Range.openClosed("notincluded", "tillend")); Assert.assertEquals(expected3, and3.getDimensionRangeSet("dim1")); - Assert.assertEquals(empty, and3.getDimensionRangeSet("dim2")); + Assert.assertEquals(EMPTY, and3.getDimensionRangeSet("dim2")); DimFilter and4 = new AndDimFilter(ImmutableList.of(in3, bound3)); RangeSet expected4 = rangeSet(point("null")); @@ -158,7 +158,7 @@ public class GetDimensionRangeSetTest Assert.assertEquals(expected2, or2.getDimensionRangeSet("dim1")); DimFilter or3 = new OrDimFilter(ImmutableList.of(bound1, bound2, bound3)); - Assert.assertEquals(all, or3.getDimensionRangeSet("dim1")); + Assert.assertEquals(ALL, or3.getDimensionRangeSet("dim1")); DimFilter or4 = new OrDimFilter(ImmutableList.of(selector1, selector2, selector3, selector4, selector5)); Assert.assertNull(or4.getDimensionRangeSet("dim1")); diff --git a/processing/src/test/java/org/apache/druid/query/groupby/DefaultGroupByQueryMetricsTest.java b/processing/src/test/java/org/apache/druid/query/groupby/DefaultGroupByQueryMetricsTest.java index 379c7e04d31..b0e6fa6e768 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/DefaultGroupByQueryMetricsTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/DefaultGroupByQueryMetricsTest.java @@ -56,7 +56,7 @@ public class DefaultGroupByQueryMetricsTest DefaultGroupByQueryMetrics queryMetrics = new 
DefaultGroupByQueryMetrics(TestHelper.makeJsonMapper()); GroupByQuery.Builder builder = GroupByQuery .builder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04").setDimensions(new ExtractionDimensionSpec( "quality", "alias", @@ -67,7 +67,7 @@ public class DefaultGroupByQueryMetricsTest true, false ) - )).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + )).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setDimFilter(new SelectorDimFilter("quality", "mezzanine", null)) .setContext(ImmutableMap.of("bySegment", true)); @@ -81,7 +81,7 @@ public class DefaultGroupByQueryMetricsTest Assert.assertTrue(actualEvent.containsKey("timestamp")); Assert.assertEquals("", actualEvent.get("host")); Assert.assertEquals("", actualEvent.get("service")); - Assert.assertEquals(QueryRunnerTestHelper.dataSource, actualEvent.get(DruidMetrics.DATASOURCE)); + Assert.assertEquals(QueryRunnerTestHelper.DATA_SOURCE, actualEvent.get(DruidMetrics.DATASOURCE)); Assert.assertEquals(query.getType(), actualEvent.get(DruidMetrics.TYPE)); Interval expectedInterval = Intervals.of("2011-04-02/2011-04-04"); Assert.assertEquals(Collections.singletonList(expectedInterval.toString()), actualEvent.get(DruidMetrics.INTERVAL)); diff --git a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryMergeBufferTest.java b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryMergeBufferTest.java index d3183f5d53c..5a0b51dbe2f 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryMergeBufferTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryMergeBufferTest.java @@ -140,15 +140,15 @@ public class GroupByQueryMergeBufferTest configSupplier, new GroupByStrategyV1( configSupplier, - new GroupByQueryEngine(configSupplier, bufferPool), + new GroupByQueryEngine(configSupplier, BUFFER_POOL), QueryRunnerTestHelper.NOOP_QUERYWATCHER, - bufferPool + BUFFER_POOL ), new GroupByStrategyV2( PROCESSING_CONFIG, configSupplier, - bufferPool, - mergeBufferPool, + BUFFER_POOL, + MERGE_BUFFER_POOL, mapper, QueryRunnerTestHelper.NOOP_QUERYWATCHER ) @@ -160,17 +160,17 @@ public class GroupByQueryMergeBufferTest return new GroupByQueryRunnerFactory(strategySelector, toolChest); } - private static final CloseableStupidPool bufferPool = new CloseableStupidPool<>( + private static final CloseableStupidPool BUFFER_POOL = new CloseableStupidPool<>( "GroupByQueryEngine-bufferPool", () -> ByteBuffer.allocateDirect(PROCESSING_CONFIG.intermediateComputeSizeBytes()) ); - private static final TestBlockingPool mergeBufferPool = new TestBlockingPool( + private static final TestBlockingPool MERGE_BUFFER_POOL = new TestBlockingPool( () -> ByteBuffer.allocateDirect(PROCESSING_CONFIG.intermediateComputeSizeBytes()), PROCESSING_CONFIG.getNumMergeBuffers() ); - private static final GroupByQueryRunnerFactory factory = makeQueryRunnerFactory( + private static final GroupByQueryRunnerFactory FACTORY = makeQueryRunnerFactory( GroupByQueryRunnerTest.DEFAULT_MAPPER, new GroupByQueryConfig() { @@ -187,15 +187,15 @@ public class GroupByQueryMergeBufferTest @AfterClass public static void teardownClass() { - bufferPool.close(); - mergeBufferPool.close(); + BUFFER_POOL.close(); + MERGE_BUFFER_POOL.close(); } @Parameters(name = "{0}") public static 
Collection constructorFeeder() { final List args = new ArrayList<>(); - for (QueryRunner runner : QueryRunnerTestHelper.makeQueryRunners(factory)) { + for (QueryRunner runner : QueryRunnerTestHelper.makeQueryRunners(FACTORY)) { args.add(new Object[]{runner}); } return args; @@ -203,13 +203,13 @@ public class GroupByQueryMergeBufferTest public GroupByQueryMergeBufferTest(QueryRunner runner) { - this.runner = factory.mergeRunners(Execs.directExecutor(), ImmutableList.of(runner)); + this.runner = FACTORY.mergeRunners(Execs.directExecutor(), ImmutableList.of(runner)); } @Before public void setup() { - mergeBufferPool.resetMinRemainBufferNum(); + MERGE_BUFFER_POOL.resetMinRemainBufferNum(); } @Test @@ -217,17 +217,17 @@ public class GroupByQueryMergeBufferTest { final GroupByQuery query = GroupByQuery .builder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setGranularity(Granularities.ALL) - .setInterval(QueryRunnerTestHelper.firstToThird) + .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD) .setAggregatorSpecs(new LongSumAggregatorFactory("rows", "rows")) .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, TIMEOUT)) .build(); - GroupByQueryRunnerTestHelper.runQuery(factory, runner, query); + GroupByQueryRunnerTestHelper.runQuery(FACTORY, runner, query); - Assert.assertEquals(2, mergeBufferPool.getMinRemainBufferNum()); - Assert.assertEquals(3, mergeBufferPool.getPoolSize()); + Assert.assertEquals(2, MERGE_BUFFER_POOL.getMinRemainBufferNum()); + Assert.assertEquals(3, MERGE_BUFFER_POOL.getPoolSize()); } @Test @@ -238,24 +238,24 @@ public class GroupByQueryMergeBufferTest .setDataSource( new QueryDataSource( GroupByQuery.builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setInterval(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD) .setGranularity(Granularities.ALL) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.rowsCount)) + .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.ROWS_COUNT)) .build() ) ) .setGranularity(Granularities.ALL) - .setInterval(QueryRunnerTestHelper.firstToThird) + .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD) .setAggregatorSpecs(new LongSumAggregatorFactory("rows", "rows")) .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, TIMEOUT)) .build(); - GroupByQueryRunnerTestHelper.runQuery(factory, runner, query); + GroupByQueryRunnerTestHelper.runQuery(FACTORY, runner, query); - Assert.assertEquals(1, mergeBufferPool.getMinRemainBufferNum()); - Assert.assertEquals(3, mergeBufferPool.getPoolSize()); + Assert.assertEquals(1, MERGE_BUFFER_POOL.getMinRemainBufferNum()); + Assert.assertEquals(3, MERGE_BUFFER_POOL.getPoolSize()); } @Test @@ -268,34 +268,34 @@ public class GroupByQueryMergeBufferTest GroupByQuery.builder() .setDataSource( GroupByQuery.builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setInterval(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD) .setGranularity(Granularities.ALL) .setDimensions( new DefaultDimensionSpec("quality", "alias"), new DefaultDimensionSpec("market", null) ) - .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.rowsCount)) + .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.ROWS_COUNT)) .build() ) - 
.setInterval(QueryRunnerTestHelper.firstToThird) + .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD) .setGranularity(Granularities.ALL) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.rowsCount)) + .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.ROWS_COUNT)) .build() ) ) .setGranularity(Granularities.ALL) - .setInterval(QueryRunnerTestHelper.firstToThird) + .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD) .setAggregatorSpecs(new LongSumAggregatorFactory("rows", "rows")) .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, TIMEOUT)) .build(); - GroupByQueryRunnerTestHelper.runQuery(factory, runner, query); + GroupByQueryRunnerTestHelper.runQuery(FACTORY, runner, query); // This should be 0 because the broker needs 2 buffers and the queryable node needs one. - Assert.assertEquals(0, mergeBufferPool.getMinRemainBufferNum()); - Assert.assertEquals(3, mergeBufferPool.getPoolSize()); + Assert.assertEquals(0, MERGE_BUFFER_POOL.getMinRemainBufferNum()); + Assert.assertEquals(3, MERGE_BUFFER_POOL.getPoolSize()); } @Test @@ -310,8 +310,8 @@ public class GroupByQueryMergeBufferTest GroupByQuery.builder() .setDataSource( GroupByQuery.builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setInterval(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD) .setGranularity(Granularities.ALL) .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("quality", "alias"), @@ -319,35 +319,35 @@ public class GroupByQueryMergeBufferTest new DefaultDimensionSpec("placement", null) )) .setAggregatorSpecs(Collections.singletonList( - QueryRunnerTestHelper.rowsCount)) + QueryRunnerTestHelper.ROWS_COUNT)) .build() ) - .setInterval(QueryRunnerTestHelper.firstToThird) + .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD) .setGranularity(Granularities.ALL) .setDimensions( new DefaultDimensionSpec("quality", "alias"), new DefaultDimensionSpec("market", null) ) - .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.rowsCount)) + .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.ROWS_COUNT)) .build() ) - .setInterval(QueryRunnerTestHelper.firstToThird) + .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD) .setGranularity(Granularities.ALL) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.rowsCount)) + .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.ROWS_COUNT)) .build() ) ) .setGranularity(Granularities.ALL) - .setInterval(QueryRunnerTestHelper.firstToThird) + .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD) .setAggregatorSpecs(new LongSumAggregatorFactory("rows", "rows")) .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, TIMEOUT)) .build(); - GroupByQueryRunnerTestHelper.runQuery(factory, runner, query); + GroupByQueryRunnerTestHelper.runQuery(FACTORY, runner, query); // This should be 0 because the broker needs 2 buffers and the queryable node needs one. 
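// (Same bound as the shallower nested case above: however deep the subqueries nest, the
// broker holds at most two merge buffers at a time, so the three-buffer pool still bottoms
// out at zero while the third buffer is in use on the queryable node.)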
- Assert.assertEquals(0, mergeBufferPool.getMinRemainBufferNum()); - Assert.assertEquals(3, mergeBufferPool.getPoolSize()); + Assert.assertEquals(0, MERGE_BUFFER_POOL.getMinRemainBufferNum()); + Assert.assertEquals(3, MERGE_BUFFER_POOL.getPoolSize()); } } diff --git a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryQueryToolChestTest.java b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryQueryToolChestTest.java index 5195afdae5f..d843060f27b 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryQueryToolChestTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryQueryToolChestTest.java @@ -75,30 +75,30 @@ public class GroupByQueryQueryToolChestTest { final GroupByQuery query1 = GroupByQuery .builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setPostAggregatorSpecs( ImmutableList.of( new ExpressionPostAggregator("post", "alias + 'x'", null, TestExprMacroTable.INSTANCE) ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); final GroupByQuery query2 = GroupByQuery .builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setPostAggregatorSpecs( ImmutableList.of( new ExpressionPostAggregator("post", "alias - 'x'", null, TestExprMacroTable.INSTANCE) ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); final CacheStrategy strategy1 = new GroupByQueryQueryToolChest( @@ -123,16 +123,16 @@ public class GroupByQueryQueryToolChestTest { final GroupByQuery query1 = GroupByQuery .builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setPostAggregatorSpecs( ImmutableList.of( new ExpressionPostAggregator("post", "alias + 'x'", null, TestExprMacroTable.INSTANCE) ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of( @@ -145,16 +145,16 @@ public class GroupByQueryQueryToolChestTest final GroupByQuery query2 = GroupByQuery .builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + 
.setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setPostAggregatorSpecs( ImmutableList.of( new ExpressionPostAggregator("post", "alias - 'x'", null, TestExprMacroTable.INSTANCE) ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of( @@ -187,16 +187,16 @@ public class GroupByQueryQueryToolChestTest { final GroupByQuery query1 = GroupByQuery .builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setPostAggregatorSpecs( ImmutableList.of( new ExpressionPostAggregator("post", "alias + 'x'", null, TestExprMacroTable.INSTANCE) ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of( @@ -205,21 +205,21 @@ public class GroupByQueryQueryToolChestTest Integer.MAX_VALUE ) ) - .setHavingSpec(new GreaterThanHavingSpec(QueryRunnerTestHelper.uniqueMetric, 8)) + .setHavingSpec(new GreaterThanHavingSpec(QueryRunnerTestHelper.UNIQUE_METRIC, 8)) .build(); final GroupByQuery query2 = GroupByQuery .builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setPostAggregatorSpecs( ImmutableList.of( new ExpressionPostAggregator("post", "alias + 'x'", null, TestExprMacroTable.INSTANCE) ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of( @@ -228,7 +228,7 @@ public class GroupByQueryQueryToolChestTest Integer.MAX_VALUE ) ) - .setHavingSpec(new GreaterThanHavingSpec(QueryRunnerTestHelper.uniqueMetric, 10)) + .setHavingSpec(new GreaterThanHavingSpec(QueryRunnerTestHelper.UNIQUE_METRIC, 10)) .build(); final CacheStrategy strategy1 = new GroupByQueryQueryToolChest( @@ -275,16 +275,16 @@ public class GroupByQueryQueryToolChestTest final GroupByQuery query1 = GroupByQuery .builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new 
LongSumAggregatorFactory("idx", "index")) .setPostAggregatorSpecs( ImmutableList.of( new ExpressionPostAggregator("post", "alias + 'x'", null, TestExprMacroTable.INSTANCE) ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of( @@ -298,16 +298,16 @@ public class GroupByQueryQueryToolChestTest final GroupByQuery query2 = GroupByQuery .builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setPostAggregatorSpecs( ImmutableList.of( new ExpressionPostAggregator("post", "alias + 'x'", null, TestExprMacroTable.INSTANCE) ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of( @@ -370,16 +370,16 @@ public class GroupByQueryQueryToolChestTest ); final GroupByQuery query1 = GroupByQuery .builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setPostAggregatorSpecs( ImmutableList.of( new ExpressionPostAggregator("post", "alias + 'x'", null, TestExprMacroTable.INSTANCE) ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of( @@ -393,16 +393,16 @@ public class GroupByQueryQueryToolChestTest final GroupByQuery query2 = GroupByQuery .builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setPostAggregatorSpecs( ImmutableList.of( new ExpressionPostAggregator("post", "alias + 'x'", null, TestExprMacroTable.INSTANCE) ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of( @@ -436,21 +436,21 @@ public class GroupByQueryQueryToolChestTest { final GroupByQuery query1 = GroupByQuery .builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("quality", "alias"), new DefaultDimensionSpec("market", "market") )) .setAggregatorSpecs( Arrays.asList( - QueryRunnerTestHelper.rowsCount, 
+ QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"), new FloatSumAggregatorFactory("idxFloat", "indexFloat"), new DoubleSumAggregatorFactory("idxDouble", "index") ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setSubtotalsSpec(ImmutableList.of( ImmutableList.of("alias"), ImmutableList.of("market"), @@ -460,21 +460,21 @@ public class GroupByQueryQueryToolChestTest final GroupByQuery query2 = GroupByQuery .builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("quality", "alias"), new DefaultDimensionSpec("market", "market") )) .setAggregatorSpecs( Arrays.asList( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"), new FloatSumAggregatorFactory("idxFloat", "indexFloat"), new DoubleSumAggregatorFactory("idxDouble", "index") ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setSubtotalsSpec(ImmutableList.of( ImmutableList.of("alias"), ImmutableList.of() @@ -512,12 +512,12 @@ public class GroupByQueryQueryToolChestTest { final GroupByQuery query = GroupByQuery .builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(Collections.singletonList(DefaultDimensionSpec.of("test"))) - .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.rowsCount)) + .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.ROWS_COUNT)) .setPostAggregatorSpecs(Collections.singletonList(new ConstantPostAggregator("post", 10))) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); final GroupByQueryQueryToolChest toolChest = new GroupByQueryQueryToolChest( @@ -648,21 +648,21 @@ public class GroupByQueryQueryToolChestTest { final GroupByQuery query1 = GroupByQuery .builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(Collections.singletonList( new DefaultDimensionSpec("test", "test", valueType) )) .setAggregatorSpecs( Arrays.asList( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, getComplexAggregatorFactoryForValueType(valueType) ) ) .setPostAggregatorSpecs( ImmutableList.of(new ConstantPostAggregator("post", 10)) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); CacheStrategy strategy = diff --git a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerFailureTest.java b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerFailureTest.java index 2f855384283..8585aae4b55 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerFailureTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerFailureTest.java @@ -103,15 +103,15 @@ public class GroupByQueryRunnerFailureTest configSupplier, new GroupByStrategyV1( configSupplier, - new 
GroupByQueryEngine(configSupplier, bufferPool), + new GroupByQueryEngine(configSupplier, BUFFER_POOL), QueryRunnerTestHelper.NOOP_QUERYWATCHER, - bufferPool + BUFFER_POOL ), new GroupByStrategyV2( DEFAULT_PROCESSING_CONFIG, configSupplier, - bufferPool, - mergeBufferPool, + BUFFER_POOL, + MERGE_BUFFER_POOL, mapper, QueryRunnerTestHelper.NOOP_QUERYWATCHER ) @@ -123,7 +123,7 @@ public class GroupByQueryRunnerFailureTest return new GroupByQueryRunnerFactory(strategySelector, toolChest); } - private static final CloseableStupidPool bufferPool = new CloseableStupidPool<>( + private static final CloseableStupidPool BUFFER_POOL = new CloseableStupidPool<>( "GroupByQueryEngine-bufferPool", new Supplier() { @@ -134,7 +134,7 @@ public class GroupByQueryRunnerFailureTest } } ); - private static final CloseableDefaultBlockingPool mergeBufferPool = new CloseableDefaultBlockingPool<>( + private static final CloseableDefaultBlockingPool MERGE_BUFFER_POOL = new CloseableDefaultBlockingPool<>( new Supplier() { @Override @@ -146,7 +146,7 @@ public class GroupByQueryRunnerFailureTest DEFAULT_PROCESSING_CONFIG.getNumMergeBuffers() ); - private static final GroupByQueryRunnerFactory factory = makeQueryRunnerFactory( + private static final GroupByQueryRunnerFactory FACTORY = makeQueryRunnerFactory( GroupByQueryRunnerTest.DEFAULT_MAPPER, new GroupByQueryConfig() { @@ -163,15 +163,15 @@ public class GroupByQueryRunnerFailureTest @AfterClass public static void teardownClass() { - bufferPool.close(); - mergeBufferPool.close(); + BUFFER_POOL.close(); + MERGE_BUFFER_POOL.close(); } @Parameters(name = "{0}") public static Collection constructorFeeder() { final List args = new ArrayList<>(); - for (QueryRunner runner : QueryRunnerTestHelper.makeQueryRunners(factory)) { + for (QueryRunner runner : QueryRunnerTestHelper.makeQueryRunners(FACTORY)) { args.add(new Object[]{runner}); } return args; @@ -179,7 +179,7 @@ public class GroupByQueryRunnerFailureTest public GroupByQueryRunnerFailureTest(QueryRunner runner) { - this.runner = factory.mergeRunners(Execs.directExecutor(), ImmutableList.of(runner)); + this.runner = FACTORY.mergeRunners(Execs.directExecutor(), ImmutableList.of(runner)); } @Test(timeout = 60_000L) @@ -194,21 +194,21 @@ public class GroupByQueryRunnerFailureTest .setDataSource( new QueryDataSource( GroupByQuery.builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setInterval(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD) .setGranularity(Granularities.ALL) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.rowsCount)) + .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.ROWS_COUNT)) .build() ) ) .setGranularity(Granularities.ALL) - .setInterval(QueryRunnerTestHelper.firstToThird) + .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD) .setAggregatorSpecs(new LongSumAggregatorFactory("rows", "rows")) .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, 500)) .build(); - GroupByQueryRunnerTestHelper.runQuery(factory, runner, query); + GroupByQueryRunnerTestHelper.runQuery(FACTORY, runner, query); } @Test(timeout = 60_000L) @@ -223,30 +223,30 @@ public class GroupByQueryRunnerFailureTest GroupByQuery.builder() .setDataSource( GroupByQuery.builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setInterval(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + 
.setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD) .setGranularity(Granularities.ALL) .setDimensions( new DefaultDimensionSpec("quality", "alias"), new DefaultDimensionSpec("market", null) ) - .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.rowsCount)) + .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.ROWS_COUNT)) .build() ) - .setInterval(QueryRunnerTestHelper.firstToThird) + .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD) .setGranularity(Granularities.ALL) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.rowsCount)) + .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.ROWS_COUNT)) .build() ) ) .setGranularity(Granularities.ALL) - .setInterval(QueryRunnerTestHelper.firstToThird) + .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD) .setAggregatorSpecs(new LongSumAggregatorFactory("rows", "rows")) .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, 500)) .build(); - GroupByQueryRunnerTestHelper.runQuery(factory, runner, query); + GroupByQueryRunnerTestHelper.runQuery(FACTORY, runner, query); } @Test(timeout = 60_000L, expected = InsufficientResourcesException.class) @@ -257,24 +257,24 @@ public class GroupByQueryRunnerFailureTest .setDataSource( new QueryDataSource( GroupByQuery.builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setInterval(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD) .setGranularity(Granularities.ALL) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.rowsCount)) + .setAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.ROWS_COUNT)) .build() ) ) .setGranularity(Granularities.ALL) - .setInterval(QueryRunnerTestHelper.firstToThird) + .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD) .setAggregatorSpecs(new LongSumAggregatorFactory("rows", "rows")) .setContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, 500)) .build(); List> holder = null; try { - holder = mergeBufferPool.takeBatch(1, 10); - GroupByQueryRunnerTestHelper.runQuery(factory, runner, query); + holder = MERGE_BUFFER_POOL.takeBatch(1, 10); + GroupByQueryRunnerTestHelper.runQuery(FACTORY, runner, query); } finally { if (holder != null) { diff --git a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerTest.java b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerTest.java index b233fd8b039..6270f6bf822 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryRunnerTest.java @@ -184,7 +184,7 @@ public class GroupByQueryRunnerTest } }; - private static final Closer resourceCloser = Closer.create(); + private static final Closer RESOURCE_CLOSER = Closer.create(); private final QueryRunner runner; private final String runnerName; @@ -416,7 +416,7 @@ public class GroupByQueryRunnerTest for (GroupByQueryConfig config : testConfigs()) { final Pair factoryAndCloser = makeQueryRunnerFactory(config); final GroupByQueryRunnerFactory factory = factoryAndCloser.lhs; - resourceCloser.register(factoryAndCloser.rhs); + RESOURCE_CLOSER.register(factoryAndCloser.rhs); for (QueryRunner runner : QueryRunnerTestHelper.makeQueryRunners(factory)) { for (boolean vectorize : ImmutableList.of(false, true)) { final String testName = 
StringUtils.format("config=%s, runner=%s, vectorize=%s", config, runner, vectorize); @@ -435,7 +435,7 @@ public class GroupByQueryRunnerTest @AfterClass public static void teardown() throws IOException { - resourceCloser.close(); + RESOURCE_CLOSER.close(); } public GroupByQueryRunnerTest( @@ -457,16 +457,16 @@ public class GroupByQueryRunnerTest public void testGroupBy() { GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) .setAggregatorSpecs( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"), new FloatSumAggregatorFactory("idxFloat", "indexFloat"), new DoubleSumAggregatorFactory("idxDouble", "index") ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -736,13 +736,13 @@ public class GroupByQueryRunnerTest cannotVectorize(); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions( new DefaultDimensionSpec("nonexistent0", "alias0"), new ExtractionDimensionSpec("nonexistent1", "alias1", new StringFormatExtractionFn("foo")) - ).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount) - .setGranularity(QueryRunnerTestHelper.allGran) + ).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); List expectedResults = Collections.singletonList( @@ -763,16 +763,16 @@ public class GroupByQueryRunnerTest public void testGroupByWithStringPostAggregator() { GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setPostAggregatorSpecs( ImmutableList.of( new ExpressionPostAggregator("post", "alias + 'x'", null, TestExprMacroTable.INSTANCE) ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of( @@ -1015,8 +1015,8 @@ public class GroupByQueryRunnerTest cannotVectorize(); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setVirtualColumns( new ExpressionVirtualColumn( "vc", @@ -1026,8 +1026,8 @@ public class GroupByQueryRunnerTest ) ) .setDimensions(new DefaultDimensionSpec("vc", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", 
"index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -1078,10 +1078,10 @@ public class GroupByQueryRunnerTest public void testGroupByWithDurationGranularity() { GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setGranularity(new DurationGranularity(86400L, 0L)) .build(); @@ -1118,11 +1118,11 @@ public class GroupByQueryRunnerTest expectedException.expectMessage("[alias] already defined"); makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("alias", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("alias", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); } @@ -1133,11 +1133,11 @@ public class GroupByQueryRunnerTest return; } GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .overrideContext(ImmutableMap.of("sortByDimsFirst", true)) .build(); @@ -1178,11 +1178,11 @@ public class GroupByQueryRunnerTest public void testGroupByWithChunkPeriod() { GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.allGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .overrideContext(ImmutableMap.of("chunkPeriod", "P1D")) .build(); @@ -1206,10 +1206,10 @@ public class GroupByQueryRunnerTest public void testGroupByNoAggregators() { GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - 
.setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -1245,11 +1245,11 @@ public class GroupByQueryRunnerTest cannotVectorize(); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("placementish", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.allGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); List expectedResults = Arrays.asList( @@ -1275,14 +1275,14 @@ public class GroupByQueryRunnerTest cannotVectorize(); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimFilter(new SelectorDimFilter("placementish", "a", null)) .setDimensions( new DefaultDimensionSpec("placementish", "alias"), new DefaultDimensionSpec("placementish", "alias2") - ).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.allGran) + ).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); List expectedResults = Arrays.asList( @@ -1347,13 +1347,13 @@ public class GroupByQueryRunnerTest cannotVectorize(); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions( new DefaultDimensionSpec("placementish", "alias"), new DefaultDimensionSpec("quality", "quality") - ).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.allGran) + ).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); List expectedResults = Arrays.asList( @@ -1586,13 +1586,13 @@ public class GroupByQueryRunnerTest cannotVectorize(); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions( new DefaultDimensionSpec("quality", "quality"), new DefaultDimensionSpec("placementish", "alias") - ).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.allGran) + ).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); List expectedResults = Arrays.asList( @@ -1822,11 +1822,11 @@ public class GroupByQueryRunnerTest public void testGroupByMaxRowsLimitContextOverride() { GroupByQuery query = makeQueryBuilder() - 
.setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .overrideContext(ImmutableMap.of("maxResults", 1)) .build(); @@ -1883,11 +1883,11 @@ public class GroupByQueryRunnerTest public void testGroupByTimeoutContextOverride() { GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .overrideContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, 60000)) .build(); @@ -1921,11 +1921,11 @@ public class GroupByQueryRunnerTest public void testGroupByMaxOnDiskStorageContextOverride() { GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .overrideContext(ImmutableMap.of("maxOnDiskStorage", 0, "bufferGrouperMaxSize", 1)) .build(); @@ -1983,11 +1983,11 @@ public class GroupByQueryRunnerTest public void testNotEnoughDictionarySpaceThroughContextOverride() { GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .overrideContext(ImmutableMap.of("maxOnDiskStorage", 0, "maxMergingDictionarySize", 1)) .build(); @@ -2045,11 +2045,11 @@ public class GroupByQueryRunnerTest public void testNotEnoughDiskSpaceThroughContextOverride() { GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new 
DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .overrideContext(ImmutableMap.of("maxOnDiskStorage", 1, "maxMergingDictionarySize", 1)) .build(); @@ -2112,10 +2112,10 @@ public class GroupByQueryRunnerTest public void testSubqueryWithOuterMaxOnDiskStorageContextOverride() { final GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnIntervalSpec) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of(new OrderByColumnSpec("alias", OrderByColumnSpec.Direction.ASCENDING)), @@ -2132,9 +2132,9 @@ public class GroupByQueryRunnerTest final GroupByQuery query = makeQueryBuilder() .setDataSource(subquery) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new ArrayList<>()).setAggregatorSpecs(new CountAggregatorFactory("count")) - .setGranularity(QueryRunnerTestHelper.allGran) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .overrideContext(ImmutableMap.of("maxOnDiskStorage", 0, "bufferGrouperMaxSize", 0)) .build(); @@ -2171,13 +2171,13 @@ public class GroupByQueryRunnerTest map.put("technology", "technology0"); map.put("travel", "travel0"); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird).setDimensions(new ExtractionDimensionSpec( + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD).setDimensions(new ExtractionDimensionSpec( "quality", "alias", new LookupExtractionFn(new MapLookupExtractor(map, false), false, null, false, false) - )).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + )).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -2242,13 +2242,13 @@ public class GroupByQueryRunnerTest map.put("technology", "technology0"); map.put("travel", "travel0"); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird).setDimensions(new ExtractionDimensionSpec( + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD).setDimensions(new ExtractionDimensionSpec( "quality", "alias", new LookupExtractionFn(new MapLookupExtractor(map, false), true, null, false, false) - )).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + )).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ 
-2310,8 +2310,8 @@ public class GroupByQueryRunnerTest map.put("technology", "technology0"); map.put("travel", "travel0"); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions( new ExtractionDimensionSpec( "quality", @@ -2319,8 +2319,8 @@ public class GroupByQueryRunnerTest new LookupExtractionFn(new MapLookupExtractor(map, false), true, null, true, false) ) ) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -2364,13 +2364,13 @@ public class GroupByQueryRunnerTest map.put("technology", "technology0"); map.put("travel", "travel0"); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird).setDimensions(new ExtractionDimensionSpec( + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD).setDimensions(new ExtractionDimensionSpec( "quality", "alias", new LookupExtractionFn(new MapLookupExtractor(map, false), false, "MISSING", true, false) - )).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + )).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -2413,13 +2413,13 @@ public class GroupByQueryRunnerTest map.put("technology", "technology0"); map.put("travel", "travel0"); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird).setDimensions(new ExtractionDimensionSpec( + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD).setDimensions(new ExtractionDimensionSpec( "quality", "alias", new LookupExtractionFn(new MapLookupExtractor(map, false), false, null, true, false) - )).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + )).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -2470,10 +2470,10 @@ public class GroupByQueryRunnerTest public void testGroupByWithUniques() { GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, QueryRunnerTestHelper.qualityUniques) - .setGranularity(QueryRunnerTestHelper.allGran) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, QueryRunnerTestHelper.QUALITY_UNIQUES) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); List expectedResults = Collections.singletonList( @@ -2495,9 +2495,9 @@ public 
class GroupByQueryRunnerTest public void testGroupByWithUniquesAndPostAggWithSameName() { GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new HyperUniquesAggregatorFactory( + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new HyperUniquesAggregatorFactory( "quality_uniques", "quality_uniques" )) @@ -2506,7 +2506,7 @@ public class GroupByQueryRunnerTest new HyperUniqueFinalizingPostAggregator("quality_uniques", "quality_uniques") ) ) - .setGranularity(QueryRunnerTestHelper.allGran) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); List expectedResults = Collections.singletonList( @@ -2531,10 +2531,10 @@ public class GroupByQueryRunnerTest cannotVectorize(); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, QueryRunnerTestHelper.qualityCardinality) - .setGranularity(QueryRunnerTestHelper.allGran) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, QueryRunnerTestHelper.QUALITY_CARDINALITY) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); List expectedResults = Collections.singletonList( @@ -2559,14 +2559,14 @@ public class GroupByQueryRunnerTest cannotVectorize(); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnIntervalSpec) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setDimensions(new DefaultDimensionSpec("market", "market")) .setAggregatorSpecs( new LongFirstAggregatorFactory("first", "index"), new LongLastAggregatorFactory("last", "index") ) - .setGranularity(QueryRunnerTestHelper.monthGran) + .setGranularity(QueryRunnerTestHelper.MONTH_GRAN) .build(); List expectedResults = Arrays.asList( @@ -2649,17 +2649,17 @@ public class GroupByQueryRunnerTest cannotVectorize(); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.emptyInterval) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.EMPTY_INTERVAL) .setDimensions(new DefaultDimensionSpec("market", "market")) .setAggregatorSpecs( - QueryRunnerTestHelper.rowsCount, - QueryRunnerTestHelper.indexLongSum, - QueryRunnerTestHelper.qualityCardinality, + QueryRunnerTestHelper.ROWS_COUNT, + QueryRunnerTestHelper.INDEX_LONG_SUM, + QueryRunnerTestHelper.QUALITY_CARDINALITY, new LongFirstAggregatorFactory("first", "index"), new LongLastAggregatorFactory("last", "index") ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = ImmutableList.of(); @@ -2688,10 +2688,10 @@ public class GroupByQueryRunnerTest } }; GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + 
.setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setDimensions(new ExtractionDimensionSpec("quality", "alias", nullExtractionFn)) .build(); @@ -2747,10 +2747,10 @@ public class GroupByQueryRunnerTest }; GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setDimensions(new ExtractionDimensionSpec("quality", "alias", emptyStringExtractionFn)) .build(); @@ -2787,10 +2787,10 @@ public class GroupByQueryRunnerTest DateTimeZone tz = DateTimes.inferTzFromString("America/Los_Angeles"); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-03-31T00:00:00-07:00/2011-04-02T00:00:00-07:00") .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory( + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory( "idx", "index" )) @@ -2995,10 +2995,10 @@ public class GroupByQueryRunnerTest public void testMergeResults() { GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04") .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)); final GroupByQuery fullQuery = builder.build(); @@ -3072,10 +3072,10 @@ public class GroupByQueryRunnerTest private void doTestMergeResultsWithValidLimit(final int limit) { GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04") .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setLimit(limit); @@ -3188,10 +3188,10 @@ public class GroupByQueryRunnerTest { final int limit = 14; GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setInterval(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + 
.setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setGranularity(Granularities.DAY) .setLimit(limit) .addOrderByColumn("idx", OrderByColumnSpec.Direction.DESCENDING); @@ -3233,8 +3233,8 @@ public class GroupByQueryRunnerTest final int limit = 14; GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setInterval(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD) .setVirtualColumns( new ExpressionVirtualColumn( "expr", @@ -3244,7 +3244,7 @@ public class GroupByQueryRunnerTest ) ) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "expr")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "expr")) .setGranularity(Granularities.DAY) .setLimit(limit) .addOrderByColumn("idx", OrderByColumnSpec.Direction.DESCENDING); @@ -3282,10 +3282,10 @@ public class GroupByQueryRunnerTest public void testMergeResultsWithNegativeLimit() { GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04") .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setLimit(-1); @@ -3303,10 +3303,10 @@ public class GroupByQueryRunnerTest }; GroupByQuery baseQuery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04") .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .build(); @@ -3381,10 +3381,10 @@ public class GroupByQueryRunnerTest cannotVectorize(); GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04") .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .addOrderByColumn("rows") .addOrderByColumn("alias", OrderByColumnSpec.Direction.DESCENDING) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)); @@ -3414,7 +3414,7 @@ public class GroupByQueryRunnerTest // Now try it with an expression based aggregator. 
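To make the comment above concrete: the test first sums a stored column, then swaps in an expression evaluated per row. Below is a minimal sketch of the two DoubleSumAggregatorFactory forms, mirroring the constructor calls visible in the surrounding hunks; the import paths and the wrapper class are assumptions for illustration, not part of the patch.

import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory;
import org.apache.druid.query.expression.TestExprMacroTable;

public class ExprAggregatorSketch
{
  public static void main(String[] args)
  {
    // Field-based form: sums the stored "index" column directly.
    AggregatorFactory fromColumn = new DoubleSumAggregatorFactory("idx", "index");

    // Expression-based form: evaluates "index / 2 + indexMin" row by row and
    // sums the result. fieldName is null because the expression supplies the
    // input; this mirrors the four-argument call used in the test above.
    AggregatorFactory fromExpression = new DoubleSumAggregatorFactory(
        "idx",
        null,
        "index / 2 + indexMin",
        TestExprMacroTable.INSTANCE
    );

    // Both factories produce an output column named "idx".
    System.out.println(fromColumn.getName().equals(fromExpression.getName()));
  }
}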
List aggregatorSpecs = Arrays.asList( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new DoubleSumAggregatorFactory("idx", null, "index / 2 + indexMin", TestExprMacroTable.INSTANCE) ); builder.setLimit(Integer.MAX_VALUE).setAggregatorSpecs(aggregatorSpecs); @@ -3452,7 +3452,7 @@ public class GroupByQueryRunnerTest TestExprMacroTable.INSTANCE ); List aggregatorSpecs2 = Arrays.asList( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new DoubleSumAggregatorFactory("idx", "expr") ); builder.setLimit(Integer.MAX_VALUE).setVirtualColumns(expressionVirtualColumn).setAggregatorSpecs(aggregatorSpecs2); @@ -3473,10 +3473,10 @@ public class GroupByQueryRunnerTest public void testGroupByWithOrderLimit2() { GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04") .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .addOrderByColumn("rows", OrderByColumnSpec.Direction.DESCENDING) .addOrderByColumn("alias", OrderByColumnSpec.Direction.DESCENDING) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)); @@ -3508,10 +3508,10 @@ public class GroupByQueryRunnerTest public void testGroupByWithOrderLimit3() { GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04") .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new DoubleSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new DoubleSumAggregatorFactory("idx", "index")) .addOrderByColumn("idx", OrderByColumnSpec.Direction.DESCENDING) .addOrderByColumn("alias", OrderByColumnSpec.Direction.DESCENDING) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)); @@ -3545,10 +3545,10 @@ public class GroupByQueryRunnerTest public void testGroupByOrderLimitNumeric() { GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04") .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .addOrderByColumn(new OrderByColumnSpec( "rows", OrderByColumnSpec.Direction.DESCENDING, @@ -3588,18 +3588,18 @@ public class GroupByQueryRunnerTest public void testGroupByWithSameCaseOrdering() { GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setGranularity(QueryRunnerTestHelper.allGran).setDimensions(new DefaultDimensionSpec( - QueryRunnerTestHelper.marketDimension, + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN).setDimensions(new DefaultDimensionSpec( + QueryRunnerTestHelper.MARKET_DIMENSION, "marketalias" )) - .setInterval(QueryRunnerTestHelper.fullOnIntervalSpec) + .setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setLimitSpec( new DefaultLimitSpec( Collections.singletonList(new 
OrderByColumnSpec("marketalias", OrderByColumnSpec.Direction.DESCENDING)), 3 ) - ).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount) + ).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT) .build(); List expectedResults = Arrays.asList( @@ -3637,21 +3637,21 @@ public class GroupByQueryRunnerTest public void testGroupByWithOrderLimit4() { GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setGranularity(QueryRunnerTestHelper.allGran) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .setDimensions(new DefaultDimensionSpec( - QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension + QueryRunnerTestHelper.MARKET_DIMENSION, + QueryRunnerTestHelper.MARKET_DIMENSION )) - .setInterval(QueryRunnerTestHelper.fullOnIntervalSpec) + .setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setLimitSpec( new DefaultLimitSpec( Collections.singletonList( - new OrderByColumnSpec(QueryRunnerTestHelper.marketDimension, OrderByColumnSpec.Direction.DESCENDING) + new OrderByColumnSpec(QueryRunnerTestHelper.MARKET_DIMENSION, OrderByColumnSpec.Direction.DESCENDING) ), 3 ) - ).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount) + ).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT) .build(); List expectedResults = Arrays.asList( @@ -3675,26 +3675,26 @@ public class GroupByQueryRunnerTest public void testGroupByWithOrderOnHyperUnique() { GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setGranularity(QueryRunnerTestHelper.allGran) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .setDimensions(new DefaultDimensionSpec( - QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension + QueryRunnerTestHelper.MARKET_DIMENSION, + QueryRunnerTestHelper.MARKET_DIMENSION )) - .setInterval(QueryRunnerTestHelper.fullOnIntervalSpec) + .setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setLimitSpec( new DefaultLimitSpec( Collections.singletonList( - new OrderByColumnSpec(QueryRunnerTestHelper.uniqueMetric, OrderByColumnSpec.Direction.DESCENDING) + new OrderByColumnSpec(QueryRunnerTestHelper.UNIQUE_METRIC, OrderByColumnSpec.Direction.DESCENDING) ), 3 ) - ).setAggregatorSpecs(QueryRunnerTestHelper.qualityUniques) + ).setAggregatorSpecs(QueryRunnerTestHelper.QUALITY_UNIQUES) .setPostAggregatorSpecs( Collections.singletonList( new HyperUniqueFinalizingPostAggregator( - QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, - QueryRunnerTestHelper.uniqueMetric + QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, + QueryRunnerTestHelper.UNIQUE_METRIC ) ) ) @@ -3706,9 +3706,9 @@ public class GroupByQueryRunnerTest "1970-01-01T00:00:00.000Z", "market", "spot", - QueryRunnerTestHelper.uniqueMetric, + QueryRunnerTestHelper.UNIQUE_METRIC, QueryRunnerTestHelper.UNIQUES_9, - QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, QueryRunnerTestHelper.UNIQUES_9 ), makeRow( @@ -3716,9 +3716,9 @@ public class GroupByQueryRunnerTest "1970-01-01T00:00:00.000Z", "market", "upfront", - QueryRunnerTestHelper.uniqueMetric, + QueryRunnerTestHelper.UNIQUE_METRIC, QueryRunnerTestHelper.UNIQUES_2, - QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, QueryRunnerTestHelper.UNIQUES_2 ), makeRow( @@ -3726,9 +3726,9 @@ public class GroupByQueryRunnerTest 
"1970-01-01T00:00:00.000Z", "market", "total_market", - QueryRunnerTestHelper.uniqueMetric, + QueryRunnerTestHelper.UNIQUE_METRIC, QueryRunnerTestHelper.UNIQUES_2, - QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, QueryRunnerTestHelper.UNIQUES_2 ) ); @@ -3741,28 +3741,28 @@ public class GroupByQueryRunnerTest public void testGroupByWithHavingOnHyperUnique() { GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setGranularity(QueryRunnerTestHelper.allGran) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .setDimensions(new DefaultDimensionSpec( - QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension + QueryRunnerTestHelper.MARKET_DIMENSION, + QueryRunnerTestHelper.MARKET_DIMENSION )) - .setInterval(QueryRunnerTestHelper.fullOnIntervalSpec) + .setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setLimitSpec( new DefaultLimitSpec( Collections.singletonList( - new OrderByColumnSpec(QueryRunnerTestHelper.uniqueMetric, OrderByColumnSpec.Direction.DESCENDING) + new OrderByColumnSpec(QueryRunnerTestHelper.UNIQUE_METRIC, OrderByColumnSpec.Direction.DESCENDING) ), 3 ) ) - .setHavingSpec(new GreaterThanHavingSpec(QueryRunnerTestHelper.uniqueMetric, 8)) - .setAggregatorSpecs(QueryRunnerTestHelper.qualityUniques) + .setHavingSpec(new GreaterThanHavingSpec(QueryRunnerTestHelper.UNIQUE_METRIC, 8)) + .setAggregatorSpecs(QueryRunnerTestHelper.QUALITY_UNIQUES) .setPostAggregatorSpecs( Collections.singletonList( new HyperUniqueFinalizingPostAggregator( - QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, - QueryRunnerTestHelper.uniqueMetric + QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, + QueryRunnerTestHelper.UNIQUE_METRIC ) ) ) @@ -3774,9 +3774,9 @@ public class GroupByQueryRunnerTest "1970-01-01T00:00:00.000Z", "market", "spot", - QueryRunnerTestHelper.uniqueMetric, + QueryRunnerTestHelper.UNIQUE_METRIC, QueryRunnerTestHelper.UNIQUES_9, - QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, QueryRunnerTestHelper.UNIQUES_9 ) ); @@ -3789,31 +3789,31 @@ public class GroupByQueryRunnerTest public void testGroupByWithHavingOnFinalizedHyperUnique() { GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setGranularity(QueryRunnerTestHelper.allGran) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .setDimensions(new DefaultDimensionSpec( - QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension + QueryRunnerTestHelper.MARKET_DIMENSION, + QueryRunnerTestHelper.MARKET_DIMENSION )) - .setInterval(QueryRunnerTestHelper.fullOnIntervalSpec) + .setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setLimitSpec( new DefaultLimitSpec( Collections.singletonList( new OrderByColumnSpec( - QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, OrderByColumnSpec.Direction.DESCENDING ) ), 3 ) ) - .setHavingSpec(new GreaterThanHavingSpec(QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, 8)) - .setAggregatorSpecs(QueryRunnerTestHelper.qualityUniques) + .setHavingSpec(new GreaterThanHavingSpec(QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, 8)) + .setAggregatorSpecs(QueryRunnerTestHelper.QUALITY_UNIQUES) .setPostAggregatorSpecs( 
Collections.singletonList( new HyperUniqueFinalizingPostAggregator( - QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, - QueryRunnerTestHelper.uniqueMetric + QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, + QueryRunnerTestHelper.UNIQUE_METRIC ) ) ) @@ -3825,9 +3825,9 @@ public class GroupByQueryRunnerTest "1970-01-01T00:00:00.000Z", "market", "spot", - QueryRunnerTestHelper.uniqueMetric, + QueryRunnerTestHelper.UNIQUE_METRIC, QueryRunnerTestHelper.UNIQUES_9, - QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, QueryRunnerTestHelper.UNIQUES_9 ) ); @@ -3840,28 +3840,28 @@ public class GroupByQueryRunnerTest public void testGroupByWithLimitOnFinalizedHyperUnique() { GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setGranularity(QueryRunnerTestHelper.allGran).setDimensions(new DefaultDimensionSpec( - QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN).setDimensions(new DefaultDimensionSpec( + QueryRunnerTestHelper.MARKET_DIMENSION, + QueryRunnerTestHelper.MARKET_DIMENSION )) - .setInterval(QueryRunnerTestHelper.fullOnIntervalSpec) + .setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setLimitSpec( new DefaultLimitSpec( Collections.singletonList( new OrderByColumnSpec( - QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, OrderByColumnSpec.Direction.DESCENDING ) ), 3 ) - ).setAggregatorSpecs(QueryRunnerTestHelper.qualityUniques) + ).setAggregatorSpecs(QueryRunnerTestHelper.QUALITY_UNIQUES) .setPostAggregatorSpecs( Collections.singletonList( new HyperUniqueFinalizingPostAggregator( - QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, - QueryRunnerTestHelper.uniqueMetric + QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, + QueryRunnerTestHelper.UNIQUE_METRIC ) ) ) @@ -3873,9 +3873,9 @@ public class GroupByQueryRunnerTest "1970-01-01T00:00:00.000Z", "market", "spot", - QueryRunnerTestHelper.uniqueMetric, + QueryRunnerTestHelper.UNIQUE_METRIC, QueryRunnerTestHelper.UNIQUES_9, - QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, QueryRunnerTestHelper.UNIQUES_9 ), makeRow( @@ -3883,9 +3883,9 @@ public class GroupByQueryRunnerTest "1970-01-01T00:00:00.000Z", "market", "upfront", - QueryRunnerTestHelper.uniqueMetric, + QueryRunnerTestHelper.UNIQUE_METRIC, QueryRunnerTestHelper.UNIQUES_2, - QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, QueryRunnerTestHelper.UNIQUES_2 ), makeRow( @@ -3893,9 +3893,9 @@ public class GroupByQueryRunnerTest "1970-01-01T00:00:00.000Z", "market", "total_market", - QueryRunnerTestHelper.uniqueMetric, + QueryRunnerTestHelper.UNIQUE_METRIC, QueryRunnerTestHelper.UNIQUES_2, - QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, QueryRunnerTestHelper.UNIQUES_2 ) ); @@ -3922,19 +3922,19 @@ public class GroupByQueryRunnerTest map.put("travel", "travel555"); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird).setDimensions(new ExtractionDimensionSpec( + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + 
.setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD).setDimensions(new ExtractionDimensionSpec( "quality", "alias", new LookupExtractionFn(new MapLookupExtractor(map, false), false, null, false, false) - )).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + )).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setLimitSpec( new DefaultLimitSpec( Collections.singletonList(new OrderByColumnSpec("alias", null, StringComparators.ALPHANUMERIC)), null ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -3980,19 +3980,19 @@ public class GroupByQueryRunnerTest map.put("travel", "1"); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird).setDimensions(new ExtractionDimensionSpec( + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD).setDimensions(new ExtractionDimensionSpec( "quality", "alias", new LookupExtractionFn(new MapLookupExtractor(map, false), false, null, false, false) - )).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + )).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setLimitSpec( new DefaultLimitSpec( Collections.singletonList(new OrderByColumnSpec("alias", null, StringComparators.ALPHANUMERIC)), 11 ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .overrideContext(ImmutableMap.of("sortByDimsFirst", true)) .build(); @@ -4026,12 +4026,12 @@ public class GroupByQueryRunnerTest public void testLimitPerGrouping() { GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setGranularity(QueryRunnerTestHelper.dayGran).setDimensions(new DefaultDimensionSpec( - QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN).setDimensions(new DefaultDimensionSpec( + QueryRunnerTestHelper.MARKET_DIMENSION, + QueryRunnerTestHelper.MARKET_DIMENSION )) - .setInterval(QueryRunnerTestHelper.firstToThird) + .setInterval(QueryRunnerTestHelper.FIRST_TO_THIRD) // Using a limitSpec here to achieve a per group limit is incorrect. // Limit is applied on the overall results. 
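The two comments above capture a subtlety worth spelling out: DefaultLimitSpec truncates the merged result set as a whole, after rows from every granularity bucket have been combined, so a limit of 2 on a DAY-granularity query returns 2 rows total, not 2 rows per day. Below is a minimal sketch of such a spec, mirroring the constructor calls used in the test; the import paths and wrapper class are assumptions for illustration, not part of the patch.

import java.util.Collections;
import org.apache.druid.query.groupby.orderby.DefaultLimitSpec;
import org.apache.druid.query.groupby.orderby.LimitSpec;
import org.apache.druid.query.groupby.orderby.OrderByColumnSpec;

public class OverallLimitSketch
{
  public static void main(String[] args)
  {
    // Sort by "rows" descending, then keep the first 2 rows of the overall
    // merged result. The limit is applied once, after merging across all
    // granularity buckets -- it is not re-applied per day.
    LimitSpec limitSpec = new DefaultLimitSpec(
        Collections.singletonList(
            new OrderByColumnSpec("rows", OrderByColumnSpec.Direction.DESCENDING)
        ),
        2
    );
    System.out.println(limitSpec);
  }
}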
.setLimitSpec( @@ -4039,7 +4039,7 @@ public class GroupByQueryRunnerTest Collections.singletonList(new OrderByColumnSpec("rows", OrderByColumnSpec.Direction.DESCENDING)), 2 ) - ).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount) + ).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT) .build(); List expectedResults = Arrays.asList( @@ -4064,15 +4064,15 @@ public class GroupByQueryRunnerTest public void testPostAggMergedHavingSpec() { GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04") .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("index", "index")) - .setPostAggregatorSpecs(ImmutableList.of(QueryRunnerTestHelper.addRowsIndexConstant)) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("index", "index")) + .setPostAggregatorSpecs(ImmutableList.of(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setHavingSpec( new OrHavingSpec( - ImmutableList.of(new GreaterThanHavingSpec(QueryRunnerTestHelper.addRowsIndexConstantMetric, 1000L)) + ImmutableList.of(new GreaterThanHavingSpec(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT_METRIC, 1000L)) ) ); @@ -4087,7 +4087,7 @@ public class GroupByQueryRunnerTest 6L, "index", 4420L, - QueryRunnerTestHelper.addRowsIndexConstantMetric, + QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT_METRIC, (double) (6L + 4420L + 1L) ), makeRow( @@ -4099,7 +4099,7 @@ public class GroupByQueryRunnerTest 6L, "index", 4416L, - QueryRunnerTestHelper.addRowsIndexConstantMetric, + QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT_METRIC, (double) (6L + 4416L + 1L) ) ); @@ -4134,10 +4134,10 @@ public class GroupByQueryRunnerTest public void testGroupByWithOrderLimitHavingSpec() { GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-01-25/2011-01-28") .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new DoubleSumAggregatorFactory("index", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new DoubleSumAggregatorFactory("index", "index")) .setGranularity(Granularities.ALL) .setHavingSpec(new GreaterThanHavingSpec("index", 310L)) .setLimitSpec( @@ -4169,16 +4169,16 @@ public class GroupByQueryRunnerTest public void testPostAggHavingSpec() { GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04") .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("index", "index")) - .setPostAggregatorSpecs(ImmutableList.of(QueryRunnerTestHelper.addRowsIndexConstant)) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("index", "index")) + .setPostAggregatorSpecs(ImmutableList.of(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setHavingSpec( new OrHavingSpec( ImmutableList.of( - new GreaterThanHavingSpec(QueryRunnerTestHelper.addRowsIndexConstantMetric, 1000L) + new GreaterThanHavingSpec(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT_METRIC, 1000L) 
) ) ); @@ -4195,7 +4195,7 @@ public class GroupByQueryRunnerTest 6L, "index", 4420L, - QueryRunnerTestHelper.addRowsIndexConstantMetric, + QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT_METRIC, (double) (6L + 4420L + 1L) ), makeRow( @@ -4207,7 +4207,7 @@ public class GroupByQueryRunnerTest 6L, "index", 4416L, - QueryRunnerTestHelper.addRowsIndexConstantMetric, + QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT_METRIC, (double) (6L + 4416L + 1L) ) ); @@ -4224,10 +4224,10 @@ public class GroupByQueryRunnerTest public void testHavingSpec() { GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04") .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setHavingSpec( new OrHavingSpec( @@ -4272,10 +4272,10 @@ public class GroupByQueryRunnerTest ); GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04") .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setHavingSpec(havingSpec); @@ -4322,10 +4322,10 @@ public class GroupByQueryRunnerTest ); GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04") .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setHavingSpec(havingSpec); @@ -4348,10 +4348,10 @@ public class GroupByQueryRunnerTest public void testMergedHavingSpec() { GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04") .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setHavingSpec( new OrHavingSpec( @@ -4400,10 +4400,10 @@ public class GroupByQueryRunnerTest public void testMergedPostAggHavingSpec() { GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04") .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) 
.setPostAggregatorSpecs( Collections.singletonList( new ArithmeticPostAggregator( @@ -4497,14 +4497,14 @@ public class GroupByQueryRunnerTest public void testCustomAggregatorHavingSpec() { GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) .setAggregatorSpecs( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new TestBigDecimalSumAggregatorFactory("idxDouble", "index") ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setHavingSpec( new OrHavingSpec( ImmutableList.of( @@ -4537,11 +4537,11 @@ public class GroupByQueryRunnerTest public void testGroupByWithRegEx() { GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04") .setDimFilter(new RegexDimFilter("quality", "auto.*", null)) .setDimensions(new DefaultDimensionSpec("quality", "quality")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)); final GroupByQuery query = builder.build(); @@ -4558,10 +4558,10 @@ public class GroupByQueryRunnerTest public void testGroupByWithNonexistentDimension() { GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04") .addDimension("billy") - .addDimension("quality").setAggregatorSpecs(QueryRunnerTestHelper.rowsCount) + .addDimension("quality").setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)); final GroupByQuery query = builder.build(); @@ -4614,8 +4614,8 @@ public class GroupByQueryRunnerTest public void testIdenticalSubquery() { GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) .setDimFilter(new JavaScriptDimFilter( "quality", @@ -4624,19 +4624,19 @@ public class GroupByQueryRunnerTest JavaScriptConfig.getEnabledInstance() )) .setAggregatorSpecs( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"), new LongSumAggregatorFactory("indexMaxPlusTen", "indexMaxPlusTen") ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); GroupByQuery query = makeQueryBuilder() .setDataSource(subquery) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("alias", "alias")) .setAggregatorSpecs(new LongSumAggregatorFactory("rows", "rows"), new LongSumAggregatorFactory("idx", "idx")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -4670,8 +4670,8 @@ public class GroupByQueryRunnerTest public void 
testSubqueryWithMultipleIntervalsInOuterQuery() { GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) .setDimFilter(new JavaScriptDimFilter( "quality", @@ -4680,11 +4680,11 @@ public class GroupByQueryRunnerTest JavaScriptConfig.getEnabledInstance() )) .setAggregatorSpecs( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"), new LongSumAggregatorFactory("indexMaxPlusTen", "indexMaxPlusTen") ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); GroupByQuery query = makeQueryBuilder() @@ -4699,7 +4699,7 @@ public class GroupByQueryRunnerTest ) .setDimensions(new DefaultDimensionSpec("alias", "alias")) .setAggregatorSpecs(new LongSumAggregatorFactory("rows", "rows"), new LongSumAggregatorFactory("idx", "idx")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -4733,8 +4733,8 @@ public class GroupByQueryRunnerTest public void testSubqueryWithMultipleIntervalsInOuterQueryAndChunkPeriod() { GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) .setDimFilter(new JavaScriptDimFilter( "quality", @@ -4743,11 +4743,11 @@ public class GroupByQueryRunnerTest JavaScriptConfig.getEnabledInstance() )) .setAggregatorSpecs( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"), new LongSumAggregatorFactory("indexMaxPlusTen", "indexMaxPlusTen") ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .overrideContext(ImmutableMap.of("chunkPeriod", "P1D")) .build(); @@ -4763,7 +4763,7 @@ public class GroupByQueryRunnerTest ) .setDimensions(new DefaultDimensionSpec("alias", "alias")) .setAggregatorSpecs(new LongSumAggregatorFactory("rows", "rows"), new LongSumAggregatorFactory("idx", "idx")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -4799,8 +4799,8 @@ public class GroupByQueryRunnerTest //https://github.com/apache/incubator-druid/issues/2556 GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) .setDimFilter(new JavaScriptDimFilter( "quality", @@ -4809,11 +4809,11 @@ public class GroupByQueryRunnerTest JavaScriptConfig.getEnabledInstance() )) .setAggregatorSpecs( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"), new LongSumAggregatorFactory("indexMaxPlusTen", "indexMaxPlusTen") ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); GroupByQuery query = 
makeQueryBuilder() @@ -4827,7 +4827,7 @@ public class GroupByQueryRunnerTest ) .setDimensions(new ExtractionDimensionSpec("alias", "alias", new RegexDimExtractionFn("(a).*", true, "a"))) .setAggregatorSpecs(new LongSumAggregatorFactory("rows", "rows"), new LongSumAggregatorFactory("idx", "idx")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -4847,26 +4847,26 @@ public class GroupByQueryRunnerTest cannotVectorize(); GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) .setAggregatorSpecs( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"), new LongSumAggregatorFactory("indexMaxPlusTen", "indexMaxPlusTen") ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); GroupByQuery query = makeQueryBuilder() .setDataSource(subquery) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setAggregatorSpecs( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new DoubleMaxAggregatorFactory("idx", "idx"), new DoubleMaxAggregatorFactory("indexMaxPlusTen", "indexMaxPlusTen") ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = makeRows( @@ -4886,7 +4886,7 @@ public class GroupByQueryRunnerTest new ExpressionVirtualColumn("expr", "-index + 100", ValueType.FLOAT, TestExprMacroTable.INSTANCE) ) .setAggregatorSpecs( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "expr"), new LongSumAggregatorFactory("indexMaxPlusTen", "indexMaxPlusTen") ) @@ -4910,10 +4910,10 @@ public class GroupByQueryRunnerTest public void testDifferentGroupingSubqueryMultipleAggregatorsOnSameField() { GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setPostAggregatorSpecs( Collections.singletonList( new ArithmeticPostAggregator( @@ -4926,19 +4926,19 @@ public class GroupByQueryRunnerTest ) ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); GroupByQuery query = makeQueryBuilder() .setDataSource(subquery) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setAggregatorSpecs( new DoubleMaxAggregatorFactory("idx1", "idx"), new DoubleMaxAggregatorFactory("idx2", "idx"), new DoubleMaxAggregatorFactory("idx3", "post_agg"), new DoubleMaxAggregatorFactory("idx4", "post_agg") ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -4959,16 
+4959,16 @@ public class GroupByQueryRunnerTest public void testDifferentGroupingSubqueryWithFilter() { GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "quality")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); GroupByQuery query = makeQueryBuilder() .setDataSource(subquery) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setAggregatorSpecs(new DoubleMaxAggregatorFactory("idx", "idx")) .setDimFilter( new OrDimFilter( @@ -4985,7 +4985,7 @@ public class GroupByQueryRunnerTest ) ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -5001,18 +5001,18 @@ public class GroupByQueryRunnerTest public void testDifferentIntervalSubquery() { GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); GroupByQuery query = makeQueryBuilder() .setDataSource(subquery) - .setQuerySegmentSpec(QueryRunnerTestHelper.secondOnly) + .setQuerySegmentSpec(QueryRunnerTestHelper.SECOND_ONLY) .setAggregatorSpecs(new DoubleMaxAggregatorFactory("idx", "idx")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Collections.singletonList( @@ -5032,8 +5032,8 @@ public class GroupByQueryRunnerTest ); makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnIntervalSpec) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setDimensions( new DefaultDimensionSpec("market", "market"), new ExtractionDimensionSpec( @@ -5042,9 +5042,9 @@ public class GroupByQueryRunnerTest new TimeFormatExtractionFn("EEEE", null, null, null, false) ) ) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, QueryRunnerTestHelper.indexDoubleSum) - .setPostAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) - .setGranularity(QueryRunnerTestHelper.allGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, QueryRunnerTestHelper.INDEX_DOUBLE_SUM) + .setPostAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .setDimFilter( new OrDimFilter( Arrays.asList( @@ -5066,11 +5066,11 @@ public class GroupByQueryRunnerTest ); makeQueryBuilder() - 
.setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "__time")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setHavingSpec( new OrHavingSpec( ImmutableList.of( @@ -5092,18 +5092,18 @@ public class GroupByQueryRunnerTest public void testEmptySubquery() { GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.emptyInterval) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.EMPTY_INTERVAL) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); GroupByQuery query = makeQueryBuilder() .setDataSource(subquery) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setAggregatorSpecs(new DoubleMaxAggregatorFactory("idx", "idx")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); Iterable results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query); @@ -5114,15 +5114,15 @@ public class GroupByQueryRunnerTest public void testSubqueryWithPostAggregators() { final GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) .setDimFilter(new JavaScriptDimFilter( "quality", "function(dim){ return true; }", null, JavaScriptConfig.getEnabledInstance() - )).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx_subagg", "index")) + )).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx_subagg", "index")) .setPostAggregatorSpecs( Collections.singletonList( new ArithmeticPostAggregator( @@ -5134,12 +5134,12 @@ public class GroupByQueryRunnerTest ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); final GroupByQuery query = makeQueryBuilder() .setDataSource(subquery) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("alias", "alias")) .setAggregatorSpecs( new LongSumAggregatorFactory("rows", "rows"), @@ -5156,7 +5156,7 @@ public class GroupByQueryRunnerTest ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -5388,10 +5388,10 @@ public class GroupByQueryRunnerTest public void testSubqueryWithPostAggregatorsAndHaving() { final GroupByQuery subquery = makeQueryBuilder() - 
.setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx_subagg", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx_subagg", "index")) .setPostAggregatorSpecs( Collections.singletonList( new ArithmeticPostAggregator( @@ -5432,12 +5432,12 @@ public class GroupByQueryRunnerTest } ) .addOrderByColumn("alias") - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); final GroupByQuery query = makeQueryBuilder() .setDataSource(subquery) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("alias", "alias")) .setAggregatorSpecs( new LongSumAggregatorFactory("rows", "rows"), @@ -5454,7 +5454,7 @@ public class GroupByQueryRunnerTest ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -5665,8 +5665,8 @@ public class GroupByQueryRunnerTest cannotVectorize(); final GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) .setDimFilter(new JavaScriptDimFilter( "market", @@ -5675,7 +5675,7 @@ public class GroupByQueryRunnerTest JavaScriptConfig.getEnabledInstance() )) .setAggregatorSpecs( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new DoubleSumAggregatorFactory("idx_subagg", "index"), new JavaScriptAggregatorFactory( "js_agg", @@ -5726,12 +5726,12 @@ public class GroupByQueryRunnerTest } ) .addOrderByColumn("alias") - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); final GroupByQuery query = makeQueryBuilder() .setDataSource(subquery) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("alias", "alias")) .setAggregatorSpecs( new LongSumAggregatorFactory("rows", "rows"), @@ -5760,7 +5760,7 @@ public class GroupByQueryRunnerTest 5 ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -5845,20 +5845,20 @@ public class GroupByQueryRunnerTest public void testSubqueryWithOuterFilterAggregator() { final GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnIntervalSpec) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setDimensions(new DefaultDimensionSpec("market", "market"), new DefaultDimensionSpec("quality", "quality")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("index", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("index", "index")) + 
.setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); final DimFilter filter = new SelectorDimFilter("market", "spot", null); final GroupByQuery query = makeQueryBuilder() .setDataSource(subquery) - .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnIntervalSpec) + .setQuerySegmentSpec(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setDimensions(Collections.emptyList()) - .setAggregatorSpecs(new FilteredAggregatorFactory(QueryRunnerTestHelper.rowsCount, filter)) - .setGranularity(QueryRunnerTestHelper.allGran) + .setAggregatorSpecs(new FilteredAggregatorFactory(QueryRunnerTestHelper.ROWS_COUNT, filter)) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); List expectedResults = Collections.singletonList( @@ -5872,11 +5872,11 @@ public class GroupByQueryRunnerTest public void testSubqueryWithOuterTimeFilter() { final GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnIntervalSpec) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setDimensions(new DefaultDimensionSpec("market", "market"), new DefaultDimensionSpec("quality", "quality")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("index", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("index", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); final DimFilter fridayFilter = new SelectorDimFilter( @@ -5891,11 +5891,11 @@ public class GroupByQueryRunnerTest ); final GroupByQuery query = makeQueryBuilder() .setDataSource(subquery) - .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnIntervalSpec) + .setQuerySegmentSpec(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setDimensions(Collections.emptyList()) .setDimFilter(firstDaysFilter) - .setAggregatorSpecs(new FilteredAggregatorFactory(QueryRunnerTestHelper.rowsCount, fridayFilter)) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(new FilteredAggregatorFactory(QueryRunnerTestHelper.ROWS_COUNT, fridayFilter)) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -5917,17 +5917,17 @@ public class GroupByQueryRunnerTest public void testSubqueryWithContextTimeout() { final GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnIntervalSpec) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); final GroupByQuery query = makeQueryBuilder() .setDataSource(subquery) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new ArrayList<>()).setAggregatorSpecs(new CountAggregatorFactory("count")) - .setGranularity(QueryRunnerTestHelper.allGran) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .overrideContext(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, 10000)) .build(); @@ -5942,18 +5942,18 @@ public class GroupByQueryRunnerTest public void testSubqueryWithOuterVirtualColumns() { final GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - 
.setQuerySegmentSpec(QueryRunnerTestHelper.fullOnIntervalSpec) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); final GroupByQuery query = makeQueryBuilder() .setDataSource(subquery) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setVirtualColumns(new ExpressionVirtualColumn("expr", "1", ValueType.FLOAT, TestExprMacroTable.INSTANCE)) .setDimensions(new ArrayList<>()).setAggregatorSpecs(new LongSumAggregatorFactory("count", "expr")) - .setGranularity(QueryRunnerTestHelper.allGran) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); List expectedResults = Collections.singletonList( @@ -5967,23 +5967,23 @@ public class GroupByQueryRunnerTest public void testSubqueryWithOuterCardinalityAggregator() { final GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnIntervalSpec) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setDimensions(new DefaultDimensionSpec("market", "market"), new DefaultDimensionSpec("quality", "quality")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("index", "index")) - .setGranularity(QueryRunnerTestHelper.allGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("index", "index")) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); final GroupByQuery query = makeQueryBuilder() .setDataSource(subquery) - .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnIntervalSpec) + .setQuerySegmentSpec(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setDimensions(Collections.emptyList()) .setAggregatorSpecs(new CardinalityAggregatorFactory( "car", ImmutableList.of(new DefaultDimensionSpec("quality", "quality")), false )) - .setGranularity(QueryRunnerTestHelper.allGran) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); List expectedResults = Collections.singletonList( @@ -5997,10 +5997,10 @@ public class GroupByQueryRunnerTest public void testSubqueryWithOuterCountAggregator() { final GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnIntervalSpec) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setLimitSpec( new DefaultLimitSpec( ImmutableList.of(new OrderByColumnSpec("alias", OrderByColumnSpec.Direction.ASCENDING)), @@ -6011,9 +6011,9 @@ public class GroupByQueryRunnerTest final GroupByQuery query = makeQueryBuilder() .setDataSource(subquery) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new ArrayList<>()).setAggregatorSpecs(new CountAggregatorFactory("count")) - .setGranularity(QueryRunnerTestHelper.allGran) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); // v1 strategy throws an exception for this query because it tries to merge the noop outer @@ -6038,16 +6038,16 @@ public class 
GroupByQueryRunnerTest public void testSubqueryWithOuterDimJavascriptAggregators() { final GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("market", "market"), new DefaultDimensionSpec("quality", "quality")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("index", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("index", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); final GroupByQuery query = makeQueryBuilder() .setDataSource(subquery) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "quality")) .setAggregatorSpecs(new JavaScriptAggregatorFactory( "js_agg", @@ -6057,7 +6057,7 @@ public class GroupByQueryRunnerTest "function(a,b){return a + b;}", JavaScriptConfig.getEnabledInstance() )) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -6089,16 +6089,16 @@ public class GroupByQueryRunnerTest public void testSubqueryWithOuterJavascriptAggregators() { final GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("market", "market"), new DefaultDimensionSpec("quality", "quality")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("index", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("index", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); final GroupByQuery query = makeQueryBuilder() .setDataSource(subquery) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "quality")) .setAggregatorSpecs(new JavaScriptAggregatorFactory( "js_agg", @@ -6108,7 +6108,7 @@ public class GroupByQueryRunnerTest "function(a,b){return a + b;}", JavaScriptConfig.getEnabledInstance() )) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -6140,27 +6140,27 @@ public class GroupByQueryRunnerTest public void testSubqueryWithHyperUniques() { GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) .setAggregatorSpecs( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"), new HyperUniquesAggregatorFactory("quality_uniques", "quality_uniques") ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); GroupByQuery 
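// Illustrative sketch (editor's addition, not part of this patch): both JavaScript
// aggregator tests construct a JavaScriptAggregatorFactory whose combine function is the
// "function(a,b){return a + b;}" literal visible above. The field list and the
// aggregate/reset bodies below are assumptions (the flattened hunks elide them), so
// verify against JavaScriptAggregatorFactory before reuse:
AggregatorFactory jsAgg = new JavaScriptAggregatorFactory(
    "js_agg",                                              // output column name
    ImmutableList.of("index"),                             // input fields (assumed)
    "function(current, v){return current + v;}",           // fnAggregate (assumed)
    "function(){return 0;}",                               // fnReset (assumed)
    "function(a,b){return a + b;}",                        // fnCombine, as in the tests
    JavaScriptConfig.getEnabledInstance()                   // JS is disabled by default
);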
query = makeQueryBuilder() .setDataSource(subquery) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("alias", "alias")) .setAggregatorSpecs( new LongSumAggregatorFactory("rows", "rows"), new LongSumAggregatorFactory("idx", "idx"), new HyperUniquesAggregatorFactory("uniq", "quality_uniques") ) - .setGranularity(QueryRunnerTestHelper.allGran) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); List expectedResults = Arrays.asList( @@ -6283,11 +6283,11 @@ public class GroupByQueryRunnerTest public void testSubqueryWithHyperUniquesPostAggregator() { GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new ArrayList<>()) .setAggregatorSpecs( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"), new HyperUniquesAggregatorFactory("quality_uniques_inner", "quality_uniques") ) @@ -6296,12 +6296,12 @@ public class GroupByQueryRunnerTest new FieldAccessPostAggregator("quality_uniques_inner_post", "quality_uniques_inner") ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); GroupByQuery query = makeQueryBuilder() .setDataSource(subquery) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new ArrayList<>()) .setAggregatorSpecs( new LongSumAggregatorFactory("rows", "rows"), @@ -6313,7 +6313,7 @@ public class GroupByQueryRunnerTest new HyperUniqueFinalizingPostAggregator("quality_uniques_outer_post", "quality_uniques_outer") ) ) - .setGranularity(QueryRunnerTestHelper.allGran) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); List expectedResults = Collections.singletonList( @@ -6343,27 +6343,27 @@ public class GroupByQueryRunnerTest cannotVectorize(); GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnIntervalSpec) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setDimensions(new DefaultDimensionSpec("market", "market")) .setAggregatorSpecs( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongFirstAggregatorFactory("innerfirst", "index"), new LongLastAggregatorFactory("innerlast", "index") ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .overrideContext(ImmutableMap.of("finalize", true)) .build(); GroupByQuery query = makeQueryBuilder() .setDataSource(subquery) - .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnIntervalSpec) + .setQuerySegmentSpec(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setDimensions(Collections.emptyList()) .setAggregatorSpecs( new LongFirstAggregatorFactory("first", "innerfirst"), new LongLastAggregatorFactory("last", "innerlast") ) - .setGranularity(QueryRunnerTestHelper.monthGran) + .setGranularity(QueryRunnerTestHelper.MONTH_GRAN) .build(); List expectedResults = Arrays.asList( @@ -6388,8 +6388,8 @@ public class GroupByQueryRunnerTest } GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - 
.setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setVirtualColumns(new ExpressionVirtualColumn("alias", "quality", ValueType.STRING, TestExprMacroTable.INSTANCE)) .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("market", "market2"), @@ -6397,11 +6397,11 @@ public class GroupByQueryRunnerTest )) .setAggregatorSpecs( Arrays.asList( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index") ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setSubtotalsSpec(ImmutableList.of( ImmutableList.of("market2"), ImmutableList.of() @@ -6504,8 +6504,8 @@ public class GroupByQueryRunnerTest } GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setVirtualColumns(new ExpressionVirtualColumn("alias", "quality", ValueType.STRING, TestExprMacroTable.INSTANCE)) .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("quality", "quality2"), @@ -6514,7 +6514,7 @@ public class GroupByQueryRunnerTest )) .setAggregatorSpecs( Arrays.asList( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index") ) ) @@ -6523,7 +6523,7 @@ public class GroupByQueryRunnerTest new FieldAccessPostAggregator("idxPostAgg", "idx") ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setSubtotalsSpec(ImmutableList.of( ImmutableList.of("alias2"), ImmutableList.of("market2"), @@ -6860,8 +6860,8 @@ public class GroupByQueryRunnerTest GroupByQuery query = GroupByQuery .builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setVirtualColumns(new ExpressionVirtualColumn("alias", "quality", ValueType.STRING, TestExprMacroTable.INSTANCE)) .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("quality", "quality"), @@ -6870,14 +6870,14 @@ public class GroupByQueryRunnerTest )) .setAggregatorSpecs( Arrays.asList( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"), new FloatSumAggregatorFactory("idxFloat", "indexFloat"), new DoubleSumAggregatorFactory("idxDouble", "index") ) ) .setDimFilter(new SelectorDimFilter("alias", "automotive", null)) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setSubtotalsSpec(ImmutableList.of( ImmutableList.of("alias_renamed"), ImmutableList.of() @@ -6952,19 +6952,19 @@ public class GroupByQueryRunnerTest } GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("qualityLong", "ql", ValueType.LONG), new DefaultDimensionSpec("market", "market2") )) .setAggregatorSpecs( Arrays.asList( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index") ) ) - 
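// Illustrative sketch (editor's addition, not part of this patch): the subtotals tests
// above hand setSubtotalsSpec() a list of grouping sets -- each inner list is one grouping
// pass over the queried dimensions, and an empty list yields a grand-total pass:
GroupByQuery query = makeQueryBuilder()
    .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
    .setDimensions(Lists.newArrayList(
        new DefaultDimensionSpec("quality", "alias"),
        new DefaultDimensionSpec("market", "market")))
    .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT)
    .setGranularity(QueryRunnerTestHelper.DAY_GRAN)
    .setSubtotalsSpec(ImmutableList.of(
        ImmutableList.of("alias"),        // pass 1: group by alias only
        ImmutableList.of("market"),       // pass 2: group by market only
        ImmutableList.of()))              // pass 3: grand total
    .build();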
.setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setSubtotalsSpec(ImmutableList.of( ImmutableList.of("ql"), ImmutableList.of("market2"), @@ -7247,19 +7247,19 @@ public class GroupByQueryRunnerTest } GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(Lists.newArrayList( new DefaultDimensionSpec("quality", "alias"), new DefaultDimensionSpec("market", "market") )) .setAggregatorSpecs( Arrays.asList( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index") ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setSubtotalsSpec(ImmutableList.of( ImmutableList.of("alias"), ImmutableList.of("market"), @@ -7313,14 +7313,14 @@ public class GroupByQueryRunnerTest cannotVectorize(); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setAggregatorSpecs( - QueryRunnerTestHelper.rowsCount, - QueryRunnerTestHelper.jsCountIfTimeGreaterThan, - QueryRunnerTestHelper.__timeLongSum + QueryRunnerTestHelper.ROWS_COUNT, + QueryRunnerTestHelper.JS_COUNT_IF_TIME_GREATER_THAN, + QueryRunnerTestHelper.TIME_LONG_SUM ) - .setGranularity(QueryRunnerTestHelper.allGran) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); List expectedResults = Collections.singletonList( @@ -7347,8 +7347,8 @@ public class GroupByQueryRunnerTest cannotVectorize(); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnIntervalSpec) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setDimensions( new DefaultDimensionSpec("market", "market"), new ExtractionDimensionSpec( @@ -7357,9 +7357,9 @@ public class GroupByQueryRunnerTest new TimeFormatExtractionFn("EEEE", null, null, null, false) ) ) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, QueryRunnerTestHelper.indexDoubleSum) - .setPostAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) - .setGranularity(QueryRunnerTestHelper.allGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, QueryRunnerTestHelper.INDEX_DOUBLE_SUM) + .setPostAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .setDimFilter( new OrDimFilter( Arrays.asList( @@ -7612,8 +7612,8 @@ public class GroupByQueryRunnerTest }; GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.fullOnIntervalSpec) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setDimensions( new DefaultDimensionSpec("market", "market"), new ExtractionDimensionSpec( @@ -7624,9 +7624,9 @@ public class GroupByQueryRunnerTest ) ) ) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, QueryRunnerTestHelper.indexDoubleSum) - .setPostAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) - 
.setGranularity(QueryRunnerTestHelper.allGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, QueryRunnerTestHelper.INDEX_DOUBLE_SUM) + .setPostAggregatorSpecs(Collections.singletonList(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .setDimFilter( new OrDimFilter( Arrays.asList( @@ -7844,10 +7844,10 @@ public class GroupByQueryRunnerTest public void testBySegmentResults() { GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04") .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setDimFilter(new SelectorDimFilter("quality", "mezzanine", null)) .setContext(ImmutableMap.of("bySegment", true)); @@ -7869,7 +7869,7 @@ public class GroupByQueryRunnerTest 4420L ) ), - QueryRunnerTestHelper.segmentId.toString(), + QueryRunnerTestHelper.SEGMENT_ID.toString(), Intervals.of("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z") ) ); @@ -7904,7 +7904,7 @@ public class GroupByQueryRunnerTest public void testBySegmentResultsUnOptimizedDimextraction() { GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04").setDimensions(new ExtractionDimensionSpec( "quality", "alias", @@ -7915,7 +7915,7 @@ public class GroupByQueryRunnerTest false, false ) - )).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + )).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setDimFilter(new SelectorDimFilter("quality", "mezzanine", null)) .setContext(ImmutableMap.of("bySegment", true)); @@ -7937,7 +7937,7 @@ public class GroupByQueryRunnerTest 4420L ) ), - QueryRunnerTestHelper.segmentId.toString(), + QueryRunnerTestHelper.SEGMENT_ID.toString(), Intervals.of("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z") ) ); @@ -7971,7 +7971,7 @@ public class GroupByQueryRunnerTest public void testBySegmentResultsOptimizedDimextraction() { GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04").setDimensions(new ExtractionDimensionSpec( "quality", "alias", @@ -7982,7 +7982,7 @@ public class GroupByQueryRunnerTest true, false ) - )).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + )).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setDimFilter(new SelectorDimFilter("quality", "mezzanine", null)) .overrideContext(ImmutableMap.of("bySegment", true)); @@ -8004,7 +8004,7 @@ public class GroupByQueryRunnerTest 4420L ) ), - QueryRunnerTestHelper.segmentId.toString(), + QueryRunnerTestHelper.SEGMENT_ID.toString(), Intervals.of("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z") ) ); @@ -8058,14 +8058,14 @@ public class GroupByQueryRunnerTest ); GroupByQuery query = 
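// Illustrative sketch (editor's addition, not part of this patch): the
// testBySegmentResults variants set the "bySegment" context flag, which wraps each
// per-segment result with its segment id -- hence QueryRunnerTestHelper.SEGMENT_ID in the
// expected output below:
GroupByQuery.Builder builder = makeQueryBuilder()
    .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .setInterval("2011-04-02/2011-04-04")
    .setDimensions(new DefaultDimensionSpec("quality", "alias"))
    .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT,
                        new LongSumAggregatorFactory("idx", "index"))
    .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
    .setDimFilter(new SelectorDimFilter("quality", "mezzanine", null))
    .overrideContext(ImmutableMap.of("bySegment", true));   // per-segment results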
makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) .setAggregatorSpecs( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index") ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setDimFilter(new OrDimFilter(dimFilters)) .build(); List expectedResults = Arrays.asList( @@ -8112,14 +8112,14 @@ public class GroupByQueryRunnerTest MapLookupExtractor mapLookupExtractor = new MapLookupExtractor(extractionMap, false); LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, false); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) .setAggregatorSpecs( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index") ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setDimFilter(new ExtractionDimFilter("quality", "", lookupExtractionFn, null)) .build(); @@ -8152,14 +8152,14 @@ public class GroupByQueryRunnerTest LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, false); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) .setAggregatorSpecs( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index") ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setDimFilter( new ExtractionDimFilter("quality", "NOT_THERE", lookupExtractionFn, null) ) @@ -8188,14 +8188,14 @@ public class GroupByQueryRunnerTest } GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("null_column", "alias")) .setAggregatorSpecs( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index") ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setDimFilter( new ExtractionDimFilter( "null_column", @@ -8233,17 +8233,17 @@ public class GroupByQueryRunnerTest LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, "missing", true, false); DimFilter filter = new ExtractionDimFilter("quality", "mezzanineANDnews", lookupExtractionFn, null); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + 
.setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) .setAggregatorSpecs(new FilteredAggregatorFactory( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, filter ), new FilteredAggregatorFactory( new LongSumAggregatorFactory("idx", "index"), filter )) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( makeRow( @@ -8408,14 +8408,14 @@ public class GroupByQueryRunnerTest MapLookupExtractor mapLookupExtractor = new MapLookupExtractor(extractionMap, false); LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, true); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) .setAggregatorSpecs( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index") ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setDimFilter( new ExtractionDimFilter( "quality", @@ -8454,11 +8454,11 @@ public class GroupByQueryRunnerTest } GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("null_column", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setDimFilter(new ExtractionDimFilter("null_column", "EMPTY", lookupExtractionFn, null)) .build(); List expectedResults = Arrays @@ -8509,10 +8509,10 @@ public class GroupByQueryRunnerTest DimFilter superFilter = new AndDimFilter(superFilterList); GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04") .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setDimFilter(superFilter) .overrideContext(ImmutableMap.of("bySegment", true)); @@ -8534,7 +8534,7 @@ public class GroupByQueryRunnerTest 4420L ) ), - QueryRunnerTestHelper.segmentId.toString(), + QueryRunnerTestHelper.SEGMENT_ID.toString(), Intervals.of("2011-04-02T00:00:00.000Z/2011-04-04T00:00:00.000Z") ) ); @@ -8592,14 +8592,14 @@ public class GroupByQueryRunnerTest DimFilter superFilter = new AndDimFilter(superFilterList); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new 
DefaultDimensionSpec("null_column", "alias")) .setAggregatorSpecs( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index") ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setDimFilter(superFilter).build(); List expectedResults = Arrays.asList( @@ -8621,19 +8621,19 @@ public class GroupByQueryRunnerTest ExtractionFn helloFn = new JavaScriptExtractionFn(helloJsFn, false, JavaScriptConfig.getEnabledInstance()); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("market", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new CardinalityAggregatorFactory( + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new CardinalityAggregatorFactory( "numVals", ImmutableList.of(new ExtractionDimensionSpec( - QueryRunnerTestHelper.qualityDimension, - QueryRunnerTestHelper.qualityDimension, + QueryRunnerTestHelper.QUALITY_DIMENSION, + QueryRunnerTestHelper.QUALITY_DIMENSION, helloFn )), false )) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -8710,18 +8710,18 @@ public class GroupByQueryRunnerTest cannotVectorize(); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("market", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new CardinalityAggregatorFactory( + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new CardinalityAggregatorFactory( "numVals", ImmutableList.of(new DefaultDimensionSpec( - QueryRunnerTestHelper.indexMetric, - QueryRunnerTestHelper.indexMetric + QueryRunnerTestHelper.INDEX_METRIC, + QueryRunnerTestHelper.INDEX_METRIC )), false )) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -8800,17 +8800,17 @@ public class GroupByQueryRunnerTest } GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("qualityLong", "ql_alias", ValueType.LONG)) .setDimFilter(new SelectorDimFilter("quality", "entertainment", null)) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .addOrderByColumn(new OrderByColumnSpec( "ql_alias", OrderByColumnSpec.Direction.ASCENDING, StringComparators.NUMERIC )) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); Assert.assertEquals(Functions.>identity(), query.getLimitSpec().build(query)); @@ -8850,17 +8850,17 @@ public class GroupByQueryRunnerTest } GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - 
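// Illustrative sketch (editor's addition, not part of this patch): the extraction-filter
// tests above push dimension values through a map-backed lookup. The LookupExtractionFn
// arguments, in the order these tests pass them, read as (lookup, retainMissingValue,
// replaceMissingValueWith, injective, optimize); the example mapping is assumed:
Map<String, String> extractionMap = new HashMap<>();
extractionMap.put("mezzanine", "mezzanineANDnews");         // assumed entries
extractionMap.put("news", "mezzanineANDnews");
MapLookupExtractor lookup = new MapLookupExtractor(extractionMap, false);
LookupExtractionFn fn = new LookupExtractionFn(lookup, false, "missing", true, false);
DimFilter filter = new ExtractionDimFilter("quality", "mezzanineANDnews", fn, null);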
.setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("qualityLong", "ql_alias", ValueType.LONG)) .setDimFilter(new InDimFilter("quality", Arrays.asList("entertainment", "technology"), null)) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .addOrderByColumn(new OrderByColumnSpec( "ql_alias", OrderByColumnSpec.Direction.DESCENDING, StringComparators.NUMERIC )) - .setGranularity(QueryRunnerTestHelper.allGran) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); Assert.assertNotEquals(Functions.>identity(), query.getLimitSpec().build(query)); @@ -8906,12 +8906,12 @@ public class GroupByQueryRunnerTest ExtractionFn jsExtractionFn = new JavaScriptExtractionFn(jsFn, false, JavaScriptConfig.getEnabledInstance()); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new ExtractionDimensionSpec("qualityLong", "ql_alias", jsExtractionFn)) .setDimFilter(new SelectorDimFilter("quality", "entertainment", null)) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -8949,12 +8949,12 @@ public class GroupByQueryRunnerTest } GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("__time", "time_alias", ValueType.LONG)) .setDimFilter(new SelectorDimFilter("quality", "entertainment", null)) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -8993,12 +8993,12 @@ public class GroupByQueryRunnerTest ExtractionFn jsExtractionFn = new JavaScriptExtractionFn(jsFn, false, JavaScriptConfig.getEnabledInstance()); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new ExtractionDimensionSpec("__time", "time_alias", jsExtractionFn)) .setDimFilter(new SelectorDimFilter("quality", "entertainment", null)) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = 
Arrays.asList( @@ -9036,17 +9036,17 @@ public class GroupByQueryRunnerTest } GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("index", "index_alias", ValueType.FLOAT)) .setDimFilter(new SelectorDimFilter("quality", "entertainment", null)) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .addOrderByColumn(new OrderByColumnSpec( "index_alias", OrderByColumnSpec.Direction.ASCENDING, StringComparators.NUMERIC )) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); Assert.assertEquals(Functions.>identity(), query.getLimitSpec().build(query)); @@ -9087,17 +9087,17 @@ public class GroupByQueryRunnerTest } GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("qualityFloat", "qf_alias", ValueType.FLOAT)) .setDimFilter(new InDimFilter("quality", Arrays.asList("entertainment", "technology"), null)) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .addOrderByColumn(new OrderByColumnSpec( "qf_alias", OrderByColumnSpec.Direction.DESCENDING, StringComparators.NUMERIC )) - .setGranularity(QueryRunnerTestHelper.allGran) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); Assert.assertNotEquals(Functions.>identity(), query.getLimitSpec().build(query)); @@ -9137,17 +9137,17 @@ public class GroupByQueryRunnerTest } GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("qualityDouble", "alias", ValueType.DOUBLE)) .setDimFilter(new InDimFilter("quality", Arrays.asList("entertainment", "technology"), null)) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .addOrderByColumn(new OrderByColumnSpec( "alias", OrderByColumnSpec.Direction.DESCENDING, StringComparators.NUMERIC )) - .setGranularity(QueryRunnerTestHelper.allGran) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); Assert.assertNotEquals(Functions.>identity(), query.getLimitSpec().build(query)); @@ -9193,12 +9193,12 @@ public class GroupByQueryRunnerTest ExtractionFn jsExtractionFn = new JavaScriptExtractionFn(jsFn, false, JavaScriptConfig.getEnabledInstance()); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new ExtractionDimensionSpec("index", "index_alias", 
jsExtractionFn)) .setDimFilter(new SelectorDimFilter("quality", "entertainment", null)) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults; @@ -9239,14 +9239,14 @@ public class GroupByQueryRunnerTest } GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions( new DefaultDimensionSpec("market", "alias"), new DefaultDimensionSpec("qualityLong", "ql_alias", ValueType.LONG), new DefaultDimensionSpec("__time", "time_alias", ValueType.LONG), new DefaultDimensionSpec("index", "index_alias", ValueType.FLOAT) - ).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount) + ).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT) .setHavingSpec( new DimFilterHavingSpec( new AndDimFilter( @@ -9268,7 +9268,7 @@ public class GroupByQueryRunnerTest null ) ) - .setGranularity(QueryRunnerTestHelper.allGran) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); List expectedResults = Collections.singletonList( @@ -9296,15 +9296,15 @@ public class GroupByQueryRunnerTest } GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions( new DefaultDimensionSpec("qualityLong", "ql_alias"), new DefaultDimensionSpec("qualityFloat", "qf_alias") ) .setDimFilter(new SelectorDimFilter("quality", "entertainment", null)) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -9346,28 +9346,28 @@ public class GroupByQueryRunnerTest } GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions( new DefaultDimensionSpec("qualityLong", "ql_alias"), new DefaultDimensionSpec("qualityFloat", "qf_alias"), new DefaultDimensionSpec(ColumnHolder.TIME_COLUMN_NAME, "time_alias") ) .setDimFilter(new SelectorDimFilter("quality", "entertainment", null)) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); GroupByQuery outerQuery = makeQueryBuilder() .setDataSource(subquery) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions( new DefaultDimensionSpec("time_alias", "time_alias2", ValueType.LONG), new DefaultDimensionSpec("ql_alias", "ql_alias_long", ValueType.LONG), new 
DefaultDimensionSpec("qf_alias", "qf_alias_float", ValueType.FLOAT), new DefaultDimensionSpec("ql_alias", "ql_alias_float", ValueType.FLOAT) ).setAggregatorSpecs(new CountAggregatorFactory("count")) - .setGranularity(QueryRunnerTestHelper.allGran) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); List expectedResults = Arrays.asList( @@ -9419,12 +9419,12 @@ public class GroupByQueryRunnerTest ); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(regexSpec, listFilteredSpec) .setDimFilter(new InDimFilter("quality", Arrays.asList("entertainment", "technology"), null)) .setAggregatorSpecs(new CountAggregatorFactory("count")) - .setGranularity(QueryRunnerTestHelper.allGran) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .addOrderByColumn("ql") .build(); @@ -9474,12 +9474,12 @@ public class GroupByQueryRunnerTest ); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(regexSpec, listFilteredSpec) .setDimFilter(new InDimFilter("quality", Arrays.asList("entertainment", "technology"), null)) .setAggregatorSpecs(new CountAggregatorFactory("count")) - .setGranularity(QueryRunnerTestHelper.allGran) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); List expectedResults; if (NullHandling.replaceWithDefault()) { @@ -9531,8 +9531,8 @@ public class GroupByQueryRunnerTest } GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions( new DefaultDimensionSpec("quality", "alias"), new DefaultDimensionSpec("qualityLong", "ql_alias", ValueType.LONG), @@ -9544,13 +9544,13 @@ public class GroupByQueryRunnerTest Collections.singletonList("entertainment"), null ) - ).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + ).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); GroupByQuery outerQuery = makeQueryBuilder() .setDataSource(subquery) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions( new DefaultDimensionSpec("ql_alias", "quallong", ValueType.LONG), new DefaultDimensionSpec("qf_alias", "qualfloat", ValueType.FLOAT) @@ -9576,7 +9576,7 @@ public class GroupByQueryRunnerTest new LongSumAggregatorFactory("ql_alias_sum", "ql_alias"), new DoubleSumAggregatorFactory("qf_alias_sum", "qf_alias") ) - .setGranularity(QueryRunnerTestHelper.allGran) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); List expectedResults = Collections.singletonList( @@ -9607,19 +9607,19 @@ public class GroupByQueryRunnerTest } GroupByQuery subQuery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + 
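// Illustrative sketch (editor's addition, not part of this patch): these hunks exercise
// numeric GROUP BY columns -- a DefaultDimensionSpec can declare a LONG or FLOAT output
// type so that qualityLong, index, or even __time groups as a number rather than a string:
GroupByQuery query = makeQueryBuilder()
    .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
    .setDimensions(
        new DefaultDimensionSpec("qualityLong", "ql_alias", ValueType.LONG),
        new DefaultDimensionSpec("index", "index_alias", ValueType.FLOAT),
        new DefaultDimensionSpec("__time", "time_alias", ValueType.LONG))
    .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT)
    .setGranularity(QueryRunnerTestHelper.ALL_GRAN)
    .build();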
.setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions( new DefaultDimensionSpec("market", "alias"), new DefaultDimensionSpec("__time", "time_alias", ValueType.LONG), new DefaultDimensionSpec("index", "index_alias", ValueType.FLOAT) - ).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount) - .setGranularity(QueryRunnerTestHelper.allGran) + ).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); GroupByQuery outerQuery = makeQueryBuilder() .setDataSource(subQuery) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions( new DefaultDimensionSpec("alias", "market"), new DefaultDimensionSpec("time_alias", "time_alias2", ValueType.LONG) @@ -9628,7 +9628,7 @@ public class GroupByQueryRunnerTest new LongMaxAggregatorFactory("time_alias_max", "time_alias"), new DoubleMaxAggregatorFactory("index_alias_max", "index_alias") ) - .setGranularity(QueryRunnerTestHelper.allGran) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); List expectedResults = Arrays.asList( @@ -9700,17 +9700,17 @@ public class GroupByQueryRunnerTest ExtractionFn strlenFn = StrlenExtractionFn.instance(); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new ExtractionDimensionSpec( - QueryRunnerTestHelper.qualityDimension, + QueryRunnerTestHelper.QUALITY_DIMENSION, "alias", ValueType.LONG, strlenFn )) - .setDimFilter(new SelectorDimFilter(QueryRunnerTestHelper.qualityDimension, "entertainment", null)) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setDimFilter(new SelectorDimFilter(QueryRunnerTestHelper.QUALITY_DIMENSION, "entertainment", null)) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -9747,12 +9747,12 @@ public class GroupByQueryRunnerTest cannotVectorize(); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) .setDimFilter(new SelectorDimFilter("quality", "technology", null)) .setAggregatorSpecs( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("qlLong", "qualityLong"), new DoubleSumAggregatorFactory("qlFloat", "qualityLong"), new JavaScriptAggregatorFactory( @@ -9774,7 +9774,7 @@ public class GroupByQueryRunnerTest JavaScriptConfig.getEnabledInstance() ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -9823,8 +9823,8 @@ public class GroupByQueryRunnerTest ExtractionFn jsExtractionFn = new JavaScriptExtractionFn(jsFn, false, JavaScriptConfig.getEnabledInstance()); GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + 
.setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias"), new ExtractionDimensionSpec( "qualityFloat", "qf_inner", @@ -9832,20 +9832,20 @@ public class GroupByQueryRunnerTest jsExtractionFn )) .setDimFilter(new SelectorDimFilter("quality", "technology", null)) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); GroupByQuery outerQuery = makeQueryBuilder() .setDataSource(subquery) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("alias", "alias"), new ExtractionDimensionSpec( "qf_inner", "qf_outer", ValueType.FLOAT, jsExtractionFn - )).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount) - .setGranularity(QueryRunnerTestHelper.allGran) + )).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); List expectedResults = Collections.singletonList( @@ -9868,8 +9868,8 @@ public class GroupByQueryRunnerTest } GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions( new DefaultDimensionSpec("quality", "alias"), new ExtractionDimensionSpec( @@ -9880,20 +9880,20 @@ public class GroupByQueryRunnerTest ) ) .setDimFilter(new SelectorDimFilter("quality", "technology", null)) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); GroupByQuery outerQuery = makeQueryBuilder() .setDataSource(subquery) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("alias", "alias"), new ExtractionDimensionSpec( "time_day", "time_week", ValueType.LONG, new TimeFormatExtractionFn(null, null, null, Granularities.WEEK, true) - )).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount) - .setGranularity(QueryRunnerTestHelper.allGran) + )).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) .build(); List expectedResults = Collections.singletonList( @@ -9911,12 +9911,12 @@ public class GroupByQueryRunnerTest return; } GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setGranularity(QueryRunnerTestHelper.allGran).setDimensions(new DefaultDimensionSpec( - QueryRunnerTestHelper.marketDimension, + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN).setDimensions(new DefaultDimensionSpec( + QueryRunnerTestHelper.MARKET_DIMENSION, "marketalias" )) - .setInterval(QueryRunnerTestHelper.fullOnIntervalSpec) + .setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setLimitSpec( new DefaultLimitSpec( Collections.singletonList(new OrderByColumnSpec( @@ -9925,7 +9925,7 @@ public class GroupByQueryRunnerTest )), 2 ) - ).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount) + ).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT) 
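// Illustrative sketch (editor's addition, not part of this patch): the limit-push-down
// tests opt in through the CTX_KEY_FORCE_LIMIT_PUSH_DOWN context key, pushing the ordered
// limit below the merge so each segment returns at most the top N rows:
GroupByQuery query = makeQueryBuilder()
    .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
    .setGranularity(QueryRunnerTestHelper.ALL_GRAN)
    .setDimensions(new DefaultDimensionSpec(QueryRunnerTestHelper.MARKET_DIMENSION, "marketalias"))
    .setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
    .setLimitSpec(new DefaultLimitSpec(
        Collections.singletonList(
            new OrderByColumnSpec("marketalias", OrderByColumnSpec.Direction.DESCENDING)),
        2))                                                  // top 2 markets
    .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT)
    .overrideContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_FORCE_LIMIT_PUSH_DOWN, true))
    .build();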
.overrideContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_FORCE_LIMIT_PUSH_DOWN, true)) .build(); @@ -9959,10 +9959,10 @@ public class GroupByQueryRunnerTest return; } GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04") .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setLimitSpec( new DefaultLimitSpec( Collections.singletonList(new OrderByColumnSpec("alias", OrderByColumnSpec.Direction.DESCENDING)), @@ -10025,10 +10025,10 @@ public class GroupByQueryRunnerTest return; } GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04") .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setLimitSpec( new DefaultLimitSpec( Collections.singletonList(new OrderByColumnSpec("idx", OrderByColumnSpec.Direction.DESCENDING)), @@ -10088,10 +10088,10 @@ public class GroupByQueryRunnerTest return; } GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04") .setDimensions(new DefaultDimensionSpec("quality", "alias"), new DefaultDimensionSpec("market", "market")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setLimitSpec( new DefaultLimitSpec( Lists.newArrayList( @@ -10156,10 +10156,10 @@ public class GroupByQueryRunnerTest return; } GroupByQuery.Builder builder = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setInterval("2011-04-02/2011-04-04") .setDimensions(new DefaultDimensionSpec("quality", "alias"), new DefaultDimensionSpec("market", "market")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setLimitSpec( new DefaultLimitSpec( Lists.newArrayList( @@ -10237,12 +10237,12 @@ public class GroupByQueryRunnerTest expectedException.expectMessage("Limit push down when sorting by a post aggregator is not supported."); GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setGranularity(QueryRunnerTestHelper.allGran).setDimensions(new DefaultDimensionSpec( - QueryRunnerTestHelper.marketDimension, + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN).setDimensions(new DefaultDimensionSpec( + QueryRunnerTestHelper.MARKET_DIMENSION, "marketalias" )) - .setInterval(QueryRunnerTestHelper.fullOnIntervalSpec) + .setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setLimitSpec( new DefaultLimitSpec( Collections.singletonList(new OrderByColumnSpec( @@ -10251,7 +10251,7 @@ public class GroupByQueryRunnerTest )), 2 ) - 
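// Illustrative sketch (editor's addition, not part of this patch): DefaultLimitSpec takes
// an optional list of OrderByColumnSpec plus a row limit. Passing a null column list, as
// the empty-subquery test below does, means "limit only, keep input order":
LimitSpec orderedLimit = new DefaultLimitSpec(
    Collections.singletonList(
        new OrderByColumnSpec("alias", OrderByColumnSpec.Direction.DESCENDING)),
    5);                                                      // top 5 by alias, descending
LimitSpec plainLimit = new DefaultLimitSpec(null, 5);        // first 5 rows as-is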
).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount) + ).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT) .setPostAggregatorSpecs( Collections.singletonList(new ConstantPostAggregator("constant", 1)) ) @@ -10270,10 +10270,10 @@ public class GroupByQueryRunnerTest public void testEmptySubqueryWithLimitPushDown() { GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.emptyInterval) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.EMPTY_INTERVAL) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) .setLimitSpec( new DefaultLimitSpec( Collections.singletonList(new OrderByColumnSpec( @@ -10283,15 +10283,15 @@ public class GroupByQueryRunnerTest 5 ) ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); GroupByQuery query = makeQueryBuilder() .setDataSource(subquery) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setAggregatorSpecs(new DoubleMaxAggregatorFactory("idx", "idx")) .setLimitSpec(new DefaultLimitSpec(null, 5)) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); Iterable results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query); @@ -10303,8 +10303,8 @@ public class GroupByQueryRunnerTest public void testSubqueryWithMultipleIntervalsInOuterQueryWithLimitPushDown() { GroupByQuery subquery = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) .setDimFilter(new JavaScriptDimFilter( "quality", @@ -10319,11 +10319,11 @@ public class GroupByQueryRunnerTest ) ) .setAggregatorSpecs( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index"), new LongSumAggregatorFactory("indexMaxPlusTen", "indexMaxPlusTen") ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); GroupByQuery query = makeQueryBuilder() @@ -10346,7 +10346,7 @@ public class GroupByQueryRunnerTest new LongSumAggregatorFactory("rows", "rows"), new LongSumAggregatorFactory("idx", "idx") ) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( @@ -10377,17 +10377,17 @@ public class GroupByQueryRunnerTest expectedException.expectMessage("Cannot force limit push down when a having spec is present."); makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setGranularity(QueryRunnerTestHelper.allGran) - .setDimensions(new DefaultDimensionSpec(QueryRunnerTestHelper.marketDimension, "marketalias")) - .setInterval(QueryRunnerTestHelper.fullOnIntervalSpec) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setGranularity(QueryRunnerTestHelper.ALL_GRAN) + .setDimensions(new DefaultDimensionSpec(QueryRunnerTestHelper.MARKET_DIMENSION, "marketalias")) + .setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .setLimitSpec( new 
DefaultLimitSpec( Collections.singletonList(new OrderByColumnSpec("marketalias", OrderByColumnSpec.Direction.DESCENDING)), 2 ) ) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT) .overrideContext(ImmutableMap.of(GroupByQueryConfig.CTX_KEY_FORCE_LIMIT_PUSH_DOWN, true)) .setHavingSpec(new GreaterThanHavingSpec("rows", 10)) .build(); @@ -10405,15 +10405,15 @@ public class GroupByQueryRunnerTest } GroupByQuery query = makeQueryBuilder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions( new DefaultDimensionSpec("quality", "alias"), new ExtractionDimensionSpec("quality", "qualityLen", ValueType.LONG, StrlenExtractionFn.instance()) ) .setDimFilter(new SelectorDimFilter("quality", "technology", null)) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .build(); List expectedResults = Arrays.asList( diff --git a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryTest.java b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryTest.java index 9f0ce6d7350..34a7d5a5253 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryTest.java @@ -47,18 +47,18 @@ import java.util.List; public class GroupByQueryTest { - private static final ObjectMapper jsonMapper = TestHelper.makeJsonMapper(); + private static final ObjectMapper JSON_MAPPER = TestHelper.makeJsonMapper(); @Test public void testQuerySerialization() throws IOException { Query query = GroupByQuery .builder() - .setDataSource(QueryRunnerTestHelper.dataSource) - .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) + .setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD) .setDimensions(new DefaultDimensionSpec("quality", "alias")) - .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new LongSumAggregatorFactory("idx", "index")) - .setGranularity(QueryRunnerTestHelper.dayGran) + .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "index")) + .setGranularity(QueryRunnerTestHelper.DAY_GRAN) .setPostAggregatorSpecs(ImmutableList.of(new FieldAccessPostAggregator("x", "idx"))) .setLimitSpec( new DefaultLimitSpec( @@ -72,8 +72,8 @@ public class GroupByQueryTest ) .build(); - String json = jsonMapper.writeValueAsString(query); - Query serdeQuery = jsonMapper.readValue(json, Query.class); + String json = JSON_MAPPER.writeValueAsString(query); + Query serdeQuery = JSON_MAPPER.readValue(json, Query.class); Assert.assertEquals(query, serdeQuery); } @@ -105,7 +105,7 @@ public class GroupByQueryTest "2011-11-07/2011-11-08"))); QuerySegmentSpec outerQuerySegmentSpec = new MultipleIntervalSegmentSpec(Collections.singletonList((Intervals.of( "2011-11-04/2011-11-08")))); - List aggs = Collections.singletonList(QueryRunnerTestHelper.rowsCount); + List aggs = Collections.singletonList(QueryRunnerTestHelper.ROWS_COUNT); final GroupByQuery innerQuery = GroupByQuery.builder() .setDataSource("blah") .setInterval(innerQuerySegmentSpec) diff --git 
diff --git a/processing/src/test/java/org/apache/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java b/processing/src/test/java/org/apache/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java
index f2cf1c41b05..8ddeb57f623 100644
--- a/processing/src/test/java/org/apache/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java
+++ b/processing/src/test/java/org/apache/druid/query/groupby/GroupByTimeseriesQueryRunnerTest.java
@@ -60,12 +60,12 @@ import java.util.List;
 @RunWith(Parameterized.class)
 public class GroupByTimeseriesQueryRunnerTest extends TimeseriesQueryRunnerTest
 {
-  private static final Closer resourceCloser = Closer.create();
+  private static final Closer RESOURCE_CLOSER = Closer.create();
   @AfterClass
   public static void teardown() throws IOException
   {
-    resourceCloser.close();
+    RESOURCE_CLOSER.close();
   }
   @SuppressWarnings("unchecked")
@@ -78,7 +78,7 @@ public class GroupByTimeseriesQueryRunnerTest extends TimeseriesQueryRunnerTest
        config
    );
    final GroupByQueryRunnerFactory factory = factoryAndCloser.lhs;
-    resourceCloser.register(factoryAndCloser.rhs);
+    RESOURCE_CLOSER.register(factoryAndCloser.rhs);
    final List constructors = new ArrayList<>();
@@ -149,7 +149,7 @@ public class GroupByTimeseriesQueryRunnerTest extends TimeseriesQueryRunnerTest
  public GroupByTimeseriesQueryRunnerTest(QueryRunner runner, boolean vectorize)
  {
-    super(runner, false, vectorize, QueryRunnerTestHelper.commonDoubleAggregators);
+    super(runner, false, vectorize, QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS);
  }
  // GroupBy handles timestamps differently when granularity is ALL
@@ -158,9 +158,9 @@ public class GroupByTimeseriesQueryRunnerTest extends TimeseriesQueryRunnerTest
  public void testFullOnTimeseriesMaxMin()
  {
    TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
-                                  .dataSource(QueryRunnerTestHelper.dataSource)
+                                  .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
                                   .granularity(Granularities.ALL)
-                                  .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+                                  .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
                                   .aggregators(
                                       new DoubleMaxAggregatorFactory("maxIndex", "index"),
                                       new DoubleMinAggregatorFactory("minIndex", "index")
diff --git a/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalyzerTest.java b/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalyzerTest.java
index c7f65f51926..4f3a81dbb20 100644
--- a/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalyzerTest.java
+++ b/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalyzerTest.java
@@ -46,14 +46,14 @@ import java.util.Map;
 */
 public class SegmentAnalyzerTest
 {
-  private static final EnumSet emptyAnalyses =
+  private static final EnumSet EMPTY_ANALYSES =
      EnumSet.noneOf(SegmentMetadataQuery.AnalysisType.class);
   @Test
   public void testIncrementalWorks()
   {
     testIncrementalWorksHelper(null);
-    testIncrementalWorksHelper(emptyAnalyses);
+    testIncrementalWorksHelper(EMPTY_ANALYSES);
   }
   private void testIncrementalWorksHelper(EnumSet analyses)
@@ -112,7 +112,7 @@
   public void testMappedWorks()
   {
     testMappedWorksHelper(null);
-    testMappedWorksHelper(emptyAnalyses);
+    testMappedWorksHelper(EMPTY_ANALYSES);
   }
   private void testMappedWorksHelper(EnumSet analyses)
diff --git a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataUnionQueryTest.java b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataUnionQueryTest.java
index e6218117215..ca5100baaea 100644
--- a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataUnionQueryTest.java
+++ b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataUnionQueryTest.java
@@ -69,7 +69,7 @@ public class SegmentMetadataUnionQueryTest
        new Object[]{
            QueryRunnerTestHelper.makeUnionQueryRunner(
                FACTORY,
-                new QueryableIndexSegment(TestIndex.getMMappedTestIndex(), QueryRunnerTestHelper.segmentId),
+                new QueryableIndexSegment(TestIndex.getMMappedTestIndex(), QueryRunnerTestHelper.SEGMENT_ID),
                null
            ),
            true,
@@ -77,7 +77,7 @@
        new Object[]{
            QueryRunnerTestHelper.makeUnionQueryRunner(
                FACTORY,
-                new IncrementalIndexSegment(TestIndex.getIncrementalTestIndex(), QueryRunnerTestHelper.segmentId),
+                new IncrementalIndexSegment(TestIndex.getIncrementalTestIndex(), QueryRunnerTestHelper.SEGMENT_ID),
                null
            ),
            false
@@ -90,7 +90,7 @@
  public void testSegmentMetadataUnionQuery()
  {
    SegmentAnalysis expected = new SegmentAnalysis(
-        QueryRunnerTestHelper.segmentId.toString(),
+        QueryRunnerTestHelper.SEGMENT_ID.toString(),
        Collections.singletonList(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")),
        ImmutableMap.of(
            "placement",
@@ -112,8 +112,8 @@
        null
    );
    SegmentMetadataQuery query = new Druids.SegmentMetadataQueryBuilder()
-        .dataSource(QueryRunnerTestHelper.unionDataSource)
-        .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+        .dataSource(QueryRunnerTestHelper.UNION_DATA_SOURCE)
+        .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
        .toInclude(new ListColumnIncluderator(Collections.singletonList("placement")))
        .analysisTypes(
            SegmentMetadataQuery.AnalysisType.CARDINALITY,
diff --git a/processing/src/test/java/org/apache/druid/query/scan/MultiSegmentScanQueryTest.java b/processing/src/test/java/org/apache/druid/query/scan/MultiSegmentScanQueryTest.java
index 1f64d122215..e664803e4cb 100644
--- a/processing/src/test/java/org/apache/druid/query/scan/MultiSegmentScanQueryTest.java
+++ b/processing/src/test/java/org/apache/druid/query/scan/MultiSegmentScanQueryTest.java
@@ -65,13 +65,13 @@ import java.util.List;
 @RunWith(Parameterized.class)
 public class MultiSegmentScanQueryTest
 {
-  private static final ScanQueryQueryToolChest toolChest = new ScanQueryQueryToolChest(
+  private static final ScanQueryQueryToolChest TOOL_CHEST = new ScanQueryQueryToolChest(
      new ScanQueryConfig(),
      DefaultGenericQueryMetricsFactory.instance()
  );
-  private static final QueryRunnerFactory factory = new ScanQueryRunnerFactory(
-      toolChest,
+  private static final QueryRunnerFactory FACTORY = new ScanQueryRunnerFactory(
+      TOOL_CHEST,
      new ScanQueryEngine(),
      new ScanQueryConfig()
  );
@@ -132,7 +132,7 @@
  private static SegmentId makeIdentifier(Interval interval, String version)
  {
-    return SegmentId.of(QueryRunnerTestHelper.dataSource, interval, version, NoneShardSpec.instance());
+    return SegmentId.of(QueryRunnerTestHelper.DATA_SOURCE, interval, version, NoneShardSpec.instance());
  }
  private static IncrementalIndex newIndex(String minTimeStamp)
@@ -181,7 +181,7 @@
  private Druids.ScanQueryBuilder newBuilder()
  {
    return Druids.newScanQueryBuilder()
-                 .dataSource(new TableDataSource(QueryRunnerTestHelper.dataSource))
+                 .dataSource(new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE))
                 .intervals(SelectQueryRunnerTest.I_0112_0114_SPEC)
                 .batchSize(batchSize)
                 .columns(Collections.emptyList())
@@ -193,10 +193,10 @@ public class MultiSegmentScanQueryTest
  public void testMergeRunnersWithLimit()
  {
    ScanQuery query = newBuilder().build();
-    List results = factory
+    List results = FACTORY
        .mergeRunners(
            Execs.directExecutor(),
-            ImmutableList.of(factory.createRunner(segment0), factory.createRunner(segment1))
+            ImmutableList.of(FACTORY.createRunner(segment0), FACTORY.createRunner(segment1))
        )
        .run(QueryPlus.wrap(query))
        .toList();
@@ -214,7 +214,7 @@
  @Test
  public void testMergeResultsWithLimit()
  {
-    QueryRunner runner = toolChest.mergeResults(
+    QueryRunner runner = TOOL_CHEST.mergeResults(
        new QueryRunner()
        {
          @Override
@@ -225,8 +225,8 @@
          {
            // simulate results back from 2 historicals
            List> sequences = Lists.newArrayListWithExpectedSize(2);
-            sequences.add(factory.createRunner(segment0).run(queryPlus));
-            sequences.add(factory.createRunner(segment1).run(queryPlus));
+            sequences.add(FACTORY.createRunner(segment0).run(queryPlus));
+            sequences.add(FACTORY.createRunner(segment1).run(queryPlus));
            return new MergeSequence<>(
                queryPlus.getQuery().getResultOrdering(),
                Sequences.simple(sequences)
diff --git a/processing/src/test/java/org/apache/druid/query/scan/ScanQueryLimitRowIteratorTest.java b/processing/src/test/java/org/apache/druid/query/scan/ScanQueryLimitRowIteratorTest.java
index c03ddde9494..bc653c71895 100644
--- a/processing/src/test/java/org/apache/druid/query/scan/ScanQueryLimitRowIteratorTest.java
+++ b/processing/src/test/java/org/apache/druid/query/scan/ScanQueryLimitRowIteratorTest.java
@@ -45,7 +45,7 @@ public class ScanQueryLimitRowIteratorTest
  private static int limit;
  private static List singleEventScanResultValues = new ArrayList<>();
  private static List multiEventScanResultValues = new ArrayList<>();
-  private static final ScanQuery.ResultFormat resultFormat = ScanQuery.ResultFormat.RESULT_FORMAT_LIST;
+  private static final ScanQuery.ResultFormat RESULT_FORMAT = ScanQuery.ResultFormat.RESULT_FORMAT_LIST;
  public ScanQueryLimitRowIteratorTest(
      final int batchSize,
@@ -76,7 +76,7 @@
      singleEventScanResultValues.add(
          ScanQueryTestHelper.generateScanResultValue(
              ThreadLocalRandom.current().nextLong(),
-              resultFormat,
+              RESULT_FORMAT,
              1
          ));
    }
@@ -84,14 +84,14 @@
      multiEventScanResultValues.add(
          ScanQueryTestHelper.generateScanResultValue(
              ThreadLocalRandom.current().nextLong(),
-              resultFormat,
+              RESULT_FORMAT,
              batchSize
          ));
    }
    multiEventScanResultValues.add(
        ScanQueryTestHelper.generateScanResultValue(
            ThreadLocalRandom.current().nextLong(),
-            resultFormat,
+            RESULT_FORMAT,
            NUM_ELEMENTS % batchSize
        ));
  }
@@ -107,8 +107,8 @@
        .order(ScanQuery.Order.NONE)
        .dataSource("some datasource")
        .batchSize(batchSize)
-        .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
-        .resultFormat(resultFormat)
+        .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
+        .resultFormat(RESULT_FORMAT)
        .context(ImmutableMap.of(ScanQuery.CTX_KEY_OUTERMOST, false))
        .build();
    QueryPlus queryPlus = QueryPlus.wrap(query);
@@ -148,8 +148,8 @@
        .order(ScanQuery.Order.DESCENDING)
        .dataSource("some datasource")
        .batchSize(batchSize)
-        .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
-        .resultFormat(resultFormat)
+        .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
+        .resultFormat(RESULT_FORMAT)
        .build();
    QueryPlus queryPlus = QueryPlus.wrap(query);
    ScanQueryLimitRowIterator itr = new ScanQueryLimitRowIterator(
@@ -187,8 +187,8 @@
        .order(ScanQuery.Order.DESCENDING)
        .dataSource("some datasource")
        .batchSize(batchSize)
-        .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
-        .resultFormat(resultFormat)
+        .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
+        .resultFormat(RESULT_FORMAT)
        .context(ImmutableMap.of(ScanQuery.CTX_KEY_OUTERMOST, false))
        .build();
diff --git a/processing/src/test/java/org/apache/druid/query/scan/ScanQueryRunnerFactoryTest.java b/processing/src/test/java/org/apache/druid/query/scan/ScanQueryRunnerFactoryTest.java
index 287733d441b..8038d68a252 100644
--- a/processing/src/test/java/org/apache/druid/query/scan/ScanQueryRunnerFactoryTest.java
+++ b/processing/src/test/java/org/apache/druid/query/scan/ScanQueryRunnerFactoryTest.java
@@ -53,7 +53,7 @@ import java.util.List;
 public class ScanQueryRunnerFactoryTest
 {
-  private static final ScanQueryRunnerFactory factory = new ScanQueryRunnerFactory(
+  private static final ScanQueryRunnerFactory FACTORY = new ScanQueryRunnerFactory(
      new ScanQueryQueryToolChest(
          new ScanQueryConfig(),
          DefaultGenericQueryMetricsFactory.instance()
@@ -82,7 +82,7 @@
        .batchSize(batchSize)
        .limit(limit)
        .order(order)
-        .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+        .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
        .dataSource("some datasource")
        .resultFormat(resultFormat)
        .build();
@@ -137,7 +137,7 @@
    });
    Sequence inputSequence = Sequences.simple(srvs);
    try {
-      List output = factory.priorityQueueSortAndLimit(
+      List output = FACTORY.priorityQueueSortAndLimit(
          inputSequence,
          query,
          ImmutableList.of(new Interval(
@@ -226,7 +226,7 @@
    });
    List output =
-        factory.nWayMergeAndLimit(
+        FACTORY.nWayMergeAndLimit(
            groupedRunners,
            QueryPlus.wrap(query),
            ResponseContext.createEmpty()
@@ -284,11 +284,11 @@
    );
    QuerySegmentSpec singleSpecificSpec = new SpecificSegmentSpec(descriptor);
-    List intervals = factory.getIntervalsFromSpecificQuerySpec(multiSpecificSpec);
+    List intervals = FACTORY.getIntervalsFromSpecificQuerySpec(multiSpecificSpec);
    Assert.assertEquals(1, intervals.size());
    Assert.assertEquals(descriptor.getInterval(), intervals.get(0));
-    intervals = factory.getIntervalsFromSpecificQuerySpec(singleSpecificSpec);
+    intervals = FACTORY.getIntervalsFromSpecificQuerySpec(singleSpecificSpec);
    Assert.assertEquals(1, intervals.size());
    Assert.assertEquals(descriptor.getInterval(), intervals.get(0));
  }
@@ -304,7 +304,7 @@
            )
        )
    );
-    factory.getIntervalsFromSpecificQuerySpec(multiIntervalSpec);
+    FACTORY.getIntervalsFromSpecificQuerySpec(multiIntervalSpec);
  }
  @Test(expected = UOE.class)
@@ -316,7 +316,7 @@
            DateTimes.of("2019-01-01").plusHours(1)
        )
    );
-    factory.getIntervalsFromSpecificQuerySpec(legacySpec);
+    FACTORY.getIntervalsFromSpecificQuerySpec(legacySpec);
  }
 }
}
diff --git a/processing/src/test/java/org/apache/druid/query/scan/ScanQueryRunnerTest.java b/processing/src/test/java/org/apache/druid/query/scan/ScanQueryRunnerTest.java
index 0507da3b2c1..0b9900a594f 100644
--- a/processing/src/test/java/org/apache/druid/query/scan/ScanQueryRunnerTest.java
+++ b/processing/src/test/java/org/apache/druid/query/scan/ScanQueryRunnerTest.java
@@ -112,7 +112,7 @@
  );
  public static final String[] V_0112_0114 = ObjectArrays.concat(V_0112, V_0113, String.class);
-  private static final ScanQueryQueryToolChest toolChest = new ScanQueryQueryToolChest(
+  private static final ScanQueryQueryToolChest TOOL_CHEST = new ScanQueryQueryToolChest(
      new ScanQueryConfig(),
      DefaultGenericQueryMetricsFactory.instance()
  );
@@ -123,7 +123,7 @@
    return QueryRunnerTestHelper.cartesian(
        QueryRunnerTestHelper.makeQueryRunners(
            new ScanQueryRunnerFactory(
-                toolChest,
+                TOOL_CHEST,
                new ScanQueryEngine(),
                new ScanQueryConfig()
            )
@@ -144,9 +144,9 @@
  private Druids.ScanQueryBuilder newTestQuery()
  {
    return Druids.newScanQueryBuilder()
-                 .dataSource(new TableDataSource(QueryRunnerTestHelper.dataSource))
+                 .dataSource(new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE))
                 .columns(Collections.emptyList())
-                 .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+                 .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
                 .limit(3)
                 .legacy(legacy);
  }
@@ -239,8 +239,8 @@
        .intervals(I_0112_0114)
        .columns(
            ColumnHolder.TIME_COLUMN_NAME,
-            QueryRunnerTestHelper.marketDimension,
-            QueryRunnerTestHelper.indexMetric
+            QueryRunnerTestHelper.MARKET_DIMENSION,
+            QueryRunnerTestHelper.INDEX_METRIC
        )
        .build();
@@ -249,7 +249,7 @@
    final List>> expectedEvents = toEvents(
        new String[]{
            getTimestampName() + ":TIME",
-            QueryRunnerTestHelper.marketDimension + ":STRING",
+            QueryRunnerTestHelper.MARKET_DIMENSION + ":STRING",
            null,
            null,
            null,
@@ -257,7 +257,7 @@
            null,
            null,
            null,
-            QueryRunnerTestHelper.indexMetric + ":DOUBLE"
+            QueryRunnerTestHelper.INDEX_METRIC + ":DOUBLE"
        },
        V_0112_0114
    );
@@ -287,7 +287,7 @@
  {
    ScanQuery query = newTestQuery()
        .intervals(I_0112_0114)
-        .columns(QueryRunnerTestHelper.marketDimension, QueryRunnerTestHelper.indexMetric)
+        .columns(QueryRunnerTestHelper.MARKET_DIMENSION, QueryRunnerTestHelper.INDEX_METRIC)
        .build();
    Iterable results = runner.run(QueryPlus.wrap(query)).toList();
@@ -296,7 +296,7 @@
        toEvents(
            new String[]{
                legacy ? getTimestampName() + ":TIME" : null,
-                QueryRunnerTestHelper.marketDimension + ":STRING",
+                QueryRunnerTestHelper.MARKET_DIMENSION + ":STRING",
                null,
                null,
                null,
@@ -304,7 +304,7 @@
                null,
                null,
                null,
-                QueryRunnerTestHelper.indexMetric + ":DOUBLE"
+                QueryRunnerTestHelper.INDEX_METRIC + ":DOUBLE"
            },
            V_0112_0114
        ),
@@ -320,7 +320,7 @@
  {
    ScanQuery query = newTestQuery()
        .intervals(I_0112_0114)
-        .columns(QueryRunnerTestHelper.marketDimension, QueryRunnerTestHelper.indexMetric)
+        .columns(QueryRunnerTestHelper.MARKET_DIMENSION, QueryRunnerTestHelper.INDEX_METRIC)
        .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST)
        .build();
@@ -330,7 +330,7 @@
        toEvents(
            new String[]{
                legacy ? getTimestampName() + ":TIME" : null,
-                QueryRunnerTestHelper.marketDimension + ":STRING",
+                QueryRunnerTestHelper.MARKET_DIMENSION + ":STRING",
                null,
                null,
                null,
@@ -338,7 +338,7 @@
                null,
                null,
                null,
-                QueryRunnerTestHelper.indexMetric + ":DOUBLE"
+                QueryRunnerTestHelper.INDEX_METRIC + ":DOUBLE"
            },
            V_0112_0114
        ),
@@ -356,8 +356,8 @@
    for (int limit : new int[]{3, 1, 5, 7, 0}) {
      ScanQuery query = newTestQuery()
          .intervals(I_0112_0114)
-          .filters(new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot", null))
-          .columns(QueryRunnerTestHelper.qualityDimension, QueryRunnerTestHelper.indexMetric)
+          .filters(new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null))
+          .columns(QueryRunnerTestHelper.QUALITY_DIMENSION, QueryRunnerTestHelper.INDEX_METRIC)
          .limit(limit)
          .build();
@@ -367,10 +367,10 @@
          new String[]{
              legacy ? getTimestampName() + ":TIME" : null,
              null,
-              QueryRunnerTestHelper.qualityDimension + ":STRING",
+              QueryRunnerTestHelper.QUALITY_DIMENSION + ":STRING",
              null,
              null,
-              QueryRunnerTestHelper.indexMetric + ":DOUBLE"
+              QueryRunnerTestHelper.INDEX_METRIC + ":DOUBLE"
          },
          // filtered values with day granularity
          new String[]{
@@ -416,13 +416,13 @@
    LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, true);
    ScanQuery query = newTestQuery()
        .intervals(I_0112_0114)
-        .filters(new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "replaced", lookupExtractionFn))
-        .columns(QueryRunnerTestHelper.qualityDimension, QueryRunnerTestHelper.indexMetric)
+        .filters(new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "replaced", lookupExtractionFn))
+        .columns(QueryRunnerTestHelper.QUALITY_DIMENSION, QueryRunnerTestHelper.INDEX_METRIC)
        .build();
    Iterable results = runner.run(QueryPlus.wrap(query)).toList();
-    Iterable resultsOptimize = toolChest
-        .postMergeQueryDecoration(toolChest.mergeResults(toolChest.preMergeQueryDecoration(runner)))
+    Iterable resultsOptimize = TOOL_CHEST
+        .postMergeQueryDecoration(TOOL_CHEST.mergeResults(TOOL_CHEST.preMergeQueryDecoration(runner)))
        .run(QueryPlus.wrap(query))
        .toList();
@@ -430,10 +430,10 @@
        new String[]{
            legacy ? getTimestampName() + ":TIME" : null,
            null,
-            QueryRunnerTestHelper.qualityDimension + ":STRING",
+            QueryRunnerTestHelper.QUALITY_DIMENSION + ":STRING",
            null,
            null,
-            QueryRunnerTestHelper.indexMetric + ":DOUBLE"
+            QueryRunnerTestHelper.INDEX_METRIC + ":DOUBLE"
        },
        // filtered values with day granularity
        new String[]{
@@ -450,11 +450,11 @@
        events,
        legacy ?
        Lists.newArrayList(
            getTimestampName(),
-            QueryRunnerTestHelper.qualityDimension,
-            QueryRunnerTestHelper.indexMetric
+            QueryRunnerTestHelper.QUALITY_DIMENSION,
+            QueryRunnerTestHelper.INDEX_METRIC
        ) :
        Lists.newArrayList(
-            QueryRunnerTestHelper.qualityDimension,
-            QueryRunnerTestHelper.indexMetric
+            QueryRunnerTestHelper.QUALITY_DIMENSION,
+            QueryRunnerTestHelper.INDEX_METRIC
        ),
        0,
        3
@@ -472,8 +472,8 @@
        .filters(
            new AndDimFilter(
                Arrays.asList(
-                    new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot", null),
-                    new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "foo", null)
+                    new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null),
+                    new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "foo", null)
                )
            )
        )
@@ -518,11 +518,11 @@
    for (int limit : new int[]{3, 1, 5, 7, 0}) {
      ScanQuery query = newTestQuery()
          .intervals(I_0112_0114)
-          .filters(new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot", null))
+          .filters(new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null))
          .columns(
-              QueryRunnerTestHelper.timeDimension,
-              QueryRunnerTestHelper.qualityDimension,
-              QueryRunnerTestHelper.indexMetric
+              QueryRunnerTestHelper.TIME_DIMENSION,
+              QueryRunnerTestHelper.QUALITY_DIMENSION,
+              QueryRunnerTestHelper.INDEX_METRIC
          )
          .limit(limit)
          .order(ScanQuery.Order.ASCENDING)
@@ -556,10 +556,10 @@
          new String[]{
              legacy ? getTimestampName() + ":TIME" : ColumnHolder.TIME_COLUMN_NAME,
              null,
-              QueryRunnerTestHelper.qualityDimension + ":STRING",
+              QueryRunnerTestHelper.QUALITY_DIMENSION + ":STRING",
              null,
              null,
-              QueryRunnerTestHelper.indexMetric + ":DOUBLE"
+              QueryRunnerTestHelper.INDEX_METRIC + ":DOUBLE"
          },
          (String[]) ArrayUtils.addAll(seg1Results, seg2Results)
      );
@@ -582,13 +582,13 @@
          ascendingEvents,
          legacy ?
          Lists.newArrayList(
-              QueryRunnerTestHelper.timeDimension,
+              QueryRunnerTestHelper.TIME_DIMENSION,
              getTimestampName(),
              "quality",
              "index"
          ) :
          Lists.newArrayList(
-              QueryRunnerTestHelper.timeDimension,
+              QueryRunnerTestHelper.TIME_DIMENSION,
              "quality",
              "index"
          ),
@@ -606,11 +606,11 @@
    for (int limit : new int[]{3, 1, 5, 7, 0}) {
      ScanQuery query = newTestQuery()
          .intervals(I_0112_0114)
-          .filters(new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot", null))
+          .filters(new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null))
          .columns(
-              QueryRunnerTestHelper.timeDimension,
-              QueryRunnerTestHelper.qualityDimension,
-              QueryRunnerTestHelper.indexMetric
+              QueryRunnerTestHelper.TIME_DIMENSION,
+              QueryRunnerTestHelper.QUALITY_DIMENSION,
+              QueryRunnerTestHelper.INDEX_METRIC
          )
          .limit(limit)
          .order(ScanQuery.Order.DESCENDING)
@@ -645,10 +645,10 @@
          new String[]{
              legacy ? getTimestampName() + ":TIME" : ColumnHolder.TIME_COLUMN_NAME,
              null,
-              QueryRunnerTestHelper.qualityDimension + ":STRING",
+              QueryRunnerTestHelper.QUALITY_DIMENSION + ":STRING",
              null,
              null,
-              QueryRunnerTestHelper.indexMetric + ":DOUBLE"
+              QueryRunnerTestHelper.INDEX_METRIC + ":DOUBLE"
          },
          expectedRet
      );
@@ -669,14 +669,14 @@
          descendingEvents,
          legacy ?
          Lists.newArrayList(
-              QueryRunnerTestHelper.timeDimension,
+              QueryRunnerTestHelper.TIME_DIMENSION,
              getTimestampName(), // getTimestampName() always returns the legacy timestamp when legacy is true
              "quality",
              "index"
          ) :
          Lists.newArrayList(
-              QueryRunnerTestHelper.timeDimension,
+              QueryRunnerTestHelper.TIME_DIMENSION,
              "quality",
              "index"
          ),
@@ -717,11 +717,11 @@
    /* Ascending */
    ScanQuery query = newTestQuery()
        .intervals(I_0112_0114)
-        .filters(new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot", null))
+        .filters(new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null))
        .columns(
-            QueryRunnerTestHelper.timeDimension,
-            QueryRunnerTestHelper.qualityDimension,
-            QueryRunnerTestHelper.indexMetric
+            QueryRunnerTestHelper.TIME_DIMENSION,
+            QueryRunnerTestHelper.QUALITY_DIMENSION,
+            QueryRunnerTestHelper.INDEX_METRIC
        )
        .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST)
        .order(ScanQuery.Order.ASCENDING)
@@ -733,10 +733,10 @@
        new String[]{
            legacy ? getTimestampName() + ":TIME" : ColumnHolder.TIME_COLUMN_NAME,
            null,
-            QueryRunnerTestHelper.qualityDimension + ":STRING",
+            QueryRunnerTestHelper.QUALITY_DIMENSION + ":STRING",
            null,
            null,
-            QueryRunnerTestHelper.indexMetric + ":DOUBLE"
+            QueryRunnerTestHelper.INDEX_METRIC + ":DOUBLE"
        },
        (String[]) ArrayUtils.addAll(seg1Results, seg2Results)
    );
@@ -757,14 +757,14 @@
        ascendingEvents,
        legacy ?
        Lists.newArrayList(
-            QueryRunnerTestHelper.timeDimension,
+            QueryRunnerTestHelper.TIME_DIMENSION,
            getTimestampName(), // getTimestampName() always returns the legacy timestamp when legacy is true
            "quality",
            "index"
        ) :
        Lists.newArrayList(
-            QueryRunnerTestHelper.timeDimension,
+            QueryRunnerTestHelper.TIME_DIMENSION,
            "quality",
            "index"
        ),
@@ -806,11 +806,11 @@
    /* Descending */
    ScanQuery query = newTestQuery()
        .intervals(I_0112_0114)
-        .filters(new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot", null))
+        .filters(new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null))
        .columns(
-            QueryRunnerTestHelper.timeDimension,
-            QueryRunnerTestHelper.qualityDimension,
-            QueryRunnerTestHelper.indexMetric
+            QueryRunnerTestHelper.TIME_DIMENSION,
+            QueryRunnerTestHelper.QUALITY_DIMENSION,
+            QueryRunnerTestHelper.INDEX_METRIC
        )
        .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST)
        .order(ScanQuery.Order.DESCENDING)
@@ -825,10 +825,10 @@
        new String[]{
            legacy ? getTimestampName() + ":TIME" : ColumnHolder.TIME_COLUMN_NAME,
            null,
-            QueryRunnerTestHelper.qualityDimension + ":STRING",
+            QueryRunnerTestHelper.QUALITY_DIMENSION + ":STRING",
            null,
            null,
-            QueryRunnerTestHelper.indexMetric + ":DOUBLE"
+            QueryRunnerTestHelper.INDEX_METRIC + ":DOUBLE"
        },
        expectedRet //segments in reverse order from above
    );
@@ -849,14 +849,14 @@
        descendingEvents,
        legacy ?
        Lists.newArrayList(
-            QueryRunnerTestHelper.timeDimension,
+            QueryRunnerTestHelper.TIME_DIMENSION,
            getTimestampName(), // getTimestampName() always returns the legacy timestamp when legacy is true
            "quality",
            "index"
        ) :
        Lists.newArrayList(
-            QueryRunnerTestHelper.timeDimension,
+            QueryRunnerTestHelper.TIME_DIMENSION,
            "quality",
            "index"
        ),
@@ -873,16 +873,16 @@
    return toEvents(
        new String[]{
            getTimestampName() + ":TIME",
-            QueryRunnerTestHelper.marketDimension + ":STRING",
-            QueryRunnerTestHelper.qualityDimension + ":STRING",
+            QueryRunnerTestHelper.MARKET_DIMENSION + ":STRING",
+            QueryRunnerTestHelper.QUALITY_DIMENSION + ":STRING",
            "qualityLong" + ":LONG",
            "qualityFloat" + ":FLOAT",
            "qualityDouble" + ":DOUBLE",
            "qualityNumericString" + ":STRING",
-            QueryRunnerTestHelper.placementDimension + ":STRING",
-            QueryRunnerTestHelper.placementishDimension + ":STRINGS",
-            QueryRunnerTestHelper.indexMetric + ":DOUBLE",
-            QueryRunnerTestHelper.partialNullDimension + ":STRING",
+            QueryRunnerTestHelper.PLACEMENT_DIMENSION + ":STRING",
+            QueryRunnerTestHelper.PLACEMENTISH_DIMENSION + ":STRINGS",
+            QueryRunnerTestHelper.INDEX_METRIC + ":DOUBLE",
+            QueryRunnerTestHelper.PARTIAL_NULL_DIMENSION + ":STRING",
            "expr",
            "indexMin",
            "indexFloat",
@@ -919,23 +919,23 @@
        if (dimSpecs[i].equals(EXPR_COLUMN.getOutputName())) {
          event.put(
              EXPR_COLUMN.getOutputName(),
-              (double) event.get(QueryRunnerTestHelper.indexMetric) * 2
+              (double) event.get(QueryRunnerTestHelper.INDEX_METRIC) * 2
          );
          continue;
        } else if (dimSpecs[i].equals("indexMin")) {
-          event.put("indexMin", (double) event.get(QueryRunnerTestHelper.indexMetric));
+          event.put("indexMin", (double) event.get(QueryRunnerTestHelper.INDEX_METRIC));
          continue;
        } else if (dimSpecs[i].equals("indexFloat")) {
-          event.put("indexFloat", (float) (double) event.get(QueryRunnerTestHelper.indexMetric));
+          event.put("indexFloat", (float) (double) event.get(QueryRunnerTestHelper.INDEX_METRIC));
          continue;
        } else if (dimSpecs[i].equals("indexMaxPlusTen")) {
-          event.put("indexMaxPlusTen", (double) event.get(QueryRunnerTestHelper.indexMetric) + 10);
+          event.put("indexMaxPlusTen", (double) event.get(QueryRunnerTestHelper.INDEX_METRIC) + 10);
          continue;
        } else if (dimSpecs[i].equals("indexMinFloat")) {
-          event.put("indexMinFloat", (float) (double) event.get(QueryRunnerTestHelper.indexMetric));
+          event.put("indexMinFloat", (float) (double) event.get(QueryRunnerTestHelper.INDEX_METRIC));
          continue;
        } else if (dimSpecs[i].equals("indexMaxFloat")) {
-          event.put("indexMaxFloat", (float) (double) event.get(QueryRunnerTestHelper.indexMetric));
+          event.put("indexMaxFloat", (float) (double) event.get(QueryRunnerTestHelper.INDEX_METRIC));
          continue;
        } else if (dimSpecs[i].equals("quality_uniques")) {
          final HyperLogLogCollector collector = HyperLogLogCollector.makeLatestCollector();
@@ -1002,7 +1002,7 @@
        end = group.size();
      }
      events.addAll(group.subList(offset, end));
-      expected.add(new ScanResultValue(QueryRunnerTestHelper.segmentId.toString(), columns, events));
+      expected.add(new ScanResultValue(QueryRunnerTestHelper.SEGMENT_ID.toString(), columns, events));
    }
    return expected;
  }
diff --git a/processing/src/test/java/org/apache/druid/query/scan/ScanQuerySpecTest.java b/processing/src/test/java/org/apache/druid/query/scan/ScanQuerySpecTest.java
index b49a1033f2e..661960b609f 100644
--- a/processing/src/test/java/org/apache/druid/query/scan/ScanQuerySpecTest.java
+++ b/processing/src/test/java/org/apache/druid/query/scan/ScanQuerySpecTest.java
@@ -33,7 +33,7 @@ import java.util.Arrays;
 public class ScanQuerySpecTest
 {
-  private static final ObjectMapper jsonMapper = new DefaultObjectMapper();
+  private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper();
  @Test
  public void testSerializationLegacyString() throws Exception
@@ -62,7 +62,7 @@
        + "\"granularity\":{\"type\":\"all\"}}";
    ScanQuery query = new ScanQuery(
-        new TableDataSource(QueryRunnerTestHelper.dataSource),
+        new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
        new LegacySegmentSpec(Intervals.of("2011-01-12/2011-01-14")),
        VirtualColumns.EMPTY,
        ScanQuery.ResultFormat.RESULT_FORMAT_LIST,
@@ -75,9 +75,9 @@
        null
    );
-    String actual = jsonMapper.writeValueAsString(query);
+    String actual = JSON_MAPPER.writeValueAsString(query);
    Assert.assertEquals(current, actual);
-    Assert.assertEquals(query, jsonMapper.readValue(actual, ScanQuery.class));
-    Assert.assertEquals(query, jsonMapper.readValue(legacy, ScanQuery.class));
+    Assert.assertEquals(query, JSON_MAPPER.readValue(actual, ScanQuery.class));
+    Assert.assertEquals(query, JSON_MAPPER.readValue(legacy, ScanQuery.class));
  }
}
diff --git a/processing/src/test/java/org/apache/druid/query/search/DefaultSearchQueryMetricsTest.java b/processing/src/test/java/org/apache/druid/query/search/DefaultSearchQueryMetricsTest.java
index d3f4ee1fefa..e729a9cd317 100644
--- a/processing/src/test/java/org/apache/druid/query/search/DefaultSearchQueryMetricsTest.java
+++ b/processing/src/test/java/org/apache/druid/query/search/DefaultSearchQueryMetricsTest.java
@@ -49,9 +49,9 @@
    ServiceEmitter serviceEmitter = new ServiceEmitter("", "", cachingEmitter);
    SearchQuery query = Druids
        .newSearchQueryBuilder()
-        .dataSource(QueryRunnerTestHelper.dataSource)
-        .granularity(QueryRunnerTestHelper.dayGran)
-        .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+        .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+        .granularity(QueryRunnerTestHelper.DAY_GRAN)
+        .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
        .dimensions(new ListFilteredDimensionSpec(
            new DefaultDimensionSpec("tags", "tags"),
            ImmutableSet.of("t3"),
@@ -70,9 +70,9 @@
    Assert.assertTrue(actualEvent.containsKey("timestamp"));
    Assert.assertEquals("", actualEvent.get("host"));
    Assert.assertEquals("", actualEvent.get("service"));
-    Assert.assertEquals(QueryRunnerTestHelper.dataSource, actualEvent.get(DruidMetrics.DATASOURCE));
+    Assert.assertEquals(QueryRunnerTestHelper.DATA_SOURCE, actualEvent.get(DruidMetrics.DATASOURCE));
    Assert.assertEquals(query.getType(), actualEvent.get(DruidMetrics.TYPE));
-    List expectedIntervals = QueryRunnerTestHelper.fullOnIntervalSpec.getIntervals();
+    List expectedIntervals = QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC.getIntervals();
    List expectedStringIntervals =
        expectedIntervals.stream().map(Interval::toString).collect(Collectors.toList());
    Assert.assertEquals(expectedStringIntervals, actualEvent.get(DruidMetrics.INTERVAL));
@@ -90,9 +90,9 @@
  {
    SearchQuery query = Druids
        .newSearchQueryBuilder()
-        .dataSource(QueryRunnerTestHelper.dataSource)
-        .granularity(QueryRunnerTestHelper.dayGran)
-        .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+        .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+        .granularity(QueryRunnerTestHelper.DAY_GRAN)
+        .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
        .build();
    CachingEmitter cachingEmitter = new CachingEmitter();
diff --git a/processing/src/test/java/org/apache/druid/query/search/RegexSearchQueryTest.java b/processing/src/test/java/org/apache/druid/query/search/RegexSearchQueryTest.java
index b59907331b1..bd120e38fd6 100644
--- a/processing/src/test/java/org/apache/druid/query/search/RegexSearchQueryTest.java
+++ b/processing/src/test/java/org/apache/druid/query/search/RegexSearchQueryTest.java
@@ -28,15 +28,15 @@ import java.io.IOException;
 public class RegexSearchQueryTest
 {
-  private static final ObjectMapper jsonMapper = new DefaultObjectMapper();
+  private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper();
  @Test
  public void testQuerySerialization() throws IOException
  {
    RegexSearchQuerySpec spec = new RegexSearchQuerySpec("(upfront|total_market)");
-    String json = jsonMapper.writeValueAsString(spec);
-    RegexSearchQuerySpec serdeQuery = (RegexSearchQuerySpec) jsonMapper.readValue(json, RegexSearchQuerySpec.class);
+    String json = JSON_MAPPER.writeValueAsString(spec);
+    RegexSearchQuerySpec serdeQuery = (RegexSearchQuerySpec) JSON_MAPPER.readValue(json, RegexSearchQuerySpec.class);
    Assert.assertEquals(spec, serdeQuery);
  }
diff --git a/processing/src/test/java/org/apache/druid/query/search/SearchQueryRunnerTest.java b/processing/src/test/java/org/apache/druid/query/search/SearchQueryRunnerTest.java
index 3b573150b18..41c2c1a3ed3 100644
--- a/processing/src/test/java/org/apache/druid/query/search/SearchQueryRunnerTest.java
+++ b/processing/src/test/java/org/apache/druid/query/search/SearchQueryRunnerTest.java
@@ -76,12 +76,12 @@ public class SearchQueryRunnerTest
{
  private static final Logger LOG = new Logger(SearchQueryRunnerTest.class);
-  private static final SearchQueryConfig config = new SearchQueryConfig();
-  private static final SearchQueryQueryToolChest toolChest = new SearchQueryQueryToolChest(
-      config,
+  private static final SearchQueryConfig CONFIG = new SearchQueryConfig();
+  private static final SearchQueryQueryToolChest TOOL_CHEST = new SearchQueryQueryToolChest(
+      CONFIG,
      QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator()
  );
-  private static final SearchStrategySelector selector = new SearchStrategySelector(Suppliers.ofInstance(config));
+  private static final SearchStrategySelector SELECTOR = new SearchStrategySelector(Suppliers.ofInstance(CONFIG));
  @Parameterized.Parameters(name = "{0}")
  public static Iterable constructorFeeder()
@@ -89,8 +89,8 @@
    return QueryRunnerTestHelper.transformToConstructionFeeder(
        QueryRunnerTestHelper.makeQueryRunners(
            new SearchQueryRunnerFactory(
-                selector,
-                toolChest,
+                SELECTOR,
+                TOOL_CHEST,
                QueryRunnerTestHelper.NOOP_QUERYWATCHER
            )
        )
@@ -105,8 +105,8 @@
  )
  {
    this.runner = runner;
-    this.decoratedRunner = toolChest.postMergeQueryDecoration(
-        toolChest.mergeResults(toolChest.preMergeQueryDecoration(runner)));
+    this.decoratedRunner = TOOL_CHEST.postMergeQueryDecoration(
+        TOOL_CHEST.mergeResults(TOOL_CHEST.preMergeQueryDecoration(runner)));
  }
  @Test
@@ -130,21 +130,21 @@
  public void testSearch()
  {
    SearchQuery searchQuery = Druids.newSearchQueryBuilder()
-                                    .dataSource(QueryRunnerTestHelper.dataSource)
-                                    .granularity(QueryRunnerTestHelper.allGran)
-                                    .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+                                    .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+                                    .granularity(QueryRunnerTestHelper.ALL_GRAN)
+                                    .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
                                    .query("a")
                                    .build();
    List expectedHits = new ArrayList<>();
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "automotive", 93));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "mezzanine", 279));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "travel", 93));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "health", 93));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "entertainment", 93));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.marketDimension, "total_market", 186));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.placementishDimension, "a", 93));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.partialNullDimension, "value", 186));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "automotive", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "mezzanine", 279));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "travel", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "health", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "entertainment", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", 186));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION, "a", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.PARTIAL_NULL_DIMENSION, "value", 186));
    checkSearchQuery(searchQuery, expectedHits);
  }
@@ -153,14 +153,14 @@
  public void testSearchWithCardinality()
  {
    final SearchQuery searchQuery = Druids.newSearchQueryBuilder()
-                                          .dataSource(QueryRunnerTestHelper.dataSource)
-                                          .granularity(QueryRunnerTestHelper.allGran)
-                                          .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+                                          .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+                                          .granularity(QueryRunnerTestHelper.ALL_GRAN)
+                                          .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
                                          .query("a")
                                          .build();
    // double the value
-    QueryRunner mergedRunner = toolChest.mergeResults(
+    QueryRunner mergedRunner = TOOL_CHEST.mergeResults(
        new QueryRunner>()
        {
          @Override
@@ -181,14 +181,14 @@
    );
    List expectedHits = new ArrayList<>();
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "automotive", 91));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "mezzanine", 273));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "travel", 91));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "health", 91));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "entertainment", 91));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.marketDimension, "total_market", 182));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.placementishDimension, "a", 91));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.partialNullDimension, "value", 182));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "automotive", 91));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "mezzanine", 273));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "travel", 91));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "health", 91));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "entertainment", 91));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", 182));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION, "a", 91));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.PARTIAL_NULL_DIMENSION, "value", 182));
    checkSearchQuery(searchQuery, mergedRunner, expectedHits);
  }
@@ -197,22 +197,22 @@
  public void testSearchSameValueInMultiDims()
  {
    SearchQuery searchQuery = Druids.newSearchQueryBuilder()
-                                    .dataSource(QueryRunnerTestHelper.dataSource)
-                                    .granularity(QueryRunnerTestHelper.allGran)
-                                    .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+                                    .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+                                    .granularity(QueryRunnerTestHelper.ALL_GRAN)
+                                    .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
                                    .dimensions(
                                        Arrays.asList(
-                                            QueryRunnerTestHelper.placementDimension,
-                                            QueryRunnerTestHelper.placementishDimension
+                                            QueryRunnerTestHelper.PLACEMENT_DIMENSION,
+                                            QueryRunnerTestHelper.PLACEMENTISH_DIMENSION
                                        )
                                    )
                                    .query("e")
                                    .build();
    List expectedHits = new ArrayList<>();
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.placementDimension, "preferred", 1209));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.placementishDimension, "e", 93));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.placementishDimension, "preferred", 1209));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.PLACEMENT_DIMENSION, "preferred", 1209));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION, "e", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION, "preferred", 1209));
    checkSearchQuery(searchQuery, expectedHits);
  }
@@ -221,13 +221,13 @@
  public void testSearchSameValueInMultiDims2()
  {
    SearchQuery searchQuery = Druids.newSearchQueryBuilder()
-                                    .dataSource(QueryRunnerTestHelper.dataSource)
-                                    .granularity(QueryRunnerTestHelper.allGran)
-                                    .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+                                    .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+                                    .granularity(QueryRunnerTestHelper.ALL_GRAN)
+                                    .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
                                    .dimensions(
                                        Arrays.asList(
-                                            QueryRunnerTestHelper.placementDimension,
-                                            QueryRunnerTestHelper.placementishDimension
+                                            QueryRunnerTestHelper.PLACEMENT_DIMENSION,
+                                            QueryRunnerTestHelper.PLACEMENTISH_DIMENSION
                                        )
                                    )
                                    .sortSpec(new SearchSortSpec(StringComparators.STRLEN))
@@ -235,9 +235,9 @@
                                    .build();
    List expectedHits = new ArrayList<>();
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.placementishDimension, "e", 93));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.placementDimension, "preferred", 1209));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.placementishDimension, "preferred", 1209));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION, "e", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.PLACEMENT_DIMENSION, "preferred", 1209));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION, "preferred", 1209));
    checkSearchQuery(searchQuery, expectedHits);
  }
@@ -246,14 +246,14 @@
  public void testFragmentSearch()
  {
    SearchQuery searchQuery = Druids.newSearchQueryBuilder()
-                                    .dataSource(QueryRunnerTestHelper.dataSource)
-                                    .granularity(QueryRunnerTestHelper.allGran)
-                                    .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+                                    .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+                                    .granularity(QueryRunnerTestHelper.ALL_GRAN)
+                                    .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
                                    .query(new FragmentSearchQuerySpec(Arrays.asList("auto", "ve")))
                                    .build();
    List expectedHits = new ArrayList<>();
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "automotive", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "automotive", 93));
    checkSearchQuery(searchQuery, expectedHits);
  }
@@ -262,18 +262,18 @@
  public void testSearchWithDimensionQuality()
  {
    List expectedHits = new ArrayList<>();
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "automotive", 93));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "mezzanine", 279));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "travel", 93));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "health", 93));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "entertainment", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "automotive", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "mezzanine", 279));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "travel", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "health", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "entertainment", 93));
    checkSearchQuery(
        Druids.newSearchQueryBuilder()
-              .dataSource(QueryRunnerTestHelper.dataSource)
-              .granularity(QueryRunnerTestHelper.allGran)
+              .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+              .granularity(QueryRunnerTestHelper.ALL_GRAN)
              .dimensions("quality")
-              .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+              .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
              .query("a")
              .build(),
        expectedHits
@@ -284,14 +284,14 @@
  public void testSearchWithDimensionProvider()
  {
    List expectedHits = new ArrayList<>();
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.marketDimension, "total_market", 186));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", 186));
    checkSearchQuery(
        Druids.newSearchQueryBuilder()
-              .dataSource(QueryRunnerTestHelper.dataSource)
-              .granularity(QueryRunnerTestHelper.allGran)
+              .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+              .granularity(QueryRunnerTestHelper.ALL_GRAN)
              .dimensions("market")
-              .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+              .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
              .query("a")
              .build(),
        expectedHits
@@ -302,24 +302,24 @@
  public void testSearchWithDimensionsQualityAndProvider()
  {
    List expectedHits = new ArrayList<>();
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "automotive", 93));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "mezzanine", 279));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "travel", 93));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "health", 93));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "entertainment", 93));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.marketDimension, "total_market", 186));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "automotive", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "mezzanine", 279));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "travel", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "health", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "entertainment", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", 186));
    checkSearchQuery(
        Druids.newSearchQueryBuilder()
-              .dataSource(QueryRunnerTestHelper.dataSource)
-              .granularity(QueryRunnerTestHelper.allGran)
+              .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+              .granularity(QueryRunnerTestHelper.ALL_GRAN)
              .dimensions(
                  Arrays.asList(
-                      QueryRunnerTestHelper.qualityDimension,
-                      QueryRunnerTestHelper.marketDimension
+                      QueryRunnerTestHelper.QUALITY_DIMENSION,
+                      QueryRunnerTestHelper.MARKET_DIMENSION
                  )
              )
-              .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+              .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
              .query("a")
              .build(),
        expectedHits
@@ -330,19 +330,19 @@
  public void testSearchWithDimensionsPlacementAndProvider()
  {
    List expectedHits = new ArrayList<>();
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.marketDimension, "total_market", 186));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", 186));
    checkSearchQuery(
        Druids.newSearchQueryBuilder()
-              .dataSource(QueryRunnerTestHelper.dataSource)
-              .granularity(QueryRunnerTestHelper.allGran)
+              .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+              .granularity(QueryRunnerTestHelper.ALL_GRAN)
              .dimensions(
                  Arrays.asList(
-                      QueryRunnerTestHelper.placementishDimension,
-                      QueryRunnerTestHelper.marketDimension
+                      QueryRunnerTestHelper.PLACEMENTISH_DIMENSION,
+                      QueryRunnerTestHelper.MARKET_DIMENSION
                  )
              )
-              .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+              .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
              .query("mark")
              .build(),
        expectedHits
@@ -355,7 +355,7 @@
  {
    final String automotiveSnowman = "automotive☃";
    List expectedHits = new ArrayList<>();
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, automotiveSnowman, 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, automotiveSnowman, 93));
    final LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(
        new MapLookupExtractor(ImmutableMap.of("automotive", automotiveSnowman), false),
@@ -366,20 +366,20 @@
    );
    SearchQuery query = Druids.newSearchQueryBuilder()
-                              .dataSource(QueryRunnerTestHelper.dataSource)
-                              .granularity(QueryRunnerTestHelper.allGran)
+                              .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+                              .granularity(QueryRunnerTestHelper.ALL_GRAN)
                              .filters(
                                  new ExtractionDimFilter(
-                                      QueryRunnerTestHelper.qualityDimension,
+                                      QueryRunnerTestHelper.QUALITY_DIMENSION,
                                      automotiveSnowman,
                                      lookupExtractionFn,
                                      null
                                  )
                              )
-                              .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+                              .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
                              .dimensions(
                                  new ExtractionDimensionSpec(
-                                      QueryRunnerTestHelper.qualityDimension,
+                                      QueryRunnerTestHelper.QUALITY_DIMENSION,
                                      null,
                                      lookupExtractionFn
                                  )
@@ -394,20 +394,20 @@
  public void testSearchWithSingleFilter1()
  {
    List expectedHits = new ArrayList<>();
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "mezzanine", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "mezzanine", 93));
    checkSearchQuery(
        Druids.newSearchQueryBuilder()
-              .dataSource(QueryRunnerTestHelper.dataSource)
-              .granularity(QueryRunnerTestHelper.allGran)
+              .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+              .granularity(QueryRunnerTestHelper.ALL_GRAN)
              .filters(
                  new AndDimFilter(
                      Arrays.asList(
-                          new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "total_market", null),
-                          new SelectorDimFilter(QueryRunnerTestHelper.qualityDimension, "mezzanine", null)
+                          new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", null),
+                          new SelectorDimFilter(QueryRunnerTestHelper.QUALITY_DIMENSION, "mezzanine", null)
                      )))
-              .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
-              .dimensions(QueryRunnerTestHelper.qualityDimension)
+              .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
+              .dimensions(QueryRunnerTestHelper.QUALITY_DIMENSION)
              .query("a")
              .build(),
        expectedHits
@@ -418,15 +418,15 @@
  public void testSearchWithSingleFilter2()
  {
    List expectedHits = new ArrayList<>();
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.marketDimension, "total_market", 186));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", 186));
    checkSearchQuery(
        Druids.newSearchQueryBuilder()
-              .dataSource(QueryRunnerTestHelper.dataSource)
-              .granularity(QueryRunnerTestHelper.allGran)
-              .filters(QueryRunnerTestHelper.marketDimension, "total_market")
-              .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
-              .dimensions(QueryRunnerTestHelper.marketDimension)
+              .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+              .granularity(QueryRunnerTestHelper.ALL_GRAN)
+              .filters(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market")
+              .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
+              .dimensions(QueryRunnerTestHelper.MARKET_DIMENSION)
              .query("a")
              .build(),
        expectedHits
@@ -437,20 +437,20 @@
  public void testSearchMultiAndFilter()
  {
    List expectedHits = new ArrayList<>();
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "automotive", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "automotive", 93));
    DimFilter filter = new AndDimFilter(
-        new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot", null),
-        new SelectorDimFilter(QueryRunnerTestHelper.qualityDimension, "automotive", null)
+        new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null),
+        new SelectorDimFilter(QueryRunnerTestHelper.QUALITY_DIMENSION, "automotive", null)
    );
    checkSearchQuery(
        Druids.newSearchQueryBuilder()
-              .dataSource(QueryRunnerTestHelper.dataSource)
-              .granularity(QueryRunnerTestHelper.allGran)
+              .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+              .granularity(QueryRunnerTestHelper.ALL_GRAN)
              .filters(filter)
-              .dimensions(QueryRunnerTestHelper.qualityDimension)
-              .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+              .dimensions(QueryRunnerTestHelper.QUALITY_DIMENSION)
+              .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
              .query("a")
              .build(),
        expectedHits
@@ -461,20 +461,20 @@
  public void testSearchWithMultiOrFilter()
  {
    List expectedHits = new ArrayList<>();
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "automotive", 93));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "automotive", 93));
    DimFilter filter = new OrDimFilter(
-        new SelectorDimFilter(QueryRunnerTestHelper.qualityDimension, "total_market", null),
-        new SelectorDimFilter(QueryRunnerTestHelper.qualityDimension, "automotive", null)
+        new SelectorDimFilter(QueryRunnerTestHelper.QUALITY_DIMENSION, "total_market", null),
+        new SelectorDimFilter(QueryRunnerTestHelper.QUALITY_DIMENSION, "automotive", null)
    );
    checkSearchQuery(
        Druids.newSearchQueryBuilder()
-              .dataSource(QueryRunnerTestHelper.dataSource)
-              .granularity(QueryRunnerTestHelper.allGran)
-              .dimensions(QueryRunnerTestHelper.qualityDimension)
+              .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+              .granularity(QueryRunnerTestHelper.ALL_GRAN)
+              .dimensions(QueryRunnerTestHelper.QUALITY_DIMENSION)
              .filters(filter)
-              .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+              .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
              .query("a")
              .build(),
        expectedHits
@@ -488,9 +488,9 @@
    checkSearchQuery(
        Druids.newSearchQueryBuilder()
-              .dataSource(QueryRunnerTestHelper.dataSource)
-              .granularity(QueryRunnerTestHelper.allGran)
-              .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+              .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+              .granularity(QueryRunnerTestHelper.ALL_GRAN)
+              .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
              .query("abcd123")
              .build(),
        expectedHits
@@ -503,16 +503,16 @@
    List expectedHits = new ArrayList<>();
    DimFilter filter = new AndDimFilter(
-        new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "total_market", null),
-        new SelectorDimFilter(QueryRunnerTestHelper.qualityDimension, "automotive", null)
+        new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", null),
+        new SelectorDimFilter(QueryRunnerTestHelper.QUALITY_DIMENSION, "automotive", null)
    );
    checkSearchQuery(
        Druids.newSearchQueryBuilder()
-              .dataSource(QueryRunnerTestHelper.dataSource)
-              .granularity(QueryRunnerTestHelper.allGran)
+              .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+              .granularity(QueryRunnerTestHelper.ALL_GRAN)
              .filters(filter)
-              .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+              .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
              .query("a")
              .build(),
        expectedHits
@@ -527,9 +527,9 @@
    checkSearchQuery(
        Druids.newSearchQueryBuilder()
-              .dataSource(QueryRunnerTestHelper.dataSource)
-              .granularity(QueryRunnerTestHelper.allGran)
-              .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+              .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+              .granularity(QueryRunnerTestHelper.ALL_GRAN)
+              .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
              .dimensions("does_not_exist")
              .query("a")
              .build(),
@@ -541,26 +541,26 @@
  public void testSearchAll()
  {
    List expectedHits = new ArrayList<>();
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.marketDimension, "spot", 837));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.marketDimension, "total_market", 186));
-    expectedHits.add(new SearchHit(QueryRunnerTestHelper.marketDimension, "upfront", 186));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", 837));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", 186));
+    expectedHits.add(new SearchHit(QueryRunnerTestHelper.MARKET_DIMENSION, "upfront", 186));
    checkSearchQuery(
        Druids.newSearchQueryBuilder()
-              .dataSource(QueryRunnerTestHelper.dataSource)
-              .granularity(QueryRunnerTestHelper.allGran)
+              .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+              .granularity(QueryRunnerTestHelper.ALL_GRAN)
.intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) + .dimensions(QueryRunnerTestHelper.MARKET_DIMENSION) .query("") .build(), expectedHits ); checkSearchQuery( Druids.newSearchQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) - .dimensions(QueryRunnerTestHelper.marketDimension) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) + .dimensions(QueryRunnerTestHelper.MARKET_DIMENSION) .build(), expectedHits ); @@ -570,22 +570,22 @@ public class SearchQueryRunnerTest public void testSearchWithNumericSort() { SearchQuery searchQuery = Druids.newSearchQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .query("a") .sortSpec(new SearchSortSpec(StringComparators.NUMERIC)) .build(); List expectedHits = new ArrayList<>(); - expectedHits.add(new SearchHit(QueryRunnerTestHelper.placementishDimension, "a", 93)); - expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "automotive", 93)); - expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "entertainment", 93)); - expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "health", 93)); - expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "mezzanine", 279)); - expectedHits.add(new SearchHit(QueryRunnerTestHelper.marketDimension, "total_market", 186)); - expectedHits.add(new SearchHit(QueryRunnerTestHelper.qualityDimension, "travel", 93)); - expectedHits.add(new SearchHit(QueryRunnerTestHelper.partialNullDimension, "value", 186)); + expectedHits.add(new SearchHit(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION, "a", 93)); + expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "automotive", 93)); + expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "entertainment", 93)); + expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "health", 93)); + expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "mezzanine", 279)); + expectedHits.add(new SearchHit(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", 186)); + expectedHits.add(new SearchHit(QueryRunnerTestHelper.QUALITY_DIMENSION, "travel", 93)); + expectedHits.add(new SearchHit(QueryRunnerTestHelper.PARTIAL_NULL_DIMENSION, "value", 186)); checkSearchQuery(searchQuery, expectedHits); } @@ -594,9 +594,9 @@ public class SearchQueryRunnerTest public void testSearchOnTime() { SearchQuery searchQuery = Druids.newSearchQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .query("Friday") .dimensions(new ExtractionDimensionSpec( ColumnHolder.TIME_COLUMN_NAME, @@ -628,9 +628,9 @@ public class SearchQueryRunnerTest ValueType.LONG ) ) - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + 
.granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .query("1297123200000") .build(); @@ -653,9 +653,9 @@ public class SearchQueryRunnerTest jsExtractionFn ) ) - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .query("1297123200000") .build(); @@ -670,20 +670,20 @@ public class SearchQueryRunnerTest SearchQuery searchQuery = Druids.newSearchQueryBuilder() .dimensions( new DefaultDimensionSpec( - QueryRunnerTestHelper.indexMetric, - QueryRunnerTestHelper.indexMetric, + QueryRunnerTestHelper.INDEX_METRIC, + QueryRunnerTestHelper.INDEX_METRIC, ValueType.DOUBLE ) ) - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .query("100.7") .build(); List expectedHits = new ArrayList<>(); - expectedHits.add(new SearchHit(QueryRunnerTestHelper.indexMetric, "100.706057", 1)); - expectedHits.add(new SearchHit(QueryRunnerTestHelper.indexMetric, "100.775597", 1)); + expectedHits.add(new SearchHit(QueryRunnerTestHelper.INDEX_METRIC, "100.706057", 1)); + expectedHits.add(new SearchHit(QueryRunnerTestHelper.INDEX_METRIC, "100.775597", 1)); checkSearchQuery(searchQuery, expectedHits); } @@ -696,20 +696,20 @@ public class SearchQueryRunnerTest SearchQuery searchQuery = Druids.newSearchQueryBuilder() .dimensions( new ExtractionDimensionSpec( - QueryRunnerTestHelper.indexMetric, - QueryRunnerTestHelper.indexMetric, + QueryRunnerTestHelper.INDEX_METRIC, + QueryRunnerTestHelper.INDEX_METRIC, jsExtractionFn ) ) - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .query("100.7") .build(); List expectedHits = new ArrayList<>(); - expectedHits.add(new SearchHit(QueryRunnerTestHelper.indexMetric, "super-100.706057", 1)); - expectedHits.add(new SearchHit(QueryRunnerTestHelper.indexMetric, "super-100.775597", 1)); + expectedHits.add(new SearchHit(QueryRunnerTestHelper.INDEX_METRIC, "super-100.706057", 1)); + expectedHits.add(new SearchHit(QueryRunnerTestHelper.INDEX_METRIC, "super-100.775597", 1)); checkSearchQuery(searchQuery, expectedHits); } @@ -744,16 +744,16 @@ public class SearchQueryRunnerTest .dimensions( new DefaultDimensionSpec("table", "table") ) - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) // simulate when cardinality is big enough to fallback to cursorOnly strategy .context(ImmutableMap.of("searchStrategy", "cursorOnly")) .build(); QueryRunnerFactory factory = new SearchQueryRunnerFactory( - selector, - toolChest, + SELECTOR, + TOOL_CHEST, QueryRunnerTestHelper.NOOP_QUERYWATCHER ); QueryRunner runner = factory.createRunner( @@ -772,9 +772,9 @@ public class 
SearchQueryRunnerTest .dimensions( new DefaultDimensionSpec("asdf", "asdf") ) - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .build(); List noHit = new ArrayList<>(); diff --git a/processing/src/test/java/org/apache/druid/query/search/SearchQueryRunnerWithCaseTest.java b/processing/src/test/java/org/apache/druid/query/search/SearchQueryRunnerWithCaseTest.java index 855f50d42e4..1bbf11e401c 100644 --- a/processing/src/test/java/org/apache/druid/query/search/SearchQueryRunnerWithCaseTest.java +++ b/processing/src/test/java/org/apache/druid/query/search/SearchQueryRunnerWithCaseTest.java @@ -134,9 +134,9 @@ public class SearchQueryRunnerWithCaseTest private Druids.SearchQueryBuilder testBuilder() { return Druids.newSearchQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec); + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC); } @Test @@ -147,15 +147,15 @@ public class SearchQueryRunnerWithCaseTest SearchQuery searchQuery; searchQuery = builder.query("SPOT").build(); - expectedResults.put(QueryRunnerTestHelper.marketDimension, Sets.newHashSet("spot", "SPot")); + expectedResults.put(QueryRunnerTestHelper.MARKET_DIMENSION, Sets.newHashSet("spot", "SPot")); checkSearchQuery(searchQuery, expectedResults); searchQuery = builder.query("spot", true).build(); - expectedResults.put(QueryRunnerTestHelper.marketDimension, Sets.newHashSet("spot")); + expectedResults.put(QueryRunnerTestHelper.MARKET_DIMENSION, Sets.newHashSet("spot")); checkSearchQuery(searchQuery, expectedResults); searchQuery = builder.query("SPot", true).build(); - expectedResults.put(QueryRunnerTestHelper.marketDimension, Sets.newHashSet("SPot")); + expectedResults.put(QueryRunnerTestHelper.MARKET_DIMENSION, Sets.newHashSet("SPot")); checkSearchQuery(searchQuery, expectedResults); } @@ -165,22 +165,22 @@ public class SearchQueryRunnerWithCaseTest SearchQuery searchQuery; Druids.SearchQueryBuilder builder = testBuilder() .dimensions(Arrays.asList( - QueryRunnerTestHelper.placementDimension, - QueryRunnerTestHelper.placementishDimension + QueryRunnerTestHelper.PLACEMENT_DIMENSION, + QueryRunnerTestHelper.PLACEMENTISH_DIMENSION )); Map> expectedResults = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); searchQuery = builder.query("PREFERRED").build(); expectedResults.put( - QueryRunnerTestHelper.placementDimension, + QueryRunnerTestHelper.PLACEMENT_DIMENSION, Sets.newHashSet("PREFERRED", "preferred", "PREFERRed") ); - expectedResults.put(QueryRunnerTestHelper.placementishDimension, Sets.newHashSet("preferred", "Preferred")); + expectedResults.put(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION, Sets.newHashSet("preferred", "Preferred")); checkSearchQuery(searchQuery, expectedResults); searchQuery = builder.query("preferred", true).build(); - expectedResults.put(QueryRunnerTestHelper.placementDimension, Sets.newHashSet("preferred")); - expectedResults.put(QueryRunnerTestHelper.placementishDimension, Sets.newHashSet("preferred")); + expectedResults.put(QueryRunnerTestHelper.PLACEMENT_DIMENSION, Sets.newHashSet("preferred")); + 
expectedResults.put(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION, Sets.newHashSet("preferred")); checkSearchQuery(searchQuery, expectedResults); } @@ -189,12 +189,12 @@ public class SearchQueryRunnerWithCaseTest { SearchQuery searchQuery; Druids.SearchQueryBuilder builder = testBuilder() - .dimensions(Collections.singletonList(QueryRunnerTestHelper.qualityDimension)) + .dimensions(Collections.singletonList(QueryRunnerTestHelper.QUALITY_DIMENSION)) .intervals("2011-01-12T00:00:00.000Z/2011-01-13T00:00:00.000Z"); Map> expectedResults = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); searchQuery = builder.query("otive").build(); - expectedResults.put(QueryRunnerTestHelper.qualityDimension, Sets.newHashSet("AutoMotive")); + expectedResults.put(QueryRunnerTestHelper.QUALITY_DIMENSION, Sets.newHashSet("AutoMotive")); checkSearchQuery(searchQuery, expectedResults); } @@ -203,12 +203,12 @@ public class SearchQueryRunnerWithCaseTest { SearchQuery searchQuery; Druids.SearchQueryBuilder builder = testBuilder() - .dimensions(Collections.singletonList(QueryRunnerTestHelper.qualityDimension)) + .dimensions(Collections.singletonList(QueryRunnerTestHelper.QUALITY_DIMENSION)) .intervals("2011-01-10T00:00:00.000Z/2011-01-11T00:00:00.000Z"); Map> expectedResults = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); searchQuery = builder.query("business").build(); - expectedResults.put(QueryRunnerTestHelper.qualityDimension, new HashSet<>()); + expectedResults.put(QueryRunnerTestHelper.QUALITY_DIMENSION, new HashSet<>()); checkSearchQuery(searchQuery, expectedResults); } @@ -220,11 +220,11 @@ public class SearchQueryRunnerWithCaseTest SearchQuery searchQuery; searchQuery = builder.fragments(Arrays.asList("auto", "ve")).build(); - expectedResults.put(QueryRunnerTestHelper.qualityDimension, Sets.newHashSet("automotive", "AutoMotive")); + expectedResults.put(QueryRunnerTestHelper.QUALITY_DIMENSION, Sets.newHashSet("automotive", "AutoMotive")); checkSearchQuery(searchQuery, expectedResults); searchQuery = builder.fragments(Arrays.asList("auto", "ve"), true).build(); - expectedResults.put(QueryRunnerTestHelper.qualityDimension, Sets.newHashSet("automotive")); + expectedResults.put(QueryRunnerTestHelper.QUALITY_DIMENSION, Sets.newHashSet("automotive")); checkSearchQuery(searchQuery, expectedResults); } diff --git a/processing/src/test/java/org/apache/druid/query/search/SearchQueryTest.java b/processing/src/test/java/org/apache/druid/query/search/SearchQueryTest.java index 7dda00f4f1e..40fadcc3522 100644 --- a/processing/src/test/java/org/apache/druid/query/search/SearchQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/search/SearchQueryTest.java @@ -33,20 +33,20 @@ import java.io.IOException; public class SearchQueryTest { - private static final ObjectMapper jsonMapper = new DefaultObjectMapper(); + private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper(); @Test public void testQuerySerialization() throws IOException { Query query = Druids.newSearchQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .query("a") .build(); - String json = jsonMapper.writeValueAsString(query); - Query serdeQuery = jsonMapper.readValue(json, Query.class); + String json = JSON_MAPPER.writeValueAsString(query); + Query serdeQuery = 
JSON_MAPPER.readValue(json, Query.class); Assert.assertEquals(query, serdeQuery); } @@ -55,25 +55,25 @@ public class SearchQueryTest public void testEquals() { Query query1 = Druids.newSearchQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .dimensions( new DefaultDimensionSpec( - QueryRunnerTestHelper.qualityDimension, - QueryRunnerTestHelper.qualityDimension + QueryRunnerTestHelper.QUALITY_DIMENSION, + QueryRunnerTestHelper.QUALITY_DIMENSION ) ) .query("a") .build(); Query query2 = Druids.newSearchQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .dimensions( new DefaultDimensionSpec( - QueryRunnerTestHelper.qualityDimension, - QueryRunnerTestHelper.qualityDimension + QueryRunnerTestHelper.QUALITY_DIMENSION, + QueryRunnerTestHelper.QUALITY_DIMENSION ) ) .query("a") @@ -86,23 +86,23 @@ public class SearchQueryTest public void testSerDe() throws IOException { Query query = Druids.newSearchQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) - .dimensions(new LegacyDimensionSpec(QueryRunnerTestHelper.qualityDimension)) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) + .dimensions(new LegacyDimensionSpec(QueryRunnerTestHelper.QUALITY_DIMENSION)) .query("a") .build(); final String json = "{\"queryType\":\"search\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},\"filter\":null,\"granularity\":{\"type\":\"all\"},\"limit\":1000,\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z\"]},\"searchDimensions\":[\"" - + QueryRunnerTestHelper.qualityDimension + + QueryRunnerTestHelper.QUALITY_DIMENSION + "\"],\"query\":{\"type\":\"insensitive_contains\",\"value\":\"a\"},\"sort\":{\"type\":\"lexicographic\"},\"context\":null}"; - final Query serdeQuery = jsonMapper.readValue(json, Query.class); + final Query serdeQuery = JSON_MAPPER.readValue(json, Query.class); Assert.assertEquals(query.toString(), serdeQuery.toString()); Assert.assertEquals(query, serdeQuery); final String json2 = "{\"queryType\":\"search\",\"dataSource\":{\"type\":\"table\",\"name\":\"testing\"},\"filter\":null,\"granularity\":{\"type\":\"all\"},\"limit\":1000,\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z\"]},\"searchDimensions\":[\"quality\"],\"query\":{\"type\":\"insensitive_contains\",\"value\":\"a\"},\"sort\":{\"type\":\"lexicographic\"},\"context\":null}"; - final Query serdeQuery2 = jsonMapper.readValue(json2, Query.class); + final Query serdeQuery2 = JSON_MAPPER.readValue(json2, Query.class); Assert.assertEquals(query.toString(), serdeQuery2.toString()); Assert.assertEquals(query, serdeQuery2); diff --git a/processing/src/test/java/org/apache/druid/query/select/DefaultSelectQueryMetricsTest.java 
b/processing/src/test/java/org/apache/druid/query/select/DefaultSelectQueryMetricsTest.java index 71ffbee1ba1..3cb569ddc42 100644 --- a/processing/src/test/java/org/apache/druid/query/select/DefaultSelectQueryMetricsTest.java +++ b/processing/src/test/java/org/apache/druid/query/select/DefaultSelectQueryMetricsTest.java @@ -46,9 +46,9 @@ public class DefaultSelectQueryMetricsTest ServiceEmitter serviceEmitter = new ServiceEmitter("", "", cachingEmitter); SelectQuery query = Druids .newSelectQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .descending(true) .pagingSpec(PagingSpec.newSpec(1)) .build(); @@ -64,9 +64,9 @@ public class DefaultSelectQueryMetricsTest Assert.assertTrue(actualEvent.containsKey("timestamp")); Assert.assertEquals("", actualEvent.get("host")); Assert.assertEquals("", actualEvent.get("service")); - Assert.assertEquals(QueryRunnerTestHelper.dataSource, actualEvent.get(DruidMetrics.DATASOURCE)); + Assert.assertEquals(QueryRunnerTestHelper.DATA_SOURCE, actualEvent.get(DruidMetrics.DATASOURCE)); Assert.assertEquals(query.getType(), actualEvent.get(DruidMetrics.TYPE)); - List expectedIntervals = QueryRunnerTestHelper.fullOnIntervalSpec.getIntervals(); + List expectedIntervals = QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC.getIntervals(); List expectedStringIntervals = expectedIntervals.stream().map(Interval::toString).collect(Collectors.toList()); Assert.assertEquals(expectedStringIntervals, actualEvent.get(DruidMetrics.INTERVAL)); @@ -86,9 +86,9 @@ public class DefaultSelectQueryMetricsTest ServiceEmitter serviceEmitter = new ServiceEmitter("", "", cachingEmitter); SelectQuery query = Druids .newSelectQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .descending(true) .pagingSpec(PagingSpec.newSpec(1)) .build(); diff --git a/processing/src/test/java/org/apache/druid/query/select/MultiSegmentSelectQueryTest.java b/processing/src/test/java/org/apache/druid/query/select/MultiSegmentSelectQueryTest.java index 07cc52f9089..8534f9061a8 100644 --- a/processing/src/test/java/org/apache/druid/query/select/MultiSegmentSelectQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/select/MultiSegmentSelectQueryTest.java @@ -71,15 +71,15 @@ import java.util.Map; @RunWith(Parameterized.class) public class MultiSegmentSelectQueryTest { - private static final Supplier configSupplier = Suppliers.ofInstance(new SelectQueryConfig(true)); + private static final Supplier CONFIG_SUPPLIER = Suppliers.ofInstance(new SelectQueryConfig(true)); - private static final SelectQueryQueryToolChest toolChest = new SelectQueryQueryToolChest( + private static final SelectQueryQueryToolChest TOOL_CHEST = new SelectQueryQueryToolChest( new DefaultObjectMapper(), QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator() ); - private static final QueryRunnerFactory factory = new SelectQueryRunnerFactory( - toolChest, + private static final QueryRunnerFactory FACTORY = new SelectQueryRunnerFactory( + TOOL_CHEST, new SelectQueryEngine(), 
QueryRunnerTestHelper.NOOP_QUERYWATCHER ); @@ -169,7 +169,7 @@ public class MultiSegmentSelectQueryTest segmentIdentifiers.add(makeIdentifier(holder.getInterval(), holder.getVersion()).toString()); } - runner = QueryRunnerTestHelper.makeFilteringQueryRunner(timeline, factory); + runner = QueryRunnerTestHelper.makeFilteringQueryRunner(timeline, FACTORY); } private static SegmentId makeIdentifier(IncrementalIndex index, String version) @@ -179,7 +179,7 @@ public class MultiSegmentSelectQueryTest private static SegmentId makeIdentifier(Interval interval, String version) { - return SegmentId.of(QueryRunnerTestHelper.dataSource, interval, version, NoneShardSpec.instance()); + return SegmentId.of(QueryRunnerTestHelper.DATA_SOURCE, interval, version, NoneShardSpec.instance()); } private static IncrementalIndex newIndex(String minTimeStamp) @@ -224,10 +224,10 @@ public class MultiSegmentSelectQueryTest private Druids.SelectQueryBuilder newBuilder() { return Druids.newSelectQueryBuilder() - .dataSource(new TableDataSource(QueryRunnerTestHelper.dataSource)) + .dataSource(new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE)) .intervals(SelectQueryRunnerTest.I_0112_0114_SPEC) - .granularity(QueryRunnerTestHelper.allGran) - .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.dimensions)) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.DIMENSIONS)) .pagingSpec(PagingSpec.newSpec(3)); } @@ -277,7 +277,7 @@ public class MultiSegmentSelectQueryTest public void testDayGranularity() { runDayGranularityTest( - newBuilder().granularity(QueryRunnerTestHelper.dayGran).build(), + newBuilder().granularity(QueryRunnerTestHelper.DAY_GRAN).build(), new int[][]{ {2, -1, -1, 2, 3, 0, 0, 3}, {3, 1, -1, 5, 1, 2, 0, 3}, {-1, 3, 0, 8, 0, 2, 1, 3}, {-1, -1, 3, 11, 0, 0, 3, 3}, {-1, -1, 4, 12, 0, 0, 1, 1}, {-1, -1, 5, 13, 0, 0, 0, 0} @@ -285,7 +285,7 @@ public class MultiSegmentSelectQueryTest ); runDayGranularityTest( - newBuilder().granularity(QueryRunnerTestHelper.dayGran).descending(true).build(), + newBuilder().granularity(QueryRunnerTestHelper.DAY_GRAN).descending(true).build(), new int[][]{ {0, 0, -3, -3, 0, 0, 3, 3}, {0, -1, -5, -6, 0, 1, 2, 3}, {0, -4, 0, -9, 0, 3, 0, 3}, {-3, 0, 0, -12, 3, 0, 0, 3}, {-4, 0, 0, -13, 1, 0, 0, 1}, {-5, 0, 0, -14, 0, 0, 0, 0} @@ -325,14 +325,14 @@ public class MultiSegmentSelectQueryTest .dataSource( new UnionDataSource( ImmutableList.of( - new TableDataSource(QueryRunnerTestHelper.dataSource), + new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE), new TableDataSource("testing-2") ) ) ) .intervals(SelectQueryRunnerTest.I_0112_0114_SPEC) - .granularity(QueryRunnerTestHelper.allGran) - .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.dimensions)) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.DIMENSIONS)) .pagingSpec(PagingSpec.newSpec(3)); SelectQuery query = selectQueryBuilder.build(); diff --git a/processing/src/test/java/org/apache/druid/query/select/SelectBinaryFnTest.java b/processing/src/test/java/org/apache/druid/query/select/SelectBinaryFnTest.java index ab718a40a03..19bf08e9538 100644 --- a/processing/src/test/java/org/apache/druid/query/select/SelectBinaryFnTest.java +++ b/processing/src/test/java/org/apache/druid/query/select/SelectBinaryFnTest.java @@ -40,9 +40,9 @@ import java.util.Set; */ public class SelectBinaryFnTest { - private static final String segmentId1 = "testSegment1"; + private static final 
String SEGMENT_ID1 = "testSegment1"; - private static final String segmentId2 = "testSegment2"; + private static final String SEGMENT_ID2 = "testSegment2"; @Test public void testApply() @@ -57,30 +57,30 @@ public class SelectBinaryFnTest Sets.newHashSet("sixth"), Arrays.asList( new EventHolder( - segmentId1, + SEGMENT_ID1, 0, ImmutableMap.of( - EventHolder.timestampKey, + EventHolder.TIMESTAMP_KEY, DateTimes.of("2013-01-01T00"), "dim", "first" ) ), new EventHolder( - segmentId1, + SEGMENT_ID1, 1, ImmutableMap.of( - EventHolder.timestampKey, + EventHolder.TIMESTAMP_KEY, DateTimes.of("2013-01-01T03"), "dim", "fourth" ) ), new EventHolder( - segmentId1, + SEGMENT_ID1, 2, ImmutableMap.of( - EventHolder.timestampKey, + EventHolder.TIMESTAMP_KEY, DateTimes.of("2013-01-01T05"), "dim", "sixth" @@ -99,30 +99,30 @@ public class SelectBinaryFnTest Sets.newHashSet("fifth"), Arrays.asList( new EventHolder( - segmentId2, + SEGMENT_ID2, 0, ImmutableMap.of( - EventHolder.timestampKey, + EventHolder.TIMESTAMP_KEY, DateTimes.of("2013-01-01T00"), "dim", "second" ) ), new EventHolder( - segmentId2, + SEGMENT_ID2, 1, ImmutableMap.of( - EventHolder.timestampKey, + EventHolder.TIMESTAMP_KEY, DateTimes.of("2013-01-01T02"), "dim", "third" ) ), new EventHolder( - segmentId2, + SEGMENT_ID2, 2, ImmutableMap.of( - EventHolder.timestampKey, + EventHolder.TIMESTAMP_KEY, DateTimes.of("2013-01-01T04"), "dim", "fifth" @@ -137,8 +137,8 @@ public class SelectBinaryFnTest Assert.assertEquals(res1.getTimestamp(), merged.getTimestamp()); LinkedHashMap expectedPageIds = Maps.newLinkedHashMap(); - expectedPageIds.put(segmentId1, 1); - expectedPageIds.put(segmentId2, 2); + expectedPageIds.put(SEGMENT_ID1, 1); + expectedPageIds.put(SEGMENT_ID2, 2); Iterator exSegmentIter = expectedPageIds.keySet().iterator(); Iterator acSegmentIter = merged.getValue().getPagingIdentifiers().keySet().iterator(); @@ -152,48 +152,48 @@ public class SelectBinaryFnTest List exEvents = Arrays.asList( new EventHolder( - segmentId1, + SEGMENT_ID1, 0, ImmutableMap.of( - EventHolder.timestampKey, + EventHolder.TIMESTAMP_KEY, DateTimes.of("2013-01-01T00"), "dim", "first" ) ), new EventHolder( - segmentId2, + SEGMENT_ID2, 0, ImmutableMap.of( - EventHolder.timestampKey, + EventHolder.TIMESTAMP_KEY, DateTimes.of("2013-01-01T00"), "dim", "second" ) ), new EventHolder( - segmentId2, + SEGMENT_ID2, 1, ImmutableMap.of( - EventHolder.timestampKey, + EventHolder.TIMESTAMP_KEY, DateTimes.of("2013-01-01T02"), "dim", "third" ) ), new EventHolder( - segmentId1, + SEGMENT_ID1, 1, ImmutableMap.of( - EventHolder.timestampKey, + EventHolder.TIMESTAMP_KEY, DateTimes.of("2013-01-01T03"), "dim", "fourth" ) ), new EventHolder( - segmentId2, + SEGMENT_ID2, 2, ImmutableMap.of( - EventHolder.timestampKey, + EventHolder.TIMESTAMP_KEY, DateTimes.of("2013-01-01T04"), "dim", "fifth" @@ -220,9 +220,9 @@ public class SelectBinaryFnTest Sets.newHashSet("eight", "nineth"), Collections.singletonList( new EventHolder( - segmentId1, + SEGMENT_ID1, 0, - ImmutableMap.of(EventHolder.timestampKey, DateTimes.of("2013-01-01T00"), "dim", "first") + ImmutableMap.of(EventHolder.TIMESTAMP_KEY, DateTimes.of("2013-01-01T00"), "dim", "first") ) ) ) @@ -236,9 +236,9 @@ public class SelectBinaryFnTest Sets.newHashSet("seventh"), Collections.singletonList( new EventHolder( - segmentId2, + SEGMENT_ID2, 0, - ImmutableMap.of(EventHolder.timestampKey, DateTimes.of("2013-01-01T00"), "dim", "second") + ImmutableMap.of(EventHolder.TIMESTAMP_KEY, DateTimes.of("2013-01-01T00"), "dim", "second") ) ) ) diff --git 
a/processing/src/test/java/org/apache/druid/query/select/SelectQueryQueryToolChestTest.java b/processing/src/test/java/org/apache/druid/query/select/SelectQueryQueryToolChestTest.java index 2f8cfd6fbab..540bdc3722f 100644 --- a/processing/src/test/java/org/apache/druid/query/select/SelectQueryQueryToolChestTest.java +++ b/processing/src/test/java/org/apache/druid/query/select/SelectQueryQueryToolChestTest.java @@ -34,9 +34,9 @@ import java.util.Collections; public class SelectQueryQueryToolChestTest { - private static final Supplier configSupplier = Suppliers.ofInstance(new SelectQueryConfig(true)); + private static final Supplier CONFIG_SUPPLIER = Suppliers.ofInstance(new SelectQueryConfig(true)); - private static final SelectQueryQueryToolChest toolChest = new SelectQueryQueryToolChest( + private static final SelectQueryQueryToolChest TOOL_CHEST = new SelectQueryQueryToolChest( new DefaultObjectMapper(), QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator() ); @@ -48,7 +48,7 @@ public class SelectQueryQueryToolChestTest .dataSource("dummy") .dimensions(Collections.singletonList("testDim")) .intervals(SelectQueryRunnerTest.I_0112_0114_SPEC) - .granularity(QueryRunnerTestHelper.allGran) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .pagingSpec(PagingSpec.newSpec(3)) .descending(false) .build(); @@ -57,14 +57,14 @@ public class SelectQueryQueryToolChestTest .dataSource("dummy") .dimensions(Collections.singletonList("testDim")) .intervals(SelectQueryRunnerTest.I_0112_0114_SPEC) - .granularity(QueryRunnerTestHelper.allGran) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .pagingSpec(PagingSpec.newSpec(3)) .descending(true) .build(); - final CacheStrategy, Object, SelectQuery> strategy1 = toolChest.getCacheStrategy(query1); + final CacheStrategy, Object, SelectQuery> strategy1 = TOOL_CHEST.getCacheStrategy(query1); Assert.assertNotNull(strategy1); - final CacheStrategy, Object, SelectQuery> strategy2 = toolChest.getCacheStrategy(query2); + final CacheStrategy, Object, SelectQuery> strategy2 = TOOL_CHEST.getCacheStrategy(query2); Assert.assertNotNull(strategy2); Assert.assertFalse(Arrays.equals(strategy1.computeCacheKey(query1), strategy2.computeCacheKey(query2))); diff --git a/processing/src/test/java/org/apache/druid/query/select/SelectQueryRunnerTest.java b/processing/src/test/java/org/apache/druid/query/select/SelectQueryRunnerTest.java index 16d46a8263d..e4f8a393ce2 100644 --- a/processing/src/test/java/org/apache/druid/query/select/SelectQueryRunnerTest.java +++ b/processing/src/test/java/org/apache/druid/query/select/SelectQueryRunnerTest.java @@ -112,22 +112,22 @@ public class SelectQueryRunnerTest private static final Interval I_0112_0114 = Intervals.of("2011-01-12/2011-01-14"); public static final QuerySegmentSpec I_0112_0114_SPEC = new LegacySegmentSpec(I_0112_0114); - private static final SegmentId SEGMENT_ID_I_0112_0114 = QueryRunnerTestHelper.segmentId.withInterval(I_0112_0114); + private static final SegmentId SEGMENT_ID_I_0112_0114 = QueryRunnerTestHelper.SEGMENT_ID.withInterval(I_0112_0114); - private static final String segmentIdString = SEGMENT_ID_I_0112_0114.toString(); + private static final String SEGMENT_ID_STRING = SEGMENT_ID_I_0112_0114.toString(); public static final String[] V_0112_0114 = ObjectArrays.concat(V_0112, V_0113, String.class); private static final boolean DEFAULT_FROM_NEXT = true; - private static final SelectQueryConfig config = new SelectQueryConfig(true); + private static final SelectQueryConfig CONFIG = new SelectQueryConfig(true); static { 
- config.setEnableFromNextDefault(DEFAULT_FROM_NEXT); + CONFIG.setEnableFromNextDefault(DEFAULT_FROM_NEXT); } - private static final Supplier configSupplier = Suppliers.ofInstance(config); + private static final Supplier CONFIG_SUPPLIER = Suppliers.ofInstance(CONFIG); - private static final SelectQueryQueryToolChest toolChest = new SelectQueryQueryToolChest( + private static final SelectQueryQueryToolChest TOOL_CHEST = new SelectQueryQueryToolChest( new DefaultObjectMapper(), QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator() ); @@ -138,7 +138,7 @@ public class SelectQueryRunnerTest return QueryRunnerTestHelper.cartesian( QueryRunnerTestHelper.makeQueryRunners( new SelectQueryRunnerFactory( - toolChest, + TOOL_CHEST, new SelectQueryEngine(), QueryRunnerTestHelper.NOOP_QUERYWATCHER ) @@ -158,11 +158,11 @@ public class SelectQueryRunnerTest private Druids.SelectQueryBuilder newTestQuery() { return Druids.newSelectQueryBuilder() - .dataSource(new TableDataSource(QueryRunnerTestHelper.dataSource)) + .dataSource(new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE)) .dimensionSpecs(DefaultDimensionSpec.toSpec(Collections.emptyList())) .metrics(Collections.emptyList()) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) - .granularity(QueryRunnerTestHelper.allGran) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .pagingSpec(PagingSpec.newSpec(3)) .descending(descending); } @@ -176,9 +176,9 @@ public class SelectQueryRunnerTest Iterable> results = runner.run(QueryPlus.wrap(query)).toList(); - PagingOffset offset = query.getPagingOffset(segmentIdString); + PagingOffset offset = query.getPagingOffset(SEGMENT_ID_STRING); List> expectedResults = toExpected( - segmentIdString, + SEGMENT_ID_STRING, toFullEvents(V_0112_0114), Lists.newArrayList( "market", @@ -260,20 +260,20 @@ public class SelectQueryRunnerTest SelectQuery query = newTestQuery() .dimensionSpecs( Arrays.asList( - new DefaultDimensionSpec(QueryRunnerTestHelper.marketDimension, "mar"), + new DefaultDimensionSpec(QueryRunnerTestHelper.MARKET_DIMENSION, "mar"), new ExtractionDimensionSpec( - QueryRunnerTestHelper.qualityDimension, + QueryRunnerTestHelper.QUALITY_DIMENSION, "qual", new LookupExtractionFn(new MapLookupExtractor(map, true), false, null, true, false) ), - new DefaultDimensionSpec(QueryRunnerTestHelper.placementDimension, "place") + new DefaultDimensionSpec(QueryRunnerTestHelper.PLACEMENT_DIMENSION, "place") ) ) .build(); Iterable> results = runner.run(QueryPlus.wrap(query)).toList(); - String segmentIdInThisQuery = QueryRunnerTestHelper.segmentId.toString(); + String segmentIdInThisQuery = QueryRunnerTestHelper.SEGMENT_ID.toString(); List> expectedResultsAsc = Collections.singletonList( new Result( @@ -289,33 +289,33 @@ public class SelectQueryRunnerTest segmentIdInThisQuery, 0, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, DateTimes.of("2011-01-12T00:00:00.000Z")) + .put(EventHolder.TIMESTAMP_KEY, DateTimes.of("2011-01-12T00:00:00.000Z")) .put("mar", "spot") .put("qual", "automotive0") .put("place", "preferred") - .put(QueryRunnerTestHelper.indexMetric, 100.000000F) + .put(QueryRunnerTestHelper.INDEX_METRIC, 100.000000F) .build() ), new EventHolder( segmentIdInThisQuery, 1, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, DateTimes.of("2011-01-12T00:00:00.000Z")) + .put(EventHolder.TIMESTAMP_KEY, DateTimes.of("2011-01-12T00:00:00.000Z")) .put("mar", "spot") .put("qual", "business0") .put("place", "preferred") - 
.put(QueryRunnerTestHelper.indexMetric, 100.000000F) + .put(QueryRunnerTestHelper.INDEX_METRIC, 100.000000F) .build() ), new EventHolder( segmentIdInThisQuery, 2, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, DateTimes.of("2011-01-12T00:00:00.000Z")) + .put(EventHolder.TIMESTAMP_KEY, DateTimes.of("2011-01-12T00:00:00.000Z")) .put("mar", "spot") .put("qual", "entertainment0") .put("place", "preferred") - .put(QueryRunnerTestHelper.indexMetric, 100.000000F) + .put(QueryRunnerTestHelper.INDEX_METRIC, 100.000000F) .build() ) ) @@ -337,33 +337,33 @@ public class SelectQueryRunnerTest segmentIdInThisQuery, -1, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, DateTimes.of("2011-04-15T00:00:00.000Z")) + .put(EventHolder.TIMESTAMP_KEY, DateTimes.of("2011-04-15T00:00:00.000Z")) .put("mar", "upfront") .put("qual", "premium0") .put("place", "preferred") - .put(QueryRunnerTestHelper.indexMetric, 780.27197265625F) + .put(QueryRunnerTestHelper.INDEX_METRIC, 780.27197265625F) .build() ), new EventHolder( segmentIdInThisQuery, -2, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, DateTimes.of("2011-04-15T00:00:00.000Z")) + .put(EventHolder.TIMESTAMP_KEY, DateTimes.of("2011-04-15T00:00:00.000Z")) .put("mar", "upfront") .put("qual", "mezzanine0") .put("place", "preferred") - .put(QueryRunnerTestHelper.indexMetric, 962.731201171875F) + .put(QueryRunnerTestHelper.INDEX_METRIC, 962.731201171875F) .build() ), new EventHolder( segmentIdInThisQuery, -3, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, DateTimes.of("2011-04-15T00:00:00.000Z")) + .put(EventHolder.TIMESTAMP_KEY, DateTimes.of("2011-04-15T00:00:00.000Z")) .put("mar", "total_market") .put("qual", "premium0") .put("place", "preferred") - .put(QueryRunnerTestHelper.indexMetric, 1029.0570068359375F) + .put(QueryRunnerTestHelper.INDEX_METRIC, 1029.0570068359375F) .build() ) ) @@ -379,26 +379,26 @@ public class SelectQueryRunnerTest { SelectQuery query = newTestQuery() .intervals(I_0112_0114_SPEC) - .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.marketDimension)) - .metrics(Collections.singletonList(QueryRunnerTestHelper.indexMetric)) + .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.MARKET_DIMENSION)) + .metrics(Collections.singletonList(QueryRunnerTestHelper.INDEX_METRIC)) .build(); Iterable> results = runner.run(QueryPlus.wrap(query)).toList(); - PagingOffset offset = query.getPagingOffset(segmentIdString); + PagingOffset offset = query.getPagingOffset(SEGMENT_ID_STRING); List> expectedResults = toExpected( - segmentIdString, + SEGMENT_ID_STRING, toEvents( new String[]{ - EventHolder.timestampKey + ":TIME", - QueryRunnerTestHelper.marketDimension + ":STRING", + EventHolder.TIMESTAMP_KEY + ":TIME", + QueryRunnerTestHelper.MARKET_DIMENSION + ":STRING", null, null, null, null, null, null, - QueryRunnerTestHelper.indexMetric + ":FLOAT" + QueryRunnerTestHelper.INDEX_METRIC + ":FLOAT" }, V_0112_0114 ), @@ -415,19 +415,19 @@ public class SelectQueryRunnerTest { SelectQuery query = newTestQuery() .intervals(I_0112_0114_SPEC) - .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.qualityDimension)) - .metrics(Collections.singletonList(QueryRunnerTestHelper.indexMetric)) + .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.QUALITY_DIMENSION)) + .metrics(Collections.singletonList(QueryRunnerTestHelper.INDEX_METRIC)) .pagingSpec(new PagingSpec(toPagingIdentifier(3, descending), 3)) .build(); Iterable> results = runner.run(QueryPlus.wrap(query)).toList(); - 
PagingOffset offset = query.getPagingOffset(segmentIdString); + PagingOffset offset = query.getPagingOffset(SEGMENT_ID_STRING); List> expectedResults = toExpected( - segmentIdString, + SEGMENT_ID_STRING, toEvents( new String[]{ - EventHolder.timestampKey + ":TIME", + EventHolder.TIMESTAMP_KEY + ":TIME", "foo:NULL", "foo2:NULL" }, @@ -448,10 +448,10 @@ public class SelectQueryRunnerTest for (int[] param : new int[][]{{3, 3}, {0, 1}, {5, 5}, {2, 7}, {3, 0}}) { SelectQuery query = newTestQuery() .intervals(I_0112_0114_SPEC) - .filters(new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot", null)) - .granularity(QueryRunnerTestHelper.dayGran) - .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.qualityDimension)) - .metrics(Collections.singletonList(QueryRunnerTestHelper.indexMetric)) + .filters(new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null)) + .granularity(QueryRunnerTestHelper.DAY_GRAN) + .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.QUALITY_DIMENSION)) + .metrics(Collections.singletonList(QueryRunnerTestHelper.INDEX_METRIC)) .pagingSpec(new PagingSpec(toPagingIdentifier(param[0], descending), param[1])) .build(); @@ -459,12 +459,12 @@ public class SelectQueryRunnerTest final List>> events = toEvents( new String[]{ - EventHolder.timestampKey + ":TIME", + EventHolder.TIMESTAMP_KEY + ":TIME", null, - QueryRunnerTestHelper.qualityDimension + ":STRING", + QueryRunnerTestHelper.QUALITY_DIMENSION + ":STRING", null, null, - QueryRunnerTestHelper.indexMetric + ":FLOAT" + QueryRunnerTestHelper.INDEX_METRIC + ":FLOAT" }, // filtered values with day granularity new String[]{ @@ -491,9 +491,9 @@ public class SelectQueryRunnerTest } ); - PagingOffset offset = query.getPagingOffset(segmentIdString); + PagingOffset offset = query.getPagingOffset(SEGMENT_ID_STRING); List> expectedResults = toExpected( - segmentIdString, + SEGMENT_ID_STRING, events, Collections.singletonList("quality"), Collections.singletonList("index"), @@ -513,14 +513,14 @@ public class SelectQueryRunnerTest .filters( new AndDimFilter( Arrays.asList( - new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot", null), + new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null), new BoundDimFilter("expr", "11.1", null, false, false, null, null, StringComparators.NUMERIC) ) ) ) - .granularity(QueryRunnerTestHelper.allGran) - .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.qualityDimension)) - .metrics(Collections.singletonList(QueryRunnerTestHelper.indexMetric)) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.QUALITY_DIMENSION)) + .metrics(Collections.singletonList(QueryRunnerTestHelper.INDEX_METRIC)) .pagingSpec(new PagingSpec(null, 10, true)) .virtualColumns( new ExpressionVirtualColumn("expr", "index / 10.0", ValueType.FLOAT, TestExprMacroTable.INSTANCE) @@ -531,12 +531,12 @@ public class SelectQueryRunnerTest final List>> events = toEvents( new String[]{ - EventHolder.timestampKey + ":TIME", + EventHolder.TIMESTAMP_KEY + ":TIME", null, - QueryRunnerTestHelper.qualityDimension + ":STRING", + QueryRunnerTestHelper.QUALITY_DIMENSION + ":STRING", null, null, - QueryRunnerTestHelper.indexMetric + ":FLOAT" + QueryRunnerTestHelper.INDEX_METRIC + ":FLOAT" }, // filtered values with all granularity new String[]{ @@ -545,7 +545,7 @@ public class SelectQueryRunnerTest } ); - String segmentIdInThisQuery = 
QueryRunnerTestHelper.segmentId.withInterval(interval).toString(); + String segmentIdInThisQuery = QueryRunnerTestHelper.SEGMENT_ID.withInterval(interval).toString(); PagingOffset offset = query.getPagingOffset(segmentIdInThisQuery); List> expectedResults = toExpected( segmentIdInThisQuery, @@ -568,26 +568,26 @@ public class SelectQueryRunnerTest LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, true); SelectQuery query = newTestQuery() .intervals(I_0112_0114_SPEC) - .filters(new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "replaced", lookupExtractionFn)) - .granularity(QueryRunnerTestHelper.dayGran) - .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.qualityDimension)) - .metrics(Collections.singletonList(QueryRunnerTestHelper.indexMetric)) + .filters(new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "replaced", lookupExtractionFn)) + .granularity(QueryRunnerTestHelper.DAY_GRAN) + .dimensionSpecs(DefaultDimensionSpec.toSpec(QueryRunnerTestHelper.QUALITY_DIMENSION)) + .metrics(Collections.singletonList(QueryRunnerTestHelper.INDEX_METRIC)) .build(); Iterable> results = runner.run(QueryPlus.wrap(query)).toList(); - Iterable> resultsOptimize = toolChest - .postMergeQueryDecoration(toolChest.mergeResults(toolChest.preMergeQueryDecoration(runner))) + Iterable> resultsOptimize = TOOL_CHEST + .postMergeQueryDecoration(TOOL_CHEST.mergeResults(TOOL_CHEST.preMergeQueryDecoration(runner))) .run(QueryPlus.wrap(query)) .toList(); final List>> events = toEvents( new String[]{ - EventHolder.timestampKey + ":TIME", + EventHolder.TIMESTAMP_KEY + ":TIME", null, - QueryRunnerTestHelper.qualityDimension + ":STRING", + QueryRunnerTestHelper.QUALITY_DIMENSION + ":STRING", null, null, - QueryRunnerTestHelper.indexMetric + ":FLOAT" + QueryRunnerTestHelper.INDEX_METRIC + ":FLOAT" }, // filtered values with day granularity new String[]{ @@ -600,12 +600,12 @@ public class SelectQueryRunnerTest } ); - PagingOffset offset = query.getPagingOffset(segmentIdString); + PagingOffset offset = query.getPagingOffset(SEGMENT_ID_STRING); List> expectedResults = toExpected( - segmentIdString, + SEGMENT_ID_STRING, events, - Collections.singletonList(QueryRunnerTestHelper.qualityDimension), - Collections.singletonList(QueryRunnerTestHelper.indexMetric), + Collections.singletonList(QueryRunnerTestHelper.QUALITY_DIMENSION), + Collections.singletonList(QueryRunnerTestHelper.INDEX_METRIC), offset.startOffset(), offset.threshold() ); @@ -622,8 +622,8 @@ public class SelectQueryRunnerTest .filters( new AndDimFilter( Arrays.asList( - new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot", null), - new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "foo", null) + new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null), + new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "foo", null) ) ) ) @@ -678,16 +678,16 @@ public class SelectQueryRunnerTest final List>> events = toEvents( new String[]{ - EventHolder.timestampKey + ":TIME", + EventHolder.TIMESTAMP_KEY + ":TIME", "foo:NULL", "foo2:NULL" }, V_0112_0114 ); - PagingOffset offset = query.getPagingOffset(segmentIdString); + PagingOffset offset = query.getPagingOffset(SEGMENT_ID_STRING); List> expectedResults = toExpected( - segmentIdString, + SEGMENT_ID_STRING, events, Collections.singletonList("foo"), Collections.singletonList("foo2"), @@ -701,7 +701,7 @@ public class SelectQueryRunnerTest public void testFullOnSelectWithLongAndFloat() { List 
dimSpecs = Arrays.asList( - new DefaultDimensionSpec(QueryRunnerTestHelper.indexMetric, "floatIndex", ValueType.FLOAT), + new DefaultDimensionSpec(QueryRunnerTestHelper.INDEX_METRIC, "floatIndex", ValueType.FLOAT), new DefaultDimensionSpec(ColumnHolder.TIME_COLUMN_NAME, "longTime", ValueType.LONG) ); @@ -717,40 +717,40 @@ public class SelectQueryRunnerTest new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new SelectResultValue( - ImmutableMap.of(segmentIdString, 2), + ImmutableMap.of(SEGMENT_ID_STRING, 2), Sets.newHashSet("null_column", "floatIndex", "longTime"), Sets.newHashSet("__time", "index"), Arrays.asList( new EventHolder( - segmentIdString, + SEGMENT_ID_STRING, 0, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, DateTimes.of("2011-01-12T00:00:00.000Z")) + .put(EventHolder.TIMESTAMP_KEY, DateTimes.of("2011-01-12T00:00:00.000Z")) .put("longTime", 1294790400000L) .put("floatIndex", 100.0f) - .put(QueryRunnerTestHelper.indexMetric, 100.000000F) + .put(QueryRunnerTestHelper.INDEX_METRIC, 100.000000F) .put(ColumnHolder.TIME_COLUMN_NAME, 1294790400000L) .build() ), new EventHolder( - segmentIdString, + SEGMENT_ID_STRING, 1, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, DateTimes.of("2011-01-12T00:00:00.000Z")) + .put(EventHolder.TIMESTAMP_KEY, DateTimes.of("2011-01-12T00:00:00.000Z")) .put("longTime", 1294790400000L) .put("floatIndex", 100.0f) - .put(QueryRunnerTestHelper.indexMetric, 100.000000F) + .put(QueryRunnerTestHelper.INDEX_METRIC, 100.000000F) .put(ColumnHolder.TIME_COLUMN_NAME, 1294790400000L) .build() ), new EventHolder( - segmentIdString, + SEGMENT_ID_STRING, 2, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, DateTimes.of("2011-01-12T00:00:00.000Z")) + .put(EventHolder.TIMESTAMP_KEY, DateTimes.of("2011-01-12T00:00:00.000Z")) .put("longTime", 1294790400000L) .put("floatIndex", 100.0f) - .put(QueryRunnerTestHelper.indexMetric, 100.000000F) + .put(QueryRunnerTestHelper.INDEX_METRIC, 100.000000F) .put(ColumnHolder.TIME_COLUMN_NAME, 1294790400000L) .build() ) @@ -763,40 +763,40 @@ public class SelectQueryRunnerTest new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new SelectResultValue( - ImmutableMap.of(segmentIdString, -3), + ImmutableMap.of(SEGMENT_ID_STRING, -3), Sets.newHashSet("null_column", "floatIndex", "longTime"), Sets.newHashSet("__time", "index"), Arrays.asList( new EventHolder( - segmentIdString, + SEGMENT_ID_STRING, -1, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, DateTimes.of("2011-01-13T00:00:00.000Z")) + .put(EventHolder.TIMESTAMP_KEY, DateTimes.of("2011-01-13T00:00:00.000Z")) .put("longTime", 1294876800000L) .put("floatIndex", 1564.6177f) - .put(QueryRunnerTestHelper.indexMetric, 1564.6177f) + .put(QueryRunnerTestHelper.INDEX_METRIC, 1564.6177f) .put(ColumnHolder.TIME_COLUMN_NAME, 1294876800000L) .build() ), new EventHolder( - segmentIdString, + SEGMENT_ID_STRING, -2, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, DateTimes.of("2011-01-13T00:00:00.000Z")) + .put(EventHolder.TIMESTAMP_KEY, DateTimes.of("2011-01-13T00:00:00.000Z")) .put("longTime", 1294876800000L) .put("floatIndex", 826.0602f) - .put(QueryRunnerTestHelper.indexMetric, 826.0602f) + .put(QueryRunnerTestHelper.INDEX_METRIC, 826.0602f) .put(ColumnHolder.TIME_COLUMN_NAME, 1294876800000L) .build() ), new EventHolder( - segmentIdString, + SEGMENT_ID_STRING, -3, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, DateTimes.of("2011-01-13T00:00:00.000Z")) + .put(EventHolder.TIMESTAMP_KEY, 
DateTimes.of("2011-01-13T00:00:00.000Z")) .put("longTime", 1294876800000L) .put("floatIndex", 1689.0128f) - .put(QueryRunnerTestHelper.indexMetric, 1689.0128f) + .put(QueryRunnerTestHelper.INDEX_METRIC, 1689.0128f) .put(ColumnHolder.TIME_COLUMN_NAME, 1294876800000L) .build() ) @@ -815,7 +815,7 @@ public class SelectQueryRunnerTest ExtractionFn jsExtractionFn = new JavaScriptExtractionFn(jsFn, false, JavaScriptConfig.getEnabledInstance()); List dimSpecs = Arrays.asList( - new ExtractionDimensionSpec(QueryRunnerTestHelper.indexMetric, "floatIndex", jsExtractionFn), + new ExtractionDimensionSpec(QueryRunnerTestHelper.INDEX_METRIC, "floatIndex", jsExtractionFn), new ExtractionDimensionSpec(ColumnHolder.TIME_COLUMN_NAME, "longTime", jsExtractionFn) ); @@ -831,40 +831,40 @@ public class SelectQueryRunnerTest new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new SelectResultValue( - ImmutableMap.of(segmentIdString, 2), + ImmutableMap.of(SEGMENT_ID_STRING, 2), Sets.newHashSet("null_column", "floatIndex", "longTime"), Sets.newHashSet("__time", "index"), Arrays.asList( new EventHolder( - segmentIdString, + SEGMENT_ID_STRING, 0, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, DateTimes.of("2011-01-12T00:00:00.000Z")) + .put(EventHolder.TIMESTAMP_KEY, DateTimes.of("2011-01-12T00:00:00.000Z")) .put("longTime", "super-1294790400000") .put("floatIndex", "super-100") - .put(QueryRunnerTestHelper.indexMetric, 100.000000F) + .put(QueryRunnerTestHelper.INDEX_METRIC, 100.000000F) .put(ColumnHolder.TIME_COLUMN_NAME, 1294790400000L) .build() ), new EventHolder( - segmentIdString, + SEGMENT_ID_STRING, 1, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, DateTimes.of("2011-01-12T00:00:00.000Z")) + .put(EventHolder.TIMESTAMP_KEY, DateTimes.of("2011-01-12T00:00:00.000Z")) .put("longTime", "super-1294790400000") .put("floatIndex", "super-100") - .put(QueryRunnerTestHelper.indexMetric, 100.000000F) + .put(QueryRunnerTestHelper.INDEX_METRIC, 100.000000F) .put(ColumnHolder.TIME_COLUMN_NAME, 1294790400000L) .build() ), new EventHolder( - segmentIdString, + SEGMENT_ID_STRING, 2, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, DateTimes.of("2011-01-12T00:00:00.000Z")) + .put(EventHolder.TIMESTAMP_KEY, DateTimes.of("2011-01-12T00:00:00.000Z")) .put("longTime", "super-1294790400000") .put("floatIndex", "super-100") - .put(QueryRunnerTestHelper.indexMetric, 100.000000F) + .put(QueryRunnerTestHelper.INDEX_METRIC, 100.000000F) .put(ColumnHolder.TIME_COLUMN_NAME, 1294790400000L) .build() ) @@ -877,40 +877,40 @@ public class SelectQueryRunnerTest new Result( DateTimes.of("2011-01-12T00:00:00.000Z"), new SelectResultValue( - ImmutableMap.of(segmentIdString, -3), + ImmutableMap.of(SEGMENT_ID_STRING, -3), Sets.newHashSet("null_column", "floatIndex", "longTime"), Sets.newHashSet("__time", "index"), Arrays.asList( new EventHolder( - segmentIdString, + SEGMENT_ID_STRING, -1, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, DateTimes.of("2011-01-13T00:00:00.000Z")) + .put(EventHolder.TIMESTAMP_KEY, DateTimes.of("2011-01-13T00:00:00.000Z")) .put("longTime", "super-1294876800000") .put("floatIndex", "super-1564.617729") - .put(QueryRunnerTestHelper.indexMetric, 1564.6177f) + .put(QueryRunnerTestHelper.INDEX_METRIC, 1564.6177f) .put(ColumnHolder.TIME_COLUMN_NAME, 1294876800000L) .build() ), new EventHolder( - segmentIdString, + SEGMENT_ID_STRING, -2, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, DateTimes.of("2011-01-13T00:00:00.000Z")) + .put(EventHolder.TIMESTAMP_KEY, 
DateTimes.of("2011-01-13T00:00:00.000Z")) .put("longTime", "super-1294876800000") .put("floatIndex", "super-826.060182") - .put(QueryRunnerTestHelper.indexMetric, 826.0602f) + .put(QueryRunnerTestHelper.INDEX_METRIC, 826.0602f) .put(ColumnHolder.TIME_COLUMN_NAME, 1294876800000L) .build() ), new EventHolder( - segmentIdString, + SEGMENT_ID_STRING, -3, new ImmutableMap.Builder() - .put(EventHolder.timestampKey, DateTimes.of("2011-01-13T00:00:00.000Z")) + .put(EventHolder.TIMESTAMP_KEY, DateTimes.of("2011-01-13T00:00:00.000Z")) .put("longTime", "super-1294876800000") .put("floatIndex", "super-1689.012875") - .put(QueryRunnerTestHelper.indexMetric, 1689.0128f) + .put(QueryRunnerTestHelper.INDEX_METRIC, 1689.0128f) .put(ColumnHolder.TIME_COLUMN_NAME, 1294876800000L) .build() ) @@ -924,21 +924,21 @@ public class SelectQueryRunnerTest private Map toPagingIdentifier(int startDelta, boolean descending) { - return ImmutableMap.of(segmentIdString, PagingOffset.toOffset(startDelta, descending)); + return ImmutableMap.of(SEGMENT_ID_STRING, PagingOffset.toOffset(startDelta, descending)); } private List>> toFullEvents(final String[]... valueSet) { - return toEvents(new String[]{EventHolder.timestampKey + ":TIME", - QueryRunnerTestHelper.marketDimension + ":STRING", - QueryRunnerTestHelper.qualityDimension + ":STRING", + return toEvents(new String[]{EventHolder.TIMESTAMP_KEY + ":TIME", + QueryRunnerTestHelper.MARKET_DIMENSION + ":STRING", + QueryRunnerTestHelper.QUALITY_DIMENSION + ":STRING", "qualityLong" + ":LONG", "qualityFloat" + ":FLOAT", "qualityNumericString" + ":STRING", - QueryRunnerTestHelper.placementDimension + ":STRING", - QueryRunnerTestHelper.placementishDimension + ":STRINGS", - QueryRunnerTestHelper.indexMetric + ":FLOAT", - QueryRunnerTestHelper.partialNullDimension + ":STRING"}, + QueryRunnerTestHelper.PLACEMENT_DIMENSION + ":STRING", + QueryRunnerTestHelper.PLACEMENTISH_DIMENSION + ":STRINGS", + QueryRunnerTestHelper.INDEX_METRIC + ":FLOAT", + QueryRunnerTestHelper.PARTIAL_NULL_DIMENSION + ":STRING"}, valueSet); } @@ -1014,7 +1014,7 @@ public class SelectQueryRunnerTest int lastOffset = holders.isEmpty() ? 
offset : holders.get(holders.size() - 1).getOffset(); expected.add( new Result( - new DateTime(group.get(0).get(EventHolder.timestampKey), ISOChronology.getInstanceUTC()), + new DateTime(group.get(0).get(EventHolder.TIMESTAMP_KEY), ISOChronology.getInstanceUTC()), new SelectResultValue( ImmutableMap.of(segmentId, lastOffset), Sets.newHashSet(dimensions), diff --git a/processing/src/test/java/org/apache/druid/query/select/SelectQuerySpecTest.java b/processing/src/test/java/org/apache/druid/query/select/SelectQuerySpecTest.java index 2915f6947a5..ba37c28b8f1 100644 --- a/processing/src/test/java/org/apache/druid/query/select/SelectQuerySpecTest.java +++ b/processing/src/test/java/org/apache/druid/query/select/SelectQuerySpecTest.java @@ -75,11 +75,11 @@ public class SelectQuerySpecTest + "\"context\":null}"; SelectQuery query = new SelectQuery( - new TableDataSource(QueryRunnerTestHelper.dataSource), + new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE), new LegacySegmentSpec(Intervals.of("2011-01-12/2011-01-14")), true, null, - QueryRunnerTestHelper.allGran, + QueryRunnerTestHelper.ALL_GRAN, DefaultDimensionSpec.toSpec(Arrays.asList("market", "quality")), Collections.singletonList("index"), null, @@ -119,11 +119,11 @@ public class SelectQuerySpecTest + "\"context\":null}"; SelectQuery queryWithNull = new SelectQuery( - new TableDataSource(QueryRunnerTestHelper.dataSource), + new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE), new LegacySegmentSpec(Intervals.of("2011-01-12/2011-01-14")), true, null, - QueryRunnerTestHelper.allGran, + QueryRunnerTestHelper.ALL_GRAN, DefaultDimensionSpec.toSpec(Arrays.asList("market", "quality")), Collections.singletonList("index"), null, diff --git a/processing/src/test/java/org/apache/druid/query/spec/QuerySegmentSpecTest.java b/processing/src/test/java/org/apache/druid/query/spec/QuerySegmentSpecTest.java index f60544db771..fd0bd6b856b 100644 --- a/processing/src/test/java/org/apache/druid/query/spec/QuerySegmentSpecTest.java +++ b/processing/src/test/java/org/apache/druid/query/spec/QuerySegmentSpecTest.java @@ -34,12 +34,12 @@ import java.util.Map; */ public class QuerySegmentSpecTest { - private static final ObjectMapper jsonMapper = new DefaultObjectMapper(); + private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper(); @Test public void testSerializationLegacyString() throws Exception { - QuerySegmentSpec spec = jsonMapper.readValue( + QuerySegmentSpec spec = JSON_MAPPER.readValue( "\"2011-10-01/2011-10-10,2011-11-01/2011-11-10\"", QuerySegmentSpec.class ); Assert.assertTrue(spec instanceof LegacySegmentSpec); @@ -52,7 +52,7 @@ public class QuerySegmentSpecTest @Test public void testSerializationLegacyArray() throws Exception { - QuerySegmentSpec spec = jsonMapper.readValue( + QuerySegmentSpec spec = JSON_MAPPER.readValue( "[\"2011-09-01/2011-10-10\", \"2011-11-01/2011-11-10\"]", QuerySegmentSpec.class ); Assert.assertTrue(spec instanceof LegacySegmentSpec); @@ -65,7 +65,7 @@ public class QuerySegmentSpecTest @Test public void testSerializationIntervals() throws Exception { - QuerySegmentSpec spec = jsonMapper.readValue( + QuerySegmentSpec spec = JSON_MAPPER.readValue( "{\"type\": \"intervals\", \"intervals\":[\"2011-08-01/2011-10-10\", \"2011-11-01/2011-11-10\"]}", QuerySegmentSpec.class ); @@ -79,7 +79,7 @@ public class QuerySegmentSpecTest @Test public void testSerializationSegments() { - QuerySegmentSpec spec = jsonMapper.convertValue( + QuerySegmentSpec spec = JSON_MAPPER.convertValue( ImmutableMap.of( "type", "segments", 
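Every hunk in this change applies the same mechanical rename: static final fields move from camelCase to UPPER_SNAKE_CASE, the standard Java naming convention for constants. A minimal sketch of the before/after shape, assuming only jackson-databind on the classpath (the class name ExampleSerdeTest is illustrative, not part of this patch):

import com.fasterxml.jackson.databind.ObjectMapper;

public class ExampleSerdeTest
{
  // Before: private static final ObjectMapper jsonMapper = new ObjectMapper();
  // The camelCase name reads like a mutable instance field.

  // After: UPPER_SNAKE_CASE makes the constant-ness visible at every use site.
  private static final ObjectMapper JSON_MAPPER = new ObjectMapper();

  public static void main(String[] args) throws Exception
  {
    // Call sites change only in the identifier they reference; behavior is identical.
    String json = JSON_MAPPER.writeValueAsString(new int[]{1, 2, 3});
    System.out.println(json); // prints [1,2,3]
  }
}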
diff --git a/processing/src/test/java/org/apache/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java b/processing/src/test/java/org/apache/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java index e7b71e6fa9c..ecbe8af2376 100644 --- a/processing/src/test/java/org/apache/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java +++ b/processing/src/test/java/org/apache/druid/query/timeboundary/TimeBoundaryQueryRunnerTest.java @@ -73,7 +73,7 @@ public class TimeBoundaryQueryRunnerTest } private final QueryRunner runner; - private static final QueryRunnerFactory factory = new TimeBoundaryQueryRunnerFactory( + private static final QueryRunnerFactory FACTORY = new TimeBoundaryQueryRunnerFactory( QueryRunnerTestHelper.NOOP_QUERYWATCHER ); private static Segment segment0; @@ -130,7 +130,7 @@ public class TimeBoundaryQueryRunnerTest private static SegmentId makeIdentifier(Interval interval, String version) { - return SegmentId.of(QueryRunnerTestHelper.dataSource, interval, version, NoneShardSpec.instance()); + return SegmentId.of(QueryRunnerTestHelper.DATA_SOURCE, interval, version, NoneShardSpec.instance()); } private QueryRunner getCustomRunner() throws IOException @@ -156,7 +156,7 @@ public class TimeBoundaryQueryRunnerTest new SingleElementPartitionChunk<>(ReferenceCountingSegment.wrapRootGenerationSegment(segment1)) ); - return QueryRunnerTestHelper.makeFilteringQueryRunner(timeline, factory); + return QueryRunnerTestHelper.makeFilteringQueryRunner(timeline, FACTORY); } @Test diff --git a/processing/src/test/java/org/apache/druid/query/timeboundary/TimeBoundaryQueryTest.java b/processing/src/test/java/org/apache/druid/query/timeboundary/TimeBoundaryQueryTest.java index 27ff47050d6..925eed2af77 100644 --- a/processing/src/test/java/org/apache/druid/query/timeboundary/TimeBoundaryQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/timeboundary/TimeBoundaryQueryTest.java @@ -32,7 +32,7 @@ import java.io.IOException; public class TimeBoundaryQueryTest { - private static final ObjectMapper jsonMapper = new DefaultObjectMapper(); + private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper(); @Test public void testQuerySerialization() throws IOException @@ -41,8 +41,8 @@ public class TimeBoundaryQueryTest .dataSource("testing") .build(); - String json = jsonMapper.writeValueAsString(query); - Query serdeQuery = jsonMapper.readValue(json, Query.class); + String json = JSON_MAPPER.writeValueAsString(query); + Query serdeQuery = JSON_MAPPER.readValue(json, Query.class); Assert.assertEquals(query, serdeQuery); } diff --git a/processing/src/test/java/org/apache/druid/query/timeseries/DefaultTimeseriesQueryMetricsTest.java b/processing/src/test/java/org/apache/druid/query/timeseries/DefaultTimeseriesQueryMetricsTest.java index e29599f3f78..191d9deb10d 100644 --- a/processing/src/test/java/org/apache/druid/query/timeseries/DefaultTimeseriesQueryMetricsTest.java +++ b/processing/src/test/java/org/apache/druid/query/timeseries/DefaultTimeseriesQueryMetricsTest.java @@ -49,11 +49,11 @@ public class DefaultTimeseriesQueryMetricsTest DefaultTimeseriesQueryMetrics queryMetrics = new DefaultTimeseriesQueryMetrics(TestHelper.makeJsonMapper()); TimeseriesQuery query = Druids .newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) - .aggregators(QueryRunnerTestHelper.rowsCount, QueryRunnerTestHelper.indexDoubleSum) - 
.postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) + .aggregators(QueryRunnerTestHelper.ROWS_COUNT, QueryRunnerTestHelper.INDEX_DOUBLE_SUM) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(true) .build(); queryMetrics.query(query); @@ -65,9 +65,9 @@ public class DefaultTimeseriesQueryMetricsTest Assert.assertTrue(actualEvent.containsKey("timestamp")); Assert.assertEquals("", actualEvent.get("host")); Assert.assertEquals("", actualEvent.get("service")); - Assert.assertEquals(QueryRunnerTestHelper.dataSource, actualEvent.get(DruidMetrics.DATASOURCE)); + Assert.assertEquals(QueryRunnerTestHelper.DATA_SOURCE, actualEvent.get(DruidMetrics.DATASOURCE)); Assert.assertEquals(query.getType(), actualEvent.get(DruidMetrics.TYPE)); - List expectedIntervals = QueryRunnerTestHelper.fullOnIntervalSpec.getIntervals(); + List expectedIntervals = QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC.getIntervals(); List expectedStringIntervals = expectedIntervals.stream().map(Interval::toString).collect(Collectors.toList()); Assert.assertEquals(expectedStringIntervals, actualEvent.get(DruidMetrics.INTERVAL)); diff --git a/processing/src/test/java/org/apache/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java b/processing/src/test/java/org/apache/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java index 44d767ef8ae..d920bd1b17f 100644 --- a/processing/src/test/java/org/apache/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java +++ b/processing/src/test/java/org/apache/druid/query/timeseries/TimeSeriesUnionQueryRunnerTest.java @@ -83,17 +83,17 @@ public class TimeSeriesUnionQueryRunnerTest public void testUnionTimeseries() { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.unionDataSource) - .granularity(QueryRunnerTestHelper.dayGran) - .intervals(QueryRunnerTestHelper.firstToThird) + .dataSource(QueryRunnerTestHelper.UNION_DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators( Arrays.asList( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory( "idx", "index" ), - QueryRunnerTestHelper.qualityUniques + QueryRunnerTestHelper.QUALITY_UNIQUES ) ) .descending(descending) @@ -130,11 +130,11 @@ public class TimeSeriesUnionQueryRunnerTest ) ) ) - .granularity(QueryRunnerTestHelper.dayGran) - .intervals(QueryRunnerTestHelper.firstToThird) + .granularity(QueryRunnerTestHelper.DAY_GRAN) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators( Arrays.asList( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory( "idx", "index" diff --git a/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryRunnerTest.java b/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryRunnerTest.java index 056084716e9..fb7ac0a66bd 100644 --- a/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryRunnerTest.java +++ b/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryRunnerTest.java @@ -108,7 +108,7 @@ public class TimeseriesQueryRunnerTest // vectorize? Arrays.asList(false, true), // double vs. 
float - Arrays.asList(QueryRunnerTestHelper.commonDoubleAggregators, QueryRunnerTestHelper.commonFloatAggregators) + Arrays.asList(QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS, QueryRunnerTestHelper.COMMON_FLOAT_AGGREGATORS) ); // Add vectorization tests for any indexes that support it. @@ -159,13 +159,13 @@ public class TimeseriesQueryRunnerTest cannotVectorize(); TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.emptyInterval) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.EMPTY_INTERVAL) .aggregators( Arrays.asList( - QueryRunnerTestHelper.rowsCount, - QueryRunnerTestHelper.indexDoubleSum, + QueryRunnerTestHelper.ROWS_COUNT, + QueryRunnerTestHelper.INDEX_DOUBLE_SUM, new DoubleFirstAggregatorFactory("first", "index") ) @@ -195,15 +195,15 @@ public class TimeseriesQueryRunnerTest { Granularity gran = Granularities.DAY; TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) .granularity(gran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( - QueryRunnerTestHelper.rowsCount, - QueryRunnerTestHelper.indexDoubleSum, - QueryRunnerTestHelper.qualityUniques + QueryRunnerTestHelper.ROWS_COUNT, + QueryRunnerTestHelper.INDEX_DOUBLE_SUM, + QueryRunnerTestHelper.QUALITY_UNIQUES ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); @@ -211,12 +211,12 @@ public class TimeseriesQueryRunnerTest Iterable> results = runner.run(QueryPlus.wrap(query)).toList(); final String[] expectedIndex = descending ? - QueryRunnerTestHelper.expectedFullOnIndexValuesDesc : - QueryRunnerTestHelper.expectedFullOnIndexValues; + QueryRunnerTestHelper.EXPECTED_FULL_ON_INDEX_VALUES_DESC : + QueryRunnerTestHelper.EXPECTED_FULL_ON_INDEX_VALUES; final DateTime expectedLast = descending ? - QueryRunnerTestHelper.earliest : - QueryRunnerTestHelper.last; + QueryRunnerTestHelper.EARLIEST : + QueryRunnerTestHelper.LAST; int count = 0; Result lastResult = null; @@ -231,11 +231,11 @@ public class TimeseriesQueryRunnerTest Assert.assertEquals( result.toString(), - QueryRunnerTestHelper.skippedDay.equals(current) ? 0L : 13L, + QueryRunnerTestHelper.SKIPPED_DAY.equals(current) ? 0L : 13L, value.getLongMetric("rows").longValue() ); - if (!QueryRunnerTestHelper.skippedDay.equals(current)) { + if (!QueryRunnerTestHelper.SKIPPED_DAY.equals(current)) { Assert.assertEquals( result.toString(), Doubles.tryParse(expectedIndex[count]).doubleValue(), @@ -302,9 +302,9 @@ public class TimeseriesQueryRunnerTest { Granularity gran = Granularities.DAY; TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) .granularity(gran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .descending(descending) .context(makeContext()) .build(); @@ -312,8 +312,8 @@ public class TimeseriesQueryRunnerTest Iterable> results = runner.run(QueryPlus.wrap(query)).toList(); final DateTime expectedLast = descending ? 
- QueryRunnerTestHelper.earliest : - QueryRunnerTestHelper.last; + QueryRunnerTestHelper.EARLIEST : + QueryRunnerTestHelper.LAST; Result lastResult = null; for (Result result : results) { @@ -336,9 +336,9 @@ public class TimeseriesQueryRunnerTest cannotVectorize(); TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) .granularity(Granularities.ALL) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Arrays.asList( new DoubleMaxAggregatorFactory("maxIndex", "index"), @@ -372,14 +372,14 @@ public class TimeseriesQueryRunnerTest { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) - .filters(QueryRunnerTestHelper.marketDimension, "upfront") - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) + .filters(QueryRunnerTestHelper.MARKET_DIMENSION, "upfront") + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Arrays.asList( - QueryRunnerTestHelper.rowsCount, - QueryRunnerTestHelper.qualityUniques + QueryRunnerTestHelper.ROWS_COUNT, + QueryRunnerTestHelper.QUALITY_UNIQUES ) ) .descending(descending) @@ -387,13 +387,13 @@ public class TimeseriesQueryRunnerTest .build(); Assert.assertEquals( - new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "upfront", null), + new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "upfront", null), query.getDimensionsFilter() ); final DateTime expectedLast = descending ? - QueryRunnerTestHelper.earliest : - QueryRunnerTestHelper.last; + QueryRunnerTestHelper.EARLIEST : + QueryRunnerTestHelper.LAST; Iterable> results = runner.run(QueryPlus.wrap(query)).toList(); @@ -408,12 +408,12 @@ public class TimeseriesQueryRunnerTest Assert.assertEquals( result.toString(), - QueryRunnerTestHelper.skippedDay.equals(result.getTimestamp()) ? 0L : 2L, + QueryRunnerTestHelper.SKIPPED_DAY.equals(result.getTimestamp()) ? 0L : 2L, value.getLongMetric("rows").longValue() ); Assert.assertEquals( result.toString(), - QueryRunnerTestHelper.skippedDay.equals(result.getTimestamp()) ? 0.0d : 2.0d, + QueryRunnerTestHelper.SKIPPED_DAY.equals(result.getTimestamp()) ? 
0.0d : 2.0d, value.getDoubleMetric( "uniques" ), @@ -426,17 +426,17 @@ public class TimeseriesQueryRunnerTest public void testTimeseries() { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) - .intervals(QueryRunnerTestHelper.firstToThird) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators( Arrays.asList( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory( "idx", "index" ), - QueryRunnerTestHelper.qualityUniques + QueryRunnerTestHelper.QUALITY_UNIQUES ) ) .descending(descending) @@ -467,17 +467,17 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesGrandTotal() { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) - .intervals(QueryRunnerTestHelper.firstToThird) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators( Arrays.asList( - QueryRunnerTestHelper.rowsCount, - QueryRunnerTestHelper.indexLongSum, - QueryRunnerTestHelper.qualityUniques + QueryRunnerTestHelper.ROWS_COUNT, + QueryRunnerTestHelper.INDEX_LONG_SUM, + QueryRunnerTestHelper.QUALITY_UNIQUES ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(ImmutableMap.of(TimeseriesQuery.CTX_GRAND_TOTAL, true)) .build(); @@ -495,7 +495,7 @@ public class TimeseriesQueryRunnerTest 6619L, "uniques", QueryRunnerTestHelper.UNIQUES_9, - QueryRunnerTestHelper.addRowsIndexConstantMetric, + QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT_METRIC, 6633.0 ) ) @@ -513,7 +513,7 @@ public class TimeseriesQueryRunnerTest 5827L, "uniques", QueryRunnerTestHelper.UNIQUES_9, - QueryRunnerTestHelper.addRowsIndexConstantMetric, + QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT_METRIC, 5841.0 ) ) @@ -535,7 +535,7 @@ public class TimeseriesQueryRunnerTest 12446L, "uniques", QueryRunnerTestHelper.UNIQUES_9, - QueryRunnerTestHelper.addRowsIndexConstantMetric, + QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT_METRIC, 12473.0 ) ) @@ -562,16 +562,16 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesIntervalOutOfRanges() { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.emptyInterval) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.EMPTY_INTERVAL) .aggregators( Arrays.asList( - QueryRunnerTestHelper.rowsCount, - QueryRunnerTestHelper.indexLongSum + QueryRunnerTestHelper.ROWS_COUNT, + QueryRunnerTestHelper.INDEX_LONG_SUM ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(ImmutableMap.of(TimeseriesQuery.SKIP_EMPTY_BUCKETS, false)) .build(); @@ -579,14 +579,14 @@ public class TimeseriesQueryRunnerTest expectedResults.add( new Result<>( - QueryRunnerTestHelper.emptyInterval.getIntervals().get(0).getStart(), + QueryRunnerTestHelper.EMPTY_INTERVAL.getIntervals().get(0).getStart(), new TimeseriesResultValue( TestHelper.createExpectedMap( "rows", 
0L, "index", NullHandling.defaultLongValue(), - QueryRunnerTestHelper.addRowsIndexConstantMetric, + QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT_METRIC, NullHandling.sqlCompatible() ? null : 1.0 ) ) @@ -615,14 +615,14 @@ public class TimeseriesQueryRunnerTest cannotVectorize(); TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) - .intervals(QueryRunnerTestHelper.firstToThird) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators( Arrays.asList( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory("idx", "expr"), - QueryRunnerTestHelper.qualityUniques + QueryRunnerTestHelper.QUALITY_UNIQUES ) ) .descending(descending) @@ -661,11 +661,11 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesWithTimeZone() { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) .intervals("2011-03-31T00:00:00-07:00/2011-04-02T00:00:00-07:00") .aggregators( Arrays.asList( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory( "idx", "index" @@ -707,7 +707,7 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesWithVaryingGran() { TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) .granularity(new PeriodGranularity(new Period("P1M"), null, null)) .intervals( Collections.singletonList( @@ -716,12 +716,12 @@ public class TimeseriesQueryRunnerTest ) .aggregators( Arrays.asList( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory( "idx", "index" ), - QueryRunnerTestHelper.qualityUniques + QueryRunnerTestHelper.QUALITY_UNIQUES ) ) .descending(descending) @@ -741,7 +741,7 @@ public class TimeseriesQueryRunnerTest assertExpectedResults(expectedResults1, results1); TimeseriesQuery query2 = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) .granularity("DAY") .intervals( Collections.singletonList( @@ -750,12 +750,12 @@ public class TimeseriesQueryRunnerTest ) .aggregators( Arrays.asList( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory( "idx", "index" ), - QueryRunnerTestHelper.qualityUniques + QueryRunnerTestHelper.QUALITY_UNIQUES ) ) .context(makeContext()) @@ -778,8 +778,8 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesGranularityNotAlignedOnSegmentBoundariesWithFilter() { TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .filters(QueryRunnerTestHelper.marketDimension, "spot", "upfront", "total_market") + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .filters(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "upfront", "total_market") .granularity( new PeriodGranularity( new Period("P7D"), @@ -794,7 +794,7 @@ public class TimeseriesQueryRunnerTest ) .aggregators( Arrays.asList( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory( "idx", "index" @@ -828,8 +828,8 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesQueryZeroFilling() { TimeseriesQuery query1 = 
Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .filters(QueryRunnerTestHelper.marketDimension, "spot", "upfront", "total_market") + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .filters(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "upfront", "total_market") .granularity(Granularities.HOUR) .intervals( Collections.singletonList( @@ -838,7 +838,7 @@ public class TimeseriesQueryRunnerTest ) .aggregators( Arrays.asList( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory( "idx", "index" @@ -895,8 +895,8 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesQueryGranularityNotAlignedWithRollupGranularity() { TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .filters(QueryRunnerTestHelper.marketDimension, "spot", "upfront", "total_market") + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .filters(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "upfront", "total_market") .granularity( new PeriodGranularity( new Period("PT1H"), @@ -907,7 +907,7 @@ public class TimeseriesQueryRunnerTest .intervals(Collections.singletonList(Intervals.of("2011-04-15T00:00:00.000Z/2012"))) .aggregators( Arrays.asList( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory( "idx", "index" @@ -935,8 +935,8 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesWithVaryingGranWithFilter() { TimeseriesQuery query1 = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .filters(QueryRunnerTestHelper.marketDimension, "spot", "upfront", "total_market") + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .filters(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "upfront", "total_market") .granularity(new PeriodGranularity(new Period("P1M"), null, null)) .intervals( Collections.singletonList( @@ -945,12 +945,12 @@ public class TimeseriesQueryRunnerTest ) .aggregators( Arrays.asList( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory( "idx", "index" ), - QueryRunnerTestHelper.qualityUniques + QueryRunnerTestHelper.QUALITY_UNIQUES ) ) .descending(descending) @@ -969,8 +969,8 @@ public class TimeseriesQueryRunnerTest assertExpectedResults(expectedResults1, results1); TimeseriesQuery query2 = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .filters(QueryRunnerTestHelper.marketDimension, "spot", "upfront", "total_market") + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .filters(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "upfront", "total_market") .granularity("DAY") .intervals( Collections.singletonList( @@ -979,12 +979,12 @@ public class TimeseriesQueryRunnerTest ) .aggregators( Arrays.asList( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory( "idx", "index" ), - QueryRunnerTestHelper.qualityUniques + QueryRunnerTestHelper.QUALITY_UNIQUES ) ) .context(makeContext()) @@ -1007,8 +1007,8 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesQueryBeyondTimeRangeOfData() { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) .intervals( new MultipleIntervalSegmentSpec( Collections.singletonList(Intervals.of("2015-01-01/2015-01-10")) @@ 
-1016,7 +1016,7 @@ public class TimeseriesQueryRunnerTest ) .aggregators( Arrays.asList( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory( "idx", "index" @@ -1037,16 +1037,16 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesWithOrFilter() { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) - .filters(QueryRunnerTestHelper.marketDimension, "spot", "upfront", "total_market") - .intervals(QueryRunnerTestHelper.firstToThird) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) + .filters(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "upfront", "total_market") + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators( - QueryRunnerTestHelper.rowsCount, - QueryRunnerTestHelper.indexLongSum, - QueryRunnerTestHelper.qualityUniques + QueryRunnerTestHelper.ROWS_COUNT, + QueryRunnerTestHelper.INDEX_LONG_SUM, + QueryRunnerTestHelper.QUALITY_UNIQUES ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); @@ -1084,20 +1084,20 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesWithRegexFilter() { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) .filters(new RegexDimFilter( - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, "^.p.*$", null )) // spot and upfront - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators( - QueryRunnerTestHelper.rowsCount, - QueryRunnerTestHelper.indexLongSum, - QueryRunnerTestHelper.qualityUniques + QueryRunnerTestHelper.ROWS_COUNT, + QueryRunnerTestHelper.INDEX_LONG_SUM, + QueryRunnerTestHelper.QUALITY_UNIQUES ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); @@ -1135,16 +1135,16 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesWithFilter1() { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) - .filters(QueryRunnerTestHelper.marketDimension, "spot") - .intervals(QueryRunnerTestHelper.firstToThird) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) + .filters(QueryRunnerTestHelper.MARKET_DIMENSION, "spot") + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators( - QueryRunnerTestHelper.rowsCount, - QueryRunnerTestHelper.indexLongSum, - QueryRunnerTestHelper.qualityUniques + QueryRunnerTestHelper.ROWS_COUNT, + QueryRunnerTestHelper.INDEX_LONG_SUM, + QueryRunnerTestHelper.QUALITY_UNIQUES ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); @@ -1182,16 +1182,16 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesWithFilter2() { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - 
.granularity(QueryRunnerTestHelper.dayGran) - .filters(QueryRunnerTestHelper.marketDimension, "upfront") - .intervals(QueryRunnerTestHelper.firstToThird) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) + .filters(QueryRunnerTestHelper.MARKET_DIMENSION, "upfront") + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators( - QueryRunnerTestHelper.rowsCount, - QueryRunnerTestHelper.indexLongSum, - QueryRunnerTestHelper.qualityUniques + QueryRunnerTestHelper.ROWS_COUNT, + QueryRunnerTestHelper.INDEX_LONG_SUM, + QueryRunnerTestHelper.QUALITY_UNIQUES ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); @@ -1229,16 +1229,16 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesWithFilter3() { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) - .filters(QueryRunnerTestHelper.marketDimension, "total_market") - .intervals(QueryRunnerTestHelper.firstToThird) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) + .filters(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market") + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators( - QueryRunnerTestHelper.rowsCount, - QueryRunnerTestHelper.indexLongSum, - QueryRunnerTestHelper.qualityUniques + QueryRunnerTestHelper.ROWS_COUNT, + QueryRunnerTestHelper.INDEX_LONG_SUM, + QueryRunnerTestHelper.QUALITY_UNIQUES ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); @@ -1276,16 +1276,16 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesWithMultiDimFilterAndOr() { AndDimFilter andDimFilter = new AndDimFilter( - new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot", null), - new OrDimFilter(QueryRunnerTestHelper.qualityDimension, "automotive", "business") + new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null), + new OrDimFilter(QueryRunnerTestHelper.QUALITY_DIMENSION, "automotive", "business") ); TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) .filters(andDimFilter) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(aggregatorFactoryList) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); @@ -1323,16 +1323,16 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesWithMultiDimFilter() { AndDimFilter andDimFilter = new AndDimFilter( - new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot", null), - new SelectorDimFilter(QueryRunnerTestHelper.qualityDimension, "automotive", null) + new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null), + new SelectorDimFilter(QueryRunnerTestHelper.QUALITY_DIMENSION, "automotive", null) ); TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) + 
.dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) .filters(andDimFilter) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(aggregatorFactoryList) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); @@ -1370,16 +1370,16 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesWithOtherMultiDimFilter() { AndDimFilter andDimFilter = new AndDimFilter( - new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot", null), - new SelectorDimFilter(QueryRunnerTestHelper.qualityDimension, "business", null) + new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null), + new SelectorDimFilter(QueryRunnerTestHelper.QUALITY_DIMENSION, "business", null) ); TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) .filters(andDimFilter) - .intervals(QueryRunnerTestHelper.firstToThird) - .aggregators(QueryRunnerTestHelper.commonDoubleAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) + .aggregators(QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); @@ -1417,22 +1417,22 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesWithNonExistentFilterInOr() { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) .filters( - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "upfront", "total_market", "billyblank" ) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators( - QueryRunnerTestHelper.rowsCount, - QueryRunnerTestHelper.indexLongSum, - QueryRunnerTestHelper.qualityUniques + QueryRunnerTestHelper.ROWS_COUNT, + QueryRunnerTestHelper.INDEX_LONG_SUM, + QueryRunnerTestHelper.QUALITY_UNIQUES ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); @@ -1471,11 +1471,11 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesWithInFilter() { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) .filters( new InDimFilter( - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, Arrays.asList( "spot", "upfront", @@ -1485,13 +1485,13 @@ public class TimeseriesQueryRunnerTest null ) ) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators( - QueryRunnerTestHelper.rowsCount, - QueryRunnerTestHelper.indexLongSum, - QueryRunnerTestHelper.qualityUniques + QueryRunnerTestHelper.ROWS_COUNT, + QueryRunnerTestHelper.INDEX_LONG_SUM, + 
QueryRunnerTestHelper.QUALITY_UNIQUES ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); @@ -1529,16 +1529,16 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesWithNonExistentFilterAndMultiDimAndOr() { AndDimFilter andDimFilter = new AndDimFilter( - new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot", null), - new OrDimFilter(QueryRunnerTestHelper.qualityDimension, "automotive", "business", "billyblank") + new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null), + new OrDimFilter(QueryRunnerTestHelper.QUALITY_DIMENSION, "automotive", "business", "billyblank") ); TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) .filters(andDimFilter) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(aggregatorFactoryList) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); @@ -1576,12 +1576,12 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesWithFilterOnNonExistentDimension() { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) .filters("bobby", "billy") - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(aggregatorFactoryList) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); @@ -1615,12 +1615,12 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesWithFilterOnNonExistentDimensionSkipBuckets() { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) .filters("bobby", "billy") - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(aggregatorFactoryList) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .context(ImmutableMap.of("skipEmptyBuckets", "true")) .descending(descending) .context(makeContext(ImmutableMap.of("skipEmptyBuckets", "true"))) @@ -1637,12 +1637,12 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesWithNullFilterOnNonExistentDimension() { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) .filters("bobby", null) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(aggregatorFactoryList) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + 
.postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); @@ -1681,12 +1681,12 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesWithInvertedFilterOnNonExistentDimension() { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) .filters(new NotDimFilter(new SelectorDimFilter("bobby", "sally", null))) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(aggregatorFactoryList) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); @@ -1725,12 +1725,12 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesWithNonExistentFilter() { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) - .filters(QueryRunnerTestHelper.marketDimension, "billy") - .intervals(QueryRunnerTestHelper.firstToThird) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) + .filters(QueryRunnerTestHelper.MARKET_DIMENSION, "billy") + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(aggregatorFactoryList) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); @@ -1763,16 +1763,16 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesWithNonExistentFilterAndMultiDim() { AndDimFilter andDimFilter = new AndDimFilter( - new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "billy", null), - new SelectorDimFilter(QueryRunnerTestHelper.qualityDimension, "business", null) + new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "billy", null), + new SelectorDimFilter(QueryRunnerTestHelper.QUALITY_DIMENSION, "business", null) ); TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) .filters(andDimFilter) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(aggregatorFactoryList) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); @@ -1808,14 +1808,14 @@ public class TimeseriesQueryRunnerTest cannotVectorize(); TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.firstToThird) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators( ImmutableList.of( - QueryRunnerTestHelper.indexDoubleSum, - QueryRunnerTestHelper.jsIndexSumIfPlacementishA, - QueryRunnerTestHelper.jsPlacementishCount + QueryRunnerTestHelper.INDEX_DOUBLE_SUM, + QueryRunnerTestHelper.JS_INDEX_SUM_IF_PLACEMENTISH_A, + QueryRunnerTestHelper.JS_PLACEMENTISH_COUNT ) ) 
.descending(descending) @@ -1824,7 +1824,7 @@ public class TimeseriesQueryRunnerTest Iterable> expectedResults = ImmutableList.of( new Result<>( - QueryRunnerTestHelper.firstToThird.getIntervals().get(0).getStart(), + QueryRunnerTestHelper.FIRST_TO_THIRD.getIntervals().get(0).getStart(), new TimeseriesResultValue( ImmutableMap.of( "index", 12459.361190795898d, @@ -1846,15 +1846,15 @@ public class TimeseriesQueryRunnerTest cannotVectorize(); TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .filters(QueryRunnerTestHelper.placementishDimension, "a") - .intervals(QueryRunnerTestHelper.firstToThird) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .filters(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION, "a") + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators( ImmutableList.of( - QueryRunnerTestHelper.indexDoubleSum, - QueryRunnerTestHelper.jsIndexSumIfPlacementishA, - QueryRunnerTestHelper.jsPlacementishCount + QueryRunnerTestHelper.INDEX_DOUBLE_SUM, + QueryRunnerTestHelper.JS_INDEX_SUM_IF_PLACEMENTISH_A, + QueryRunnerTestHelper.JS_PLACEMENTISH_COUNT ) ) .descending(descending) @@ -1863,7 +1863,7 @@ public class TimeseriesQueryRunnerTest List> expectedResults = ImmutableList.of( new Result<>( - QueryRunnerTestHelper.firstToThird.getIntervals().get(0).getStart(), + QueryRunnerTestHelper.FIRST_TO_THIRD.getIntervals().get(0).getStart(), new TimeseriesResultValue( ImmutableMap.of( "index", 283.31103515625d, @@ -1885,9 +1885,9 @@ public class TimeseriesQueryRunnerTest cannotVectorize(); TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.monthGran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.MONTH_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( ImmutableList.of( new DoubleFirstAggregatorFactory("first", "index"), @@ -1992,23 +1992,23 @@ public class TimeseriesQueryRunnerTest { TimeseriesQuery query = Druids .newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) - .filters(QueryRunnerTestHelper.placementishDimension, "preferred") - .intervals(QueryRunnerTestHelper.firstToThird) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) + .filters(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION, "preferred") + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(aggregatorFactoryList) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); TimeseriesQuery query1 = Druids .newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) - .intervals(QueryRunnerTestHelper.firstToThird) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(aggregatorFactoryList) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); @@ -2022,24 +2022,24 @@ public class TimeseriesQueryRunnerTest { TimeseriesQuery 
query = Druids .newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) - .filters(QueryRunnerTestHelper.placementishDimension, "a") - .intervals(QueryRunnerTestHelper.firstToThird) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) + .filters(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION, "a") + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(aggregatorFactoryList) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); TimeseriesQuery query1 = Druids .newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) - .filters(QueryRunnerTestHelper.qualityDimension, "automotive") - .intervals(QueryRunnerTestHelper.firstToThird) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) + .filters(QueryRunnerTestHelper.QUALITY_DIMENSION, "automotive") + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(aggregatorFactoryList) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); @@ -2052,34 +2052,34 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesWithMultiValueDimFilterAndOr1() { AndDimFilter andDimFilter = new AndDimFilter( - new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot", null), - new SelectorDimFilter(QueryRunnerTestHelper.placementishDimension, "a", null) + new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null), + new SelectorDimFilter(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION, "a", null) ); TimeseriesQuery query = Druids .newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) .filters(andDimFilter) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(aggregatorFactoryList) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); AndDimFilter andDimFilter2 = new AndDimFilter( - new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot", null), - new SelectorDimFilter(QueryRunnerTestHelper.qualityDimension, "automotive", null) + new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null), + new SelectorDimFilter(QueryRunnerTestHelper.QUALITY_DIMENSION, "automotive", null) ); TimeseriesQuery query2 = Druids .newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) .filters(andDimFilter2) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(aggregatorFactoryList) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); @@ -2092,34 +2092,34 @@ public class TimeseriesQueryRunnerTest public void 
testTimeseriesWithMultiValueDimFilterAndOr2() { AndDimFilter andDimFilter = new AndDimFilter( - new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot", null), - new OrDimFilter(QueryRunnerTestHelper.placementishDimension, "a", "b") + new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null), + new OrDimFilter(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION, "a", "b") ); TimeseriesQuery query = Druids .newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) .filters(andDimFilter) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(aggregatorFactoryList) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); AndDimFilter andDimFilter2 = new AndDimFilter( - new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot", null), - new OrDimFilter(QueryRunnerTestHelper.qualityDimension, "automotive", "business") + new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null), + new OrDimFilter(QueryRunnerTestHelper.QUALITY_DIMENSION, "automotive", "business") ); TimeseriesQuery query2 = Druids .newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) .filters(andDimFilter2) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(aggregatorFactoryList) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); @@ -2133,21 +2133,21 @@ public class TimeseriesQueryRunnerTest { TimeseriesQuery query = Druids .newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.firstToThird) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators( Lists.newArrayList( Iterables.concat( aggregatorFactoryList, Collections.singletonList(new FilteredAggregatorFactory( new CountAggregatorFactory("filteredAgg"), - new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "spot", null) + new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null) )) ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); @@ -2176,9 +2176,9 @@ public class TimeseriesQueryRunnerTest { TimeseriesQuery query = Druids .newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.firstToThird) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators( Lists.newArrayList( Iterables.concat( @@ -2192,7 +2192,7 @@ public class TimeseriesQueryRunnerTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + 
.postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); @@ -2222,9 +2222,9 @@ public class TimeseriesQueryRunnerTest { TimeseriesQuery query = Druids .newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.firstToThird) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators( Lists.newArrayList( Iterables.concat( @@ -2238,7 +2238,7 @@ public class TimeseriesQueryRunnerTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); @@ -2268,9 +2268,9 @@ public class TimeseriesQueryRunnerTest { TimeseriesQuery query = Druids .newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.firstToThird) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators( Lists.newArrayList( Iterables.concat( @@ -2279,14 +2279,14 @@ public class TimeseriesQueryRunnerTest new FilteredAggregatorFactory( new CountAggregatorFactory("filteredAgg"), new NotDimFilter( - new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "LolLol", null) + new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "LolLol", null) ) ) ) ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); @@ -2315,9 +2315,9 @@ public class TimeseriesQueryRunnerTest { TimeseriesQuery query = Druids .newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .intervals(QueryRunnerTestHelper.firstToThird) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators( Lists.newArrayList( Iterables.concat( @@ -2325,13 +2325,13 @@ public class TimeseriesQueryRunnerTest Collections.singletonList( new FilteredAggregatorFactory( new CountAggregatorFactory("filteredAgg"), - new NotDimFilter(new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, null, null)) + new NotDimFilter(new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, null, null)) ) ) ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .context(makeContext()) .build(); @@ -2362,14 +2362,14 @@ public class TimeseriesQueryRunnerTest cannotVectorize(); TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .intervals(QueryRunnerTestHelper.firstToThird) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators( - QueryRunnerTestHelper.rowsCount, - QueryRunnerTestHelper.jsCountIfTimeGreaterThan, - QueryRunnerTestHelper.__timeLongSum + QueryRunnerTestHelper.ROWS_COUNT, + QueryRunnerTestHelper.JS_COUNT_IF_TIME_GREATER_THAN, + QueryRunnerTestHelper.TIME_LONG_SUM ) - .granularity(QueryRunnerTestHelper.allGran) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .descending(descending) 
.context(makeContext()) .build(); @@ -2399,13 +2399,13 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesWithBoundFilter1() { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) .filters( new AndDimFilter( Arrays.asList( new BoundDimFilter( - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, "spa", "spot", true, @@ -2415,7 +2415,7 @@ public class TimeseriesQueryRunnerTest StringComparators.LEXICOGRAPHIC ), new BoundDimFilter( - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "spotify", null, @@ -2425,7 +2425,7 @@ public class TimeseriesQueryRunnerTest StringComparators.LEXICOGRAPHIC ), new BoundDimFilter( - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, "SPOT", "spot", null, @@ -2437,13 +2437,13 @@ public class TimeseriesQueryRunnerTest ) ) ) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators( - QueryRunnerTestHelper.rowsCount, - QueryRunnerTestHelper.indexLongSum, - QueryRunnerTestHelper.qualityUniques + QueryRunnerTestHelper.ROWS_COUNT, + QueryRunnerTestHelper.INDEX_LONG_SUM, + QueryRunnerTestHelper.QUALITY_UNIQUES ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .context(makeContext()) .build(); @@ -2485,22 +2485,22 @@ public class TimeseriesQueryRunnerTest MapLookupExtractor mapLookupExtractor = new MapLookupExtractor(extractionMap, false); LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, true, null, true, true); TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) .filters( new SelectorDimFilter( - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, "upfront", lookupExtractionFn ) ) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators( - QueryRunnerTestHelper.rowsCount, - QueryRunnerTestHelper.indexLongSum, - QueryRunnerTestHelper.qualityUniques + QueryRunnerTestHelper.ROWS_COUNT, + QueryRunnerTestHelper.INDEX_LONG_SUM, + QueryRunnerTestHelper.QUALITY_UNIQUES ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .context(makeContext()) .build(); @@ -2548,13 +2548,13 @@ public class TimeseriesQueryRunnerTest public void testTimeseriesWithLimit() { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Arrays.asList( - QueryRunnerTestHelper.rowsCount, - QueryRunnerTestHelper.qualityUniques + QueryRunnerTestHelper.ROWS_COUNT, + QueryRunnerTestHelper.QUALITY_UNIQUES ) ) .descending(descending) diff --git a/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryTest.java 
b/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryTest.java index 1240dfc86e9..310880244e2 100644 --- a/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryTest.java @@ -35,7 +35,7 @@ import java.util.Arrays; @RunWith(Parameterized.class) public class TimeseriesQueryTest { - private static final ObjectMapper jsonMapper = TestHelper.makeJsonMapper(); + private static final ObjectMapper JSON_MAPPER = TestHelper.makeJsonMapper(); @Parameterized.Parameters(name = "descending={0}") public static Iterable constructorFeeder() @@ -54,16 +54,16 @@ public class TimeseriesQueryTest public void testQuerySerialization() throws IOException { Query query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) - .aggregators(QueryRunnerTestHelper.rowsCount, QueryRunnerTestHelper.indexDoubleSum) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) + .aggregators(QueryRunnerTestHelper.ROWS_COUNT, QueryRunnerTestHelper.INDEX_DOUBLE_SUM) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .descending(descending) .build(); - String json = jsonMapper.writeValueAsString(query); - Query serdeQuery = jsonMapper.readValue(json, Query.class); + String json = JSON_MAPPER.writeValueAsString(query); + Query serdeQuery = JSON_MAPPER.readValue(json, Query.class); Assert.assertEquals(query, serdeQuery); } diff --git a/processing/src/test/java/org/apache/druid/query/topn/AlphaNumericTopNMetricSpecTest.java b/processing/src/test/java/org/apache/druid/query/topn/AlphaNumericTopNMetricSpecTest.java index c05e5a51cbd..c39cadeef5b 100644 --- a/processing/src/test/java/org/apache/druid/query/topn/AlphaNumericTopNMetricSpecTest.java +++ b/processing/src/test/java/org/apache/druid/query/topn/AlphaNumericTopNMetricSpecTest.java @@ -37,7 +37,7 @@ public class AlphaNumericTopNMetricSpecTest @Test public void testComparator() { - final Comparator comparator = AlphaNumericTopNMetricSpec.comparator; + final Comparator comparator = AlphaNumericTopNMetricSpec.COMPARATOR; // equality Assert.assertEquals(0, comparator.compare("", "")); diff --git a/processing/src/test/java/org/apache/druid/query/topn/DefaultTopNQueryMetricsTest.java b/processing/src/test/java/org/apache/druid/query/topn/DefaultTopNQueryMetricsTest.java index 4f59e561b1d..8006303e1ab 100644 --- a/processing/src/test/java/org/apache/druid/query/topn/DefaultTopNQueryMetricsTest.java +++ b/processing/src/test/java/org/apache/druid/query/topn/DefaultTopNQueryMetricsTest.java @@ -61,7 +61,7 @@ public class DefaultTopNQueryMetricsTest null )) .metric("count") - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators(new CountAggregatorFactory("count")) .threshold(5) .filters(new SelectorDimFilter("tags", "t3", null)) @@ -77,7 +77,7 @@ public class DefaultTopNQueryMetricsTest Assert.assertEquals("", actualEvent.get("service")); Assert.assertEquals("xx", actualEvent.get(DruidMetrics.DATASOURCE)); Assert.assertEquals(query.getType(), actualEvent.get(DruidMetrics.TYPE)); - List expectedIntervals = QueryRunnerTestHelper.fullOnIntervalSpec.getIntervals(); + List expectedIntervals = 
QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC.getIntervals(); List expectedStringIntervals = expectedIntervals.stream().map(Interval::toString).collect(Collectors.toList()); Assert.assertEquals(expectedStringIntervals, actualEvent.get(DruidMetrics.INTERVAL)); diff --git a/processing/src/test/java/org/apache/druid/query/topn/TopNMetricSpecOptimizationsTest.java b/processing/src/test/java/org/apache/druid/query/topn/TopNMetricSpecOptimizationsTest.java index 1c184ff2f07..a342071d0c7 100644 --- a/processing/src/test/java/org/apache/druid/query/topn/TopNMetricSpecOptimizationsTest.java +++ b/processing/src/test/java/org/apache/druid/query/topn/TopNMetricSpecOptimizationsTest.java @@ -57,7 +57,7 @@ public class TopNMetricSpecOptimizationsTest { private static final List AGGS = Lists.newArrayList( Iterables.concat( - QueryRunnerTestHelper.commonDoubleAggregators, + QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS, Lists.newArrayList( new DoubleMaxAggregatorFactory("maxIndex", "index"), new DoubleMinAggregatorFactory("minIndex", "index") @@ -72,14 +72,14 @@ public class TopNMetricSpecOptimizationsTest int cardinality = 1234; int threshold = 4; TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(QueryRunnerTestHelper.indexMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(threshold) .intervals("2018-05-30T00:00:00Z/2018-05-31T00:00:00Z") .aggregators(AGGS) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); StorageAdapter adapter = @@ -106,14 +106,14 @@ public class TopNMetricSpecOptimizationsTest int cardinality = 1234; int threshold = 4; TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(QueryRunnerTestHelper.indexMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(threshold) .intervals("2018-05-30T00:00:00Z/2018-05-30T01:00:00Z") .aggregators(AGGS) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); StorageAdapter adapter = @@ -141,14 +141,14 @@ public class TopNMetricSpecOptimizationsTest int cardinality = 1234; int threshold = 4; TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(QueryRunnerTestHelper.indexMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(threshold) .intervals("2018-05-30T00:00:00Z/2018-05-30T01:00:00Z") .aggregators(AGGS) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); StorageAdapter adapter = @@ -176,15 +176,15 @@ public class TopNMetricSpecOptimizationsTest int cardinality = 1234; int threshold = 4; 
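Aside: the AGGS list these optimization tests share is composed by concatenating the common double aggregators with per-test min/max factories. A minimal sketch of that composition with its generic types written out, assuming the Guava and Druid imports already present in the surrounding test class:

    List<AggregatorFactory> aggs = Lists.newArrayList(
        Iterables.concat(
            QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS,
            Lists.newArrayList(
                new DoubleMaxAggregatorFactory("maxIndex", "index"),
                new DoubleMinAggregatorFactory("minIndex", "index")
            )
        )
    );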
TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) - .filters(QueryRunnerTestHelper.qualityDimension, "entertainment") - .metric(QueryRunnerTestHelper.indexMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .filters(QueryRunnerTestHelper.QUALITY_DIMENSION, "entertainment") + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(threshold) .intervals("2018-05-30T00:00:00Z/2018-05-31T00:00:00Z") .aggregators(AGGS) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); StorageAdapter adapter = @@ -211,14 +211,14 @@ public class TopNMetricSpecOptimizationsTest int cardinality = 1234; int threshold = 4; TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(QueryRunnerTestHelper.indexMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(threshold) .intervals("2018-05-30T00:00:00Z/2018-05-31T00:00:00Z") .aggregators(AGGS) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); diff --git a/processing/src/test/java/org/apache/druid/query/topn/TopNQueryQueryToolChestTest.java b/processing/src/test/java/org/apache/druid/query/topn/TopNQueryQueryToolChestTest.java index 05342e5d455..aca6b749a1b 100644 --- a/processing/src/test/java/org/apache/druid/query/topn/TopNQueryQueryToolChestTest.java +++ b/processing/src/test/java/org/apache/druid/query/topn/TopNQueryQueryToolChestTest.java @@ -73,7 +73,7 @@ import java.util.Map; public class TopNQueryQueryToolChestTest { - private static final SegmentId segmentId = SegmentId.dummy("testSegment"); + private static final SegmentId SEGMENT_ID = SegmentId.dummy("testSegment"); @Test public void testCacheStrategy() throws Exception @@ -254,7 +254,7 @@ public class TopNQueryQueryToolChestTest ); QueryRunner> runner = QueryRunnerTestHelper.makeQueryRunner( factory, - new IncrementalIndexSegment(TestIndex.getIncrementalTestIndex(), segmentId), + new IncrementalIndexSegment(TestIndex.getIncrementalTestIndex(), SEGMENT_ID), null ); @@ -262,12 +262,12 @@ public class TopNQueryQueryToolChestTest context.put("minTopNThreshold", 500); TopNQueryBuilder builder = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.placementishDimension) - .metric(QueryRunnerTestHelper.indexMetric) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) - .aggregators(QueryRunnerTestHelper.commonDoubleAggregators); + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION) + .metric(QueryRunnerTestHelper.INDEX_METRIC) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) + .aggregators(QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS); TopNQuery query1 = builder.threshold(10).context(null).build(); MockQueryRunner mockRunner = new MockQueryRunner(runner); @@ -320,16 +320,16 @@ public class 
TopNQueryQueryToolChestTest HyperLogLogCollector collector = HyperLogLogCollector.makeLatestCollector(); switch (valueType) { case LONG: - collector.add(CardinalityAggregator.hashFn.hashLong((Long) dimValue).asBytes()); + collector.add(CardinalityAggregator.HASH_FUNCTION.hashLong((Long) dimValue).asBytes()); break; case DOUBLE: - collector.add(CardinalityAggregator.hashFn.hashLong(Double.doubleToLongBits((Double) dimValue)).asBytes()); + collector.add(CardinalityAggregator.HASH_FUNCTION.hashLong(Double.doubleToLongBits((Double) dimValue)).asBytes()); break; case FLOAT: - collector.add(CardinalityAggregator.hashFn.hashInt(Float.floatToIntBits((Float) dimValue)).asBytes()); + collector.add(CardinalityAggregator.HASH_FUNCTION.hashInt(Float.floatToIntBits((Float) dimValue)).asBytes()); break; case STRING: - collector.add(CardinalityAggregator.hashFn.hashUnencodedChars((String) dimValue).asBytes()); + collector.add(CardinalityAggregator.HASH_FUNCTION.hashUnencodedChars((String) dimValue).asBytes()); break; default: throw new IllegalArgumentException("bad valueType: " + valueType); diff --git a/processing/src/test/java/org/apache/druid/query/topn/TopNQueryRunnerBenchmark.java b/processing/src/test/java/org/apache/druid/query/topn/TopNQueryRunnerBenchmark.java index eb7e3826cd4..4fdc3d6abf2 100644 --- a/processing/src/test/java/org/apache/druid/query/topn/TopNQueryRunnerBenchmark.java +++ b/processing/src/test/java/org/apache/druid/query/topn/TopNQueryRunnerBenchmark.java @@ -54,20 +54,20 @@ public class TopNQueryRunnerBenchmark extends AbstractBenchmark rtIndex, mMappedTestIndex, mergedRealtimeIndex, rtIndexOffheap } - private static final String marketDimension = "market"; - private static final SegmentId segmentId = SegmentId.dummy("testSegment"); + private static final String MARKET_DIMENSION = "market"; + private static final SegmentId SEGMENT_ID = SegmentId.dummy("testSegment"); - private static final TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(marketDimension) - .metric(QueryRunnerTestHelper.indexMetric) + private static final TopNQuery QUERY = new TopNQueryBuilder() + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(MARKET_DIMENSION) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( - QueryRunnerTestHelper.commonDoubleAggregators, + QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS, Lists.newArrayList( new DoubleMaxAggregatorFactory("maxIndex", "index"), new DoubleMinAggregatorFactory("minIndex", "index") @@ -75,9 +75,9 @@ public class TopNQueryRunnerBenchmark extends AbstractBenchmark ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); - private static final Map testCaseMap = new HashMap<>(); + private static final Map TEST_CASE_MAP = new HashMap<>(); @BeforeClass public static void setUp() @@ -99,27 +99,27 @@ public class TopNQueryRunnerBenchmark extends AbstractBenchmark new TopNQueryQueryToolChest(new TopNQueryConfig(), QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator()), QueryRunnerTestHelper.NOOP_QUERYWATCHER ); - testCaseMap.put( + TEST_CASE_MAP.put( TestCases.rtIndex, QueryRunnerTestHelper.makeQueryRunner( factory, - new 
IncrementalIndexSegment(TestIndex.getIncrementalTestIndex(), segmentId), + new IncrementalIndexSegment(TestIndex.getIncrementalTestIndex(), SEGMENT_ID), null ) ); - testCaseMap.put( + TEST_CASE_MAP.put( TestCases.mMappedTestIndex, QueryRunnerTestHelper.makeQueryRunner( factory, - new QueryableIndexSegment(TestIndex.getMMappedTestIndex(), segmentId), + new QueryableIndexSegment(TestIndex.getMMappedTestIndex(), SEGMENT_ID), null ) ); - testCaseMap.put( + TEST_CASE_MAP.put( TestCases.mergedRealtimeIndex, QueryRunnerTestHelper.makeQueryRunner( factory, - new QueryableIndexSegment(TestIndex.mergedRealtimeIndex(), segmentId), + new QueryableIndexSegment(TestIndex.mergedRealtimeIndex(), SEGMENT_ID), null ) ); @@ -130,7 +130,7 @@ public class TopNQueryRunnerBenchmark extends AbstractBenchmark @Test public void testmMapped() { - testCaseMap.get(TestCases.mMappedTestIndex).run(QueryPlus.wrap(query)); + TEST_CASE_MAP.get(TestCases.mMappedTestIndex).run(QueryPlus.wrap(QUERY)); } @Ignore @@ -138,7 +138,7 @@ public class TopNQueryRunnerBenchmark extends AbstractBenchmark @Test public void testrtIndex() { - testCaseMap.get(TestCases.rtIndex).run(QueryPlus.wrap(query)); + TEST_CASE_MAP.get(TestCases.rtIndex).run(QueryPlus.wrap(QUERY)); } @Ignore @@ -146,7 +146,7 @@ public class TopNQueryRunnerBenchmark extends AbstractBenchmark @Test public void testMerged() { - testCaseMap.get(TestCases.mergedRealtimeIndex).run(QueryPlus.wrap(query)); + TEST_CASE_MAP.get(TestCases.mergedRealtimeIndex).run(QueryPlus.wrap(QUERY)); } @Ignore @@ -154,6 +154,6 @@ public class TopNQueryRunnerBenchmark extends AbstractBenchmark @Test public void testOffHeap() { - testCaseMap.get(TestCases.rtIndexOffheap).run(QueryPlus.wrap(query)); + TEST_CASE_MAP.get(TestCases.rtIndexOffheap).run(QueryPlus.wrap(QUERY)); } } diff --git a/processing/src/test/java/org/apache/druid/query/topn/TopNQueryRunnerTest.java b/processing/src/test/java/org/apache/druid/query/topn/TopNQueryRunnerTest.java index 6aa98412a8f..c6f894ad5dd 100644 --- a/processing/src/test/java/org/apache/druid/query/topn/TopNQueryRunnerTest.java +++ b/processing/src/test/java/org/apache/druid/query/topn/TopNQueryRunnerTest.java @@ -114,12 +114,12 @@ import java.util.stream.Collectors; @RunWith(Parameterized.class) public class TopNQueryRunnerTest { - private static final Closer resourceCloser = Closer.create(); + private static final Closer RESOURCE_CLOSER = Closer.create(); @AfterClass public static void teardown() throws IOException { - resourceCloser.close(); + RESOURCE_CLOSER.close(); } @Parameterized.Parameters(name = "{0}") @@ -136,9 +136,9 @@ public class TopNQueryRunnerTest params[3] = (i & 4) != 0; params[4] = (i & 8) != 0; params[5] = (i & 16) != 0; - params[6] = QueryRunnerTestHelper.commonDoubleAggregators; + params[6] = QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS; Object[] params2 = Arrays.copyOf(params, 7); - params2[6] = QueryRunnerTestHelper.commonFloatAggregators; + params2[6] = QueryRunnerTestHelper.COMMON_FLOAT_AGGREGATORS; parameters.add(params); parameters.add(params2); } @@ -272,12 +272,12 @@ public class TopNQueryRunnerTest public void testEmptyTopN() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(QueryRunnerTestHelper.indexMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + 
.metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.emptyInterval) + .intervals(QueryRunnerTestHelper.EMPTY_INTERVAL) .aggregators( Lists.newArrayList( Iterables.concat( @@ -290,7 +290,7 @@ public class TopNQueryRunnerTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = ImmutableList.of( @@ -306,12 +306,12 @@ public class TopNQueryRunnerTest public void testFullOnTopN() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(QueryRunnerTestHelper.indexMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( @@ -323,7 +323,7 @@ public class TopNQueryRunnerTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -332,7 +332,7 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "total_market") + .put(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market") .put("rows", 186L) .put("index", 215679.82879638672D) .put("addRowsIndexConstant", 215866.82879638672D) @@ -341,7 +341,7 @@ public class TopNQueryRunnerTest .put("minIndex", 792.3260498046875D) .build(), ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "upfront") + .put(QueryRunnerTestHelper.MARKET_DIMENSION, "upfront") .put("rows", 186L) .put("index", 192046.1060180664D) .put("addRowsIndexConstant", 192233.1060180664D) @@ -350,7 +350,7 @@ public class TopNQueryRunnerTest .put("minIndex", 545.9906005859375D) .build(), ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "spot") + .put(QueryRunnerTestHelper.MARKET_DIMENSION, "spot") .put("rows", 837L) .put("index", 95606.57232284546D) .put("addRowsIndexConstant", 96444.57232284546D) @@ -365,7 +365,7 @@ public class TopNQueryRunnerTest assertExpectedResults(expectedResults, query); assertExpectedResults(expectedResults, query.withAggregatorSpecs(Lists.newArrayList(Iterables.concat( - QueryRunnerTestHelper.commonFloatAggregators, + QueryRunnerTestHelper.COMMON_FLOAT_AGGREGATORS, Lists.newArrayList( new FloatMaxAggregatorFactory("maxIndex", "indexFloat"), new FloatMinAggregatorFactory("minIndex", "indexFloat") @@ -378,12 +378,12 @@ public class TopNQueryRunnerTest public void testTopNOnMissingColumn() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension(new DefaultDimensionSpec("nonexistentColumn", "alias")) .metric("rows") .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators(new CountAggregatorFactory("rows")) .build(); @@ -404,12 +404,12 @@ public class TopNQueryRunnerTest public void testTopNOnMissingColumnWithExtractionFn() { 
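Aside: this test relies on a format string with no placeholder mapping every input, including the null read from the nonexistent column, to the same constant, so the topN collapses into a single "theValue" bucket. A minimal sketch of that behavior, assuming StringFormatExtractionFn's default null handling:

    ExtractionFn fn = new StringFormatExtractionFn("theValue");
    String bucket = fn.apply((Object) null); // "theValue" for every row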
TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension(new ExtractionDimensionSpec("nonexistentColumn", "alias", new StringFormatExtractionFn("theValue"))) .metric("rows") .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators(new CountAggregatorFactory("rows")) .build(); @@ -433,12 +433,12 @@ public class TopNQueryRunnerTest public void testFullOnTopNOverPostAggs() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(QueryRunnerTestHelper.addRowsIndexConstantMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .metric(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( @@ -450,7 +450,7 @@ public class TopNQueryRunnerTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -459,7 +459,7 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "total_market") + .put(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market") .put("rows", 186L) .put("index", 215679.82879638672D) .put("addRowsIndexConstant", 215866.82879638672D) @@ -468,7 +468,7 @@ public class TopNQueryRunnerTest .put("minIndex", 792.3260498046875D) .build(), ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "upfront") + .put(QueryRunnerTestHelper.MARKET_DIMENSION, "upfront") .put("rows", 186L) .put("index", 192046.1060180664D) .put("addRowsIndexConstant", 192233.1060180664D) @@ -477,7 +477,7 @@ public class TopNQueryRunnerTest .put("minIndex", 545.9906005859375D) .build(), ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "spot") + .put(QueryRunnerTestHelper.MARKET_DIMENSION, "spot") .put("rows", 837L) .put("index", 95606.57232284546D) .put("addRowsIndexConstant", 96444.57232284546D) @@ -496,12 +496,12 @@ public class TopNQueryRunnerTest public void testFullOnTopNOverPostAggsOnDimension() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) .metric("dimPostAgg") .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( @@ -529,7 +529,7 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "upfront") + .put(QueryRunnerTestHelper.MARKET_DIMENSION, "upfront") .put("dimPostAgg", "upfrontx") .put("rows", 186L) .put("index", 192046.1060180664D) @@ -538,7 +538,7 @@ public class 
TopNQueryRunnerTest .put("minIndex", 545.9906005859375D) .build(), ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "total_market") + .put(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market") .put("dimPostAgg", "total_marketx") .put("rows", 186L) .put("index", 215679.82879638672D) @@ -547,7 +547,7 @@ public class TopNQueryRunnerTest .put("minIndex", 792.3260498046875D) .build(), ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "spot") + .put(QueryRunnerTestHelper.MARKET_DIMENSION, "spot") .put("dimPostAgg", "spotx") .put("rows", 837L) .put("index", 95606.57232284546D) @@ -566,12 +566,12 @@ public class TopNQueryRunnerTest public void testFullOnTopNOverUniques() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(QueryRunnerTestHelper.uniqueMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .metric(QueryRunnerTestHelper.UNIQUE_METRIC) .threshold(3) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( @@ -583,7 +583,7 @@ public class TopNQueryRunnerTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -629,12 +629,12 @@ public class TopNQueryRunnerTest public void testTopNOverMissingUniques() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(QueryRunnerTestHelper.uniqueMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .metric(QueryRunnerTestHelper.UNIQUE_METRIC) .threshold(3) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators(new HyperUniquesAggregatorFactory("uniques", "missingUniques")) .build(); @@ -666,17 +666,17 @@ public class TopNQueryRunnerTest public void testTopNOverHyperUniqueFinalizingPostAggregator() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .metric(QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC) .threshold(3) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) - .aggregators(QueryRunnerTestHelper.qualityUniques) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) + .aggregators(QueryRunnerTestHelper.QUALITY_UNIQUES) .postAggregators( new HyperUniqueFinalizingPostAggregator( - QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, - QueryRunnerTestHelper.uniqueMetric + QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, + QueryRunnerTestHelper.UNIQUE_METRIC ) ) .build(); @@ -688,18 +688,18 @@ public class TopNQueryRunnerTest Arrays.>asList( ImmutableMap.builder() .put("market", "spot") - 
.put(QueryRunnerTestHelper.uniqueMetric, QueryRunnerTestHelper.UNIQUES_9) - .put(QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, QueryRunnerTestHelper.UNIQUES_9) + .put(QueryRunnerTestHelper.UNIQUE_METRIC, QueryRunnerTestHelper.UNIQUES_9) + .put(QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, QueryRunnerTestHelper.UNIQUES_9) .build(), ImmutableMap.builder() .put("market", "total_market") - .put(QueryRunnerTestHelper.uniqueMetric, QueryRunnerTestHelper.UNIQUES_2) - .put(QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, QueryRunnerTestHelper.UNIQUES_2) + .put(QueryRunnerTestHelper.UNIQUE_METRIC, QueryRunnerTestHelper.UNIQUES_2) + .put(QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, QueryRunnerTestHelper.UNIQUES_2) .build(), ImmutableMap.builder() .put("market", "upfront") - .put(QueryRunnerTestHelper.uniqueMetric, QueryRunnerTestHelper.UNIQUES_2) - .put(QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, QueryRunnerTestHelper.UNIQUES_2) + .put(QueryRunnerTestHelper.UNIQUE_METRIC, QueryRunnerTestHelper.UNIQUES_2) + .put(QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, QueryRunnerTestHelper.UNIQUES_2) .build() ) ) @@ -712,16 +712,16 @@ public class TopNQueryRunnerTest public void testTopNOverHyperUniqueExpression() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .metric(QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC) .threshold(3) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) - .aggregators(QueryRunnerTestHelper.qualityUniques) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) + .aggregators(QueryRunnerTestHelper.QUALITY_UNIQUES) .postAggregators( new ExpressionPostAggregator( - QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, "uniques + 1", null, TestExprMacroTable.INSTANCE @@ -736,22 +736,22 @@ public class TopNQueryRunnerTest Arrays.>asList( ImmutableMap.builder() .put("market", "spot") - .put(QueryRunnerTestHelper.uniqueMetric, QueryRunnerTestHelper.UNIQUES_9) - .put(QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + .put(QueryRunnerTestHelper.UNIQUE_METRIC, QueryRunnerTestHelper.UNIQUES_9) + .put(QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, QueryRunnerTestHelper.UNIQUES_9 + 1 ) .build(), ImmutableMap.builder() .put("market", "total_market") - .put(QueryRunnerTestHelper.uniqueMetric, QueryRunnerTestHelper.UNIQUES_2) - .put(QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + .put(QueryRunnerTestHelper.UNIQUE_METRIC, QueryRunnerTestHelper.UNIQUES_2) + .put(QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, QueryRunnerTestHelper.UNIQUES_2 + 1 ) .build(), ImmutableMap.builder() .put("market", "upfront") - .put(QueryRunnerTestHelper.uniqueMetric, QueryRunnerTestHelper.UNIQUES_2) - .put(QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + .put(QueryRunnerTestHelper.UNIQUE_METRIC, QueryRunnerTestHelper.UNIQUES_2) + .put(QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, QueryRunnerTestHelper.UNIQUES_2 + 1 ) .build() @@ -766,16 +766,16 @@ public class TopNQueryRunnerTest public void 
testTopNOverHyperUniqueExpressionRounded() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .metric(QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC) .threshold(3) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) - .aggregators(QueryRunnerTestHelper.qualityUniquesRounded) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) + .aggregators(QueryRunnerTestHelper.QUALITY_UNIQUES_ROUNDED) .postAggregators( new ExpressionPostAggregator( - QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, "uniques + 1", null, TestExprMacroTable.INSTANCE @@ -790,18 +790,18 @@ public class TopNQueryRunnerTest Arrays.>asList( ImmutableMap.builder() .put("market", "spot") - .put(QueryRunnerTestHelper.uniqueMetric, 9L) - .put(QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, 10L) + .put(QueryRunnerTestHelper.UNIQUE_METRIC, 9L) + .put(QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, 10L) .build(), ImmutableMap.builder() .put("market", "total_market") - .put(QueryRunnerTestHelper.uniqueMetric, 2L) - .put(QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, 3L) + .put(QueryRunnerTestHelper.UNIQUE_METRIC, 2L) + .put(QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, 3L) .build(), ImmutableMap.builder() .put("market", "upfront") - .put(QueryRunnerTestHelper.uniqueMetric, 2L) - .put(QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, 3L) + .put(QueryRunnerTestHelper.UNIQUE_METRIC, 2L) + .put(QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, 3L) .build() ) ) @@ -814,12 +814,12 @@ public class TopNQueryRunnerTest public void testTopNOverFirstLastAggregator() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.monthGran) - .dimension(QueryRunnerTestHelper.marketDimension) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.MONTH_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) .metric("last") .threshold(3) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( new LongFirstAggregatorFactory("first", "index"), new LongLastAggregatorFactory("last", "index") @@ -923,12 +923,12 @@ public class TopNQueryRunnerTest public void testTopNOverFirstLastAggregatorChunkPeriod() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.monthGran) - .dimension(QueryRunnerTestHelper.marketDimension) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.MONTH_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) .metric("last") .threshold(3) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( new LongFirstAggregatorFactory("first", "index"), new LongLastAggregatorFactory("last", "index") @@ -1035,12 +1035,12 @@ public class TopNQueryRunnerTest public void testTopNOverFirstLastFloatAggregatorUsingDoubleColumn() { TopNQuery query = new TopNQueryBuilder() - 
.dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.monthGran) - .dimension(QueryRunnerTestHelper.marketDimension) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.MONTH_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) .metric("last") .threshold(3) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( new FloatFirstAggregatorFactory("first", "index"), new FloatLastAggregatorFactory("last", "index") @@ -1144,12 +1144,12 @@ public class TopNQueryRunnerTest public void testTopNOverFirstLastFloatAggregatorUsingFloatColumn() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.monthGran) - .dimension(QueryRunnerTestHelper.marketDimension) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.MONTH_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) .metric("last") .threshold(3) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( new FloatFirstAggregatorFactory("first", "indexFloat"), new FloatLastAggregatorFactory("last", "indexFloat") @@ -1258,14 +1258,14 @@ public class TopNQueryRunnerTest final HashMap specialContext = new HashMap(); specialContext.put("bySegment", "true"); TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(QueryRunnerTestHelper.indexMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .context(specialContext) .build(); @@ -1278,21 +1278,21 @@ public class TopNQueryRunnerTest ImmutableMap.of( "addRowsIndexConstant", 5356.814783D, "index", 5351.814783D, - QueryRunnerTestHelper.marketDimension, "total_market", + QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", "uniques", QueryRunnerTestHelper.UNIQUES_2, "rows", 4L ), ImmutableMap.of( "addRowsIndexConstant", 4880.669692D, "index", 4875.669692D, - QueryRunnerTestHelper.marketDimension, "upfront", + QueryRunnerTestHelper.MARKET_DIMENSION, "upfront", "uniques", QueryRunnerTestHelper.UNIQUES_2, "rows", 4L ), ImmutableMap.of( "addRowsIndexConstant", 2250.876812D, "index", 2231.876812D, - QueryRunnerTestHelper.marketDimension, "spot", + QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "uniques", QueryRunnerTestHelper.UNIQUES_9, "rows", 18L ) @@ -1340,14 +1340,14 @@ public class TopNQueryRunnerTest public void testTopN() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(QueryRunnerTestHelper.indexMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + 
.intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); @@ -1357,21 +1357,21 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "total_market", + QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "upfront", + QueryRunnerTestHelper.MARKET_DIMENSION, "upfront", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "spot", + QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, @@ -1388,14 +1388,14 @@ public class TopNQueryRunnerTest public void testTopNByUniques() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) .metric(new NumericTopNMetricSpec("uniques")) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); @@ -1436,15 +1436,15 @@ public class TopNQueryRunnerTest public void testTopNWithOrFilter1() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .filters(QueryRunnerTestHelper.marketDimension, "total_market", "upfront", "spot") - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(QueryRunnerTestHelper.indexMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .filters(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", "upfront", "spot") + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -1453,21 +1453,21 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "total_market", + QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "upfront", + QueryRunnerTestHelper.MARKET_DIMENSION, "upfront", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "spot", + QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 
2250.876812D, @@ -1484,15 +1484,15 @@ public class TopNQueryRunnerTest public void testTopNWithOrFilter2() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .filters(QueryRunnerTestHelper.marketDimension, "total_market", "upfront") - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(QueryRunnerTestHelper.indexMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .filters(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", "upfront") + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -1501,14 +1501,14 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "total_market", + QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "upfront", + QueryRunnerTestHelper.MARKET_DIMENSION, "upfront", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, @@ -1525,15 +1525,15 @@ public class TopNQueryRunnerTest public void testTopNWithFilter1() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .filters(QueryRunnerTestHelper.marketDimension, "upfront") - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(QueryRunnerTestHelper.indexMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .filters(QueryRunnerTestHelper.MARKET_DIMENSION, "upfront") + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -1542,7 +1542,7 @@ public class TopNQueryRunnerTest new TopNResultValue( Collections.>singletonList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "upfront", + QueryRunnerTestHelper.MARKET_DIMENSION, "upfront", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, @@ -1559,15 +1559,15 @@ public class TopNQueryRunnerTest public void testTopNWithFilter2() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .filters(QueryRunnerTestHelper.qualityDimension, "mezzanine") - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(QueryRunnerTestHelper.indexMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .filters(QueryRunnerTestHelper.QUALITY_DIMENSION, "mezzanine") + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - 
.intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -1576,21 +1576,21 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "upfront", + QueryRunnerTestHelper.MARKET_DIMENSION, "upfront", "rows", 2L, "index", 2591.68359375D, "addRowsIndexConstant", 2594.68359375D, "uniques", QueryRunnerTestHelper.UNIQUES_1 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "total_market", + QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", "rows", 2L, "index", 2508.39599609375D, "addRowsIndexConstant", 2511.39599609375D, "uniques", QueryRunnerTestHelper.UNIQUES_1 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "spot", + QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "rows", 2L, "index", 220.63774871826172D, "addRowsIndexConstant", 223.63774871826172D, @@ -1607,11 +1607,11 @@ public class TopNQueryRunnerTest public void testTopNWithFilter2OneDay() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .filters(QueryRunnerTestHelper.qualityDimension, "mezzanine") - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(QueryRunnerTestHelper.indexMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .filters(QueryRunnerTestHelper.QUALITY_DIMENSION, "mezzanine") + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) .intervals( new MultipleIntervalSegmentSpec( @@ -1619,7 +1619,7 @@ public class TopNQueryRunnerTest ) ) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -1628,21 +1628,21 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "upfront", + QueryRunnerTestHelper.MARKET_DIMENSION, "upfront", "rows", 1L, "index", new Float(1447.341160).doubleValue(), "addRowsIndexConstant", new Float(1449.341160).doubleValue(), "uniques", QueryRunnerTestHelper.UNIQUES_1 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "total_market", + QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", "rows", 1L, "index", new Float(1314.839715).doubleValue(), "addRowsIndexConstant", new Float(1316.839715).doubleValue(), "uniques", QueryRunnerTestHelper.UNIQUES_1 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "spot", + QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "rows", 1L, "index", new Float(109.705815).doubleValue(), "addRowsIndexConstant", new Float(111.705815).doubleValue(), @@ -1659,15 +1659,15 @@ public class TopNQueryRunnerTest public void testTopNWithNonExistentFilterInOr() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .filters(QueryRunnerTestHelper.marketDimension, "total_market", "upfront", "billyblank") - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(QueryRunnerTestHelper.indexMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + 
.granularity(QueryRunnerTestHelper.ALL_GRAN) + .filters(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", "upfront", "billyblank") + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -1676,14 +1676,14 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "total_market", + QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "upfront", + QueryRunnerTestHelper.MARKET_DIMENSION, "upfront", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, @@ -1700,15 +1700,15 @@ public class TopNQueryRunnerTest public void testTopNWithNonExistentFilter() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .filters(QueryRunnerTestHelper.marketDimension, "billyblank") - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(QueryRunnerTestHelper.indexMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .filters(QueryRunnerTestHelper.MARKET_DIMENSION, "billyblank") + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); assertExpectedResults( Collections.singletonList( @@ -1722,19 +1722,19 @@ public class TopNQueryRunnerTest public void testTopNWithNonExistentFilterMultiDim() { AndDimFilter andDimFilter = new AndDimFilter( - new SelectorDimFilter(QueryRunnerTestHelper.marketDimension, "billyblank", null), - new SelectorDimFilter(QueryRunnerTestHelper.qualityDimension, "mezzanine", null) + new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "billyblank", null), + new SelectorDimFilter(QueryRunnerTestHelper.QUALITY_DIMENSION, "mezzanine", null) ); TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .filters(andDimFilter) - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(QueryRunnerTestHelper.indexMetric) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); assertExpectedResults( Collections.singletonList( @@ -1748,29 +1748,29 @@ public class TopNQueryRunnerTest public void testTopNWithMultiValueDimFilter1() { TopNQuery query = new TopNQueryBuilder() - 
.dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .filters(QueryRunnerTestHelper.placementishDimension, "m") - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(QueryRunnerTestHelper.indexMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .filters(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION, "m") + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); assertExpectedResults( runWithMerge( new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .filters(QueryRunnerTestHelper.qualityDimension, "mezzanine") - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(QueryRunnerTestHelper.indexMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .filters(QueryRunnerTestHelper.QUALITY_DIMENSION, "mezzanine") + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build() ).toList(), query @@ -1781,34 +1781,34 @@ public class TopNQueryRunnerTest public void testTopNWithMultiValueDimFilter2() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .filters(QueryRunnerTestHelper.placementishDimension, "m", "a", "b") - .dimension(QueryRunnerTestHelper.qualityDimension) - .metric(QueryRunnerTestHelper.indexMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .filters(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION, "m", "a", "b") + .dimension(QueryRunnerTestHelper.QUALITY_DIMENSION) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); assertExpectedResults( runWithMerge( new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .filters( - QueryRunnerTestHelper.qualityDimension, + QueryRunnerTestHelper.QUALITY_DIMENSION, "mezzanine", "automotive", "business" ) - .dimension(QueryRunnerTestHelper.qualityDimension) - .metric(QueryRunnerTestHelper.indexMetric) + .dimension(QueryRunnerTestHelper.QUALITY_DIMENSION) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build() ).toList(), query @@ -1819,15 +1819,15 @@ public class 
TopNQueryRunnerTest public void testTopNWithMultiValueDimFilter3() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .filters(QueryRunnerTestHelper.placementishDimension, "a") - .dimension(QueryRunnerTestHelper.placementishDimension) - .metric(QueryRunnerTestHelper.indexMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .filters(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION, "a") + .dimension(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); final List> expectedResults = Collections.singletonList( @@ -1860,15 +1860,15 @@ public class TopNQueryRunnerTest public void testTopNWithMultiValueDimFilter4() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .filters(QueryRunnerTestHelper.placementishDimension, "a", "b") - .dimension(QueryRunnerTestHelper.placementishDimension) - .metric(QueryRunnerTestHelper.indexMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .filters(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION, "a", "b") + .dimension(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); final List> expectedResults = Collections.singletonList( @@ -1908,15 +1908,15 @@ public class TopNQueryRunnerTest public void testTopNWithMultiValueDimFilter5() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .filters(QueryRunnerTestHelper.placementishDimension, "preferred") - .dimension(QueryRunnerTestHelper.placementishDimension) - .metric(QueryRunnerTestHelper.indexMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .filters(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION, "preferred") + .dimension(QueryRunnerTestHelper.PLACEMENTISH_DIMENSION) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); final List> expectedResults = Collections.singletonList( @@ -1963,14 +1963,14 @@ public class TopNQueryRunnerTest public void testTopNWithNonExistentDimension() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension("doesn't exist") - .metric(QueryRunnerTestHelper.indexMetric) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(1) - .intervals(QueryRunnerTestHelper.firstToThird) + 
.intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -1996,15 +1996,15 @@ public class TopNQueryRunnerTest public void testTopNWithNonExistentDimensionAndActualFilter() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .filters(QueryRunnerTestHelper.marketDimension, "upfront") + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .filters(QueryRunnerTestHelper.MARKET_DIMENSION, "upfront") .dimension("doesn't exist") - .metric(QueryRunnerTestHelper.indexMetric) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -2030,15 +2030,15 @@ public class TopNQueryRunnerTest public void testTopNWithNonExistentDimensionAndNonExistentFilter() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .filters("doesn't exist", null) .dimension("doesn't exist") - .metric(QueryRunnerTestHelper.indexMetric) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(1) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -2064,14 +2064,14 @@ public class TopNQueryRunnerTest public void testTopNLexicographic() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) .metric(new DimensionTopNMetricSpec("", StringComparators.LEXICOGRAPHIC)) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -2080,21 +2080,21 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "spot", + QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "total_market", + QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "upfront", + 
QueryRunnerTestHelper.MARKET_DIMENSION, "upfront", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, @@ -2111,12 +2111,12 @@ public class TopNQueryRunnerTest public void testTopNLexicographicNoAggregators() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) .metric(new DimensionTopNMetricSpec("", StringComparators.LEXICOGRAPHIC)) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .build(); List> expectedResults = Collections.singletonList( @@ -2125,13 +2125,13 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "spot" + QueryRunnerTestHelper.MARKET_DIMENSION, "spot" ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "total_market" + QueryRunnerTestHelper.MARKET_DIMENSION, "total_market" ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "upfront" + QueryRunnerTestHelper.MARKET_DIMENSION, "upfront" ) ) ) @@ -2144,14 +2144,14 @@ public class TopNQueryRunnerTest public void testTopNLexicographicWithPreviousStop() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) .metric(new DimensionTopNMetricSpec("spot", StringComparators.LEXICOGRAPHIC)) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -2160,14 +2160,14 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "total_market", + QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "upfront", + QueryRunnerTestHelper.MARKET_DIMENSION, "upfront", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, @@ -2184,14 +2184,14 @@ public class TopNQueryRunnerTest public void testTopNLexicographicWithNonExistingPreviousStop() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) .metric(new DimensionTopNMetricSpec("t", StringComparators.LEXICOGRAPHIC)) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = 
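
// A DimensionTopNMetricSpec with a "previousStop" acts as a paging cursor:
// values are walked in the comparator's order and emission resumes strictly
// after the stop, which is why "spot" drops out once it is passed as the stop
// and why a non-existing stop such as "t" still pages correctly. A rough
// model of the lexicographic case (plain Java, not Druid internals):
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

class PreviousStopSketch {
  static List<String> page(Stream<String> dimValues, String previousStop, int threshold) {
    return dimValues.sorted()
                    .filter(v -> previousStop == null || v.compareTo(previousStop) > 0)
                    .limit(threshold)
                    .collect(Collectors.toList());
  }
}
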
Collections.singletonList( @@ -2200,14 +2200,14 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "total_market", + QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "upfront", + QueryRunnerTestHelper.MARKET_DIMENSION, "upfront", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, @@ -2224,14 +2224,14 @@ public class TopNQueryRunnerTest public void testTopNInvertedLexicographicWithPreviousStop() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) .metric(new InvertedTopNMetricSpec(new DimensionTopNMetricSpec("upfront", StringComparators.LEXICOGRAPHIC))) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -2240,14 +2240,14 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "total_market", + QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "spot", + QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, @@ -2264,14 +2264,14 @@ public class TopNQueryRunnerTest public void testTopNInvertedLexicographicWithNonExistingPreviousStop() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) .metric(new InvertedTopNMetricSpec(new DimensionTopNMetricSpec("u", StringComparators.LEXICOGRAPHIC))) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -2280,14 +2280,14 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "total_market", + QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "spot", + QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, @@ -2305,20 +2305,20 @@ public class TopNQueryRunnerTest public void testTopNDimExtractionToOne() { TopNQuery 
query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension( new ExtractionDimensionSpec( - QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, + QueryRunnerTestHelper.MARKET_DIMENSION, new JavaScriptExtractionFn("function(f) { return \"POTATO\"; }", false, JavaScriptConfig.getEnabledInstance()) ) ) .metric("rows") .threshold(10) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -2329,7 +2329,7 @@ public class TopNQueryRunnerTest ImmutableMap.of( "addRowsIndexConstant", 504542.5071372986D, "index", 503332.5071372986D, - QueryRunnerTestHelper.marketDimension, "POTATO", + QueryRunnerTestHelper.MARKET_DIMENSION, "POTATO", "uniques", QueryRunnerTestHelper.UNIQUES_9, "rows", 1209L ) @@ -2347,8 +2347,8 @@ public class TopNQueryRunnerTest public void testTopNDimExtractionTimeToOneLong() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension( new ExtractionDimensionSpec( ColumnHolder.TIME_COLUMN_NAME, @@ -2363,9 +2363,9 @@ public class TopNQueryRunnerTest ) .metric("rows") .threshold(10) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -2394,23 +2394,23 @@ public class TopNQueryRunnerTest public void testTopNCollapsingDimExtraction() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension( new ExtractionDimensionSpec( - QueryRunnerTestHelper.qualityDimension, - QueryRunnerTestHelper.qualityDimension, + QueryRunnerTestHelper.QUALITY_DIMENSION, + QueryRunnerTestHelper.QUALITY_DIMENSION, new RegexDimExtractionFn(".(.)", false, null) ) ) .metric("index") .threshold(2) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( - QueryRunnerTestHelper.rowsCount, - QueryRunnerTestHelper.indexDoubleSum + QueryRunnerTestHelper.ROWS_COUNT, + QueryRunnerTestHelper.INDEX_DOUBLE_SUM ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -2419,13 +2419,13 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.qualityDimension, "e", + QueryRunnerTestHelper.QUALITY_DIMENSION, "e", "rows", 558L, "index", 246645.1204032898, "addRowsIndexConstant", 247204.1204032898 ), ImmutableMap.of( - QueryRunnerTestHelper.qualityDimension, "r", + QueryRunnerTestHelper.QUALITY_DIMENSION, "r", 
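
// The extraction-fn tests hinge on many-to-one mapping: the JavaScript fn
// above sends every market to the single bucket "POTATO" (hence one row of
// 1209), and RegexDimExtractionFn(".(.)", ...) keys each quality by its
// second character, so e.g. "mezzanine", "health", "news" and "technology"
// all land in bucket "e". The regex side is plain first-group matching,
// roughly:
import java.util.regex.Matcher;
import java.util.regex.Pattern;

class RegexExtractionSketch {
  static String extract(String value, String regex) {
    Matcher m = Pattern.compile(regex).matcher(value);
    return m.find() ? m.group(1) : null; // group 1 when present, as RegexDimExtractionFn uses
  }
}
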
"rows", 372L, "index", 222051.08961486816, "addRowsIndexConstant", 222424.08961486816 @@ -2439,7 +2439,7 @@ public class TopNQueryRunnerTest query = query.withAggregatorSpecs( Arrays.asList( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new DoubleSumAggregatorFactory("index", null, "-index + 100", ExprMacroTable.nil()) ) ); @@ -2447,7 +2447,7 @@ public class TopNQueryRunnerTest expectedResults = Collections.singletonList( TopNQueryRunnerTestHelper.createExpectedRows( "2011-01-12T00:00:00.000Z", - new String[] {QueryRunnerTestHelper.qualityDimension, "rows", "index", "addRowsIndexConstant"}, + new String[] {QueryRunnerTestHelper.QUALITY_DIMENSION, "rows", "index", "addRowsIndexConstant"}, Arrays.asList( new Object[] {"n", 93L, -2786.4727909999997, -2692.4727909999997}, new Object[] {"u", 186L, -3949.824348000002, -3762.824348000002} @@ -2462,20 +2462,20 @@ public class TopNQueryRunnerTest public void testTopNDimExtraction() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension( new ExtractionDimensionSpec( - QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, + QueryRunnerTestHelper.MARKET_DIMENSION, new RegexDimExtractionFn("(.)", false, null) ) ) .metric("rows") .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -2484,21 +2484,21 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "s", + QueryRunnerTestHelper.MARKET_DIMENSION, "s", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "t", + QueryRunnerTestHelper.MARKET_DIMENSION, "t", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "u", + QueryRunnerTestHelper.MARKET_DIMENSION, "u", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, @@ -2515,18 +2515,18 @@ public class TopNQueryRunnerTest public void testTopNDimExtractionNoAggregators() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension( new ExtractionDimensionSpec( - QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, + QueryRunnerTestHelper.MARKET_DIMENSION, new RegexDimExtractionFn("(.)", false, null) ) ) - .metric(new LexicographicTopNMetricSpec(QueryRunnerTestHelper.marketDimension)) + .metric(new LexicographicTopNMetricSpec(QueryRunnerTestHelper.MARKET_DIMENSION)) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .build(); List> expectedResults = Collections.singletonList( @@ -2535,13 +2535,13 @@ public class TopNQueryRunnerTest new TopNResultValue( 
Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "s" + QueryRunnerTestHelper.MARKET_DIMENSION, "s" ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "t" + QueryRunnerTestHelper.MARKET_DIMENSION, "t" ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "u" + QueryRunnerTestHelper.MARKET_DIMENSION, "u" ) ) ) @@ -2554,12 +2554,12 @@ public class TopNQueryRunnerTest public void testTopNDimExtractionFastTopNOptimalWithReplaceMissing() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension( new ExtractionDimensionSpec( - QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, + QueryRunnerTestHelper.MARKET_DIMENSION, new LookupExtractionFn( new MapLookupExtractor( ImmutableMap.of( @@ -2575,9 +2575,9 @@ public class TopNQueryRunnerTest ) .metric("rows") .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -2586,21 +2586,21 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "2spot0", + QueryRunnerTestHelper.MARKET_DIMENSION, "2spot0", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "1total_market0", + QueryRunnerTestHelper.MARKET_DIMENSION, "1total_market0", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "3upfront0", + QueryRunnerTestHelper.MARKET_DIMENSION, "3upfront0", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, @@ -2618,12 +2618,12 @@ public class TopNQueryRunnerTest public void testTopNDimExtractionFastTopNUnOptimalWithReplaceMissing() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension( new ExtractionDimensionSpec( - QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, + QueryRunnerTestHelper.MARKET_DIMENSION, new LookupExtractionFn( new MapLookupExtractor( ImmutableMap.of( @@ -2639,9 +2639,9 @@ public class TopNQueryRunnerTest ) .metric("rows") .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -2650,21 +2650,21 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "2spot0", + QueryRunnerTestHelper.MARKET_DIMENSION, "2spot0", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 
), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "1total_market0", + QueryRunnerTestHelper.MARKET_DIMENSION, "1total_market0", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "3upfront0", + QueryRunnerTestHelper.MARKET_DIMENSION, "3upfront0", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, @@ -2683,12 +2683,12 @@ public class TopNQueryRunnerTest public void testTopNDimExtractionFastTopNOptimal() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension( new ExtractionDimensionSpec( - QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, + QueryRunnerTestHelper.MARKET_DIMENSION, new LookupExtractionFn( new MapLookupExtractor( ImmutableMap.of( @@ -2704,9 +2704,9 @@ public class TopNQueryRunnerTest ) .metric("rows") .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -2715,21 +2715,21 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "2spot0", + QueryRunnerTestHelper.MARKET_DIMENSION, "2spot0", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "1total_market0", + QueryRunnerTestHelper.MARKET_DIMENSION, "1total_market0", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "3upfront0", + QueryRunnerTestHelper.MARKET_DIMENSION, "3upfront0", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, @@ -2747,12 +2747,12 @@ public class TopNQueryRunnerTest public void testTopNDimExtractionFastTopNUnOptimal() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension( new ExtractionDimensionSpec( - QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, + QueryRunnerTestHelper.MARKET_DIMENSION, new LookupExtractionFn( new MapLookupExtractor( ImmutableMap.of( @@ -2771,9 +2771,9 @@ public class TopNQueryRunnerTest ) .metric("rows") .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -2782,21 +2782,21 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "spot0", + QueryRunnerTestHelper.MARKET_DIMENSION, "spot0", "rows", 18L, "index", 2231.876812D, 
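
// The "...WithReplaceMissing" pair exercises LookupExtractionFn's fallback
// modes: a value with no entry in the lookup map is either kept as-is
// (retain-missing) or swapped for a fixed replacement string. Approximate
// semantics in plain Java (the flag names mirror, but are not, the Druid
// constructor arguments):
import java.util.Map;

class LookupFallbackSketch {
  static String apply(Map<String, String> lookup, String value,
                      boolean retainMissing, String replaceMissingWith) {
    String mapped = lookup.get(value);
    return mapped != null ? mapped : (retainMissing ? value : replaceMissingWith);
  }
}
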
"addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "total_market0", + QueryRunnerTestHelper.MARKET_DIMENSION, "total_market0", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "upfront0", + QueryRunnerTestHelper.MARKET_DIMENSION, "upfront0", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, @@ -2813,12 +2813,12 @@ public class TopNQueryRunnerTest public void testTopNLexicographicDimExtractionOptimalNamespace() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension( new ExtractionDimensionSpec( - QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, + QueryRunnerTestHelper.MARKET_DIMENSION, new LookupExtractionFn( new MapLookupExtractor( ImmutableMap.of( @@ -2837,9 +2837,9 @@ public class TopNQueryRunnerTest ) .metric(new DimensionTopNMetricSpec(null, StringComparators.LEXICOGRAPHIC)) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -2848,21 +2848,21 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "1upfront", + QueryRunnerTestHelper.MARKET_DIMENSION, "1upfront", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "2spot", + QueryRunnerTestHelper.MARKET_DIMENSION, "2spot", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "3total_market", + QueryRunnerTestHelper.MARKET_DIMENSION, "3total_market", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, @@ -2879,12 +2879,12 @@ public class TopNQueryRunnerTest public void testTopNLexicographicDimExtractionUnOptimalNamespace() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension( new ExtractionDimensionSpec( - QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, + QueryRunnerTestHelper.MARKET_DIMENSION, new LookupExtractionFn( new MapLookupExtractor( ImmutableMap.of( @@ -2903,9 +2903,9 @@ public class TopNQueryRunnerTest ) .metric(new DimensionTopNMetricSpec(null, StringComparators.LEXICOGRAPHIC)) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -2914,21 +2914,21 @@ public class 
TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "1upfront", + QueryRunnerTestHelper.MARKET_DIMENSION, "1upfront", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "2spot", + QueryRunnerTestHelper.MARKET_DIMENSION, "2spot", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "3total_market", + QueryRunnerTestHelper.MARKET_DIMENSION, "3total_market", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, @@ -2946,12 +2946,12 @@ public class TopNQueryRunnerTest public void testTopNLexicographicDimExtractionOptimalNamespaceWithRunner() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension( new ExtractionDimensionSpec( - QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, + QueryRunnerTestHelper.MARKET_DIMENSION, new LookupExtractionFn( new MapLookupExtractor( ImmutableMap.of( @@ -2970,9 +2970,9 @@ public class TopNQueryRunnerTest ) .metric(new DimensionTopNMetricSpec(null, StringComparators.LEXICOGRAPHIC)) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -2981,21 +2981,21 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "1upfront", + QueryRunnerTestHelper.MARKET_DIMENSION, "1upfront", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "2spot", + QueryRunnerTestHelper.MARKET_DIMENSION, "2spot", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "3total_market", + QueryRunnerTestHelper.MARKET_DIMENSION, "3total_market", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, @@ -3012,20 +3012,20 @@ public class TopNQueryRunnerTest public void testTopNLexicographicDimExtraction() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension( new ExtractionDimensionSpec( - QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, + QueryRunnerTestHelper.MARKET_DIMENSION, new RegexDimExtractionFn("(.)", false, null) ) ) .metric(new DimensionTopNMetricSpec(null, StringComparators.LEXICOGRAPHIC)) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + 
.postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -3034,21 +3034,21 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "s", + QueryRunnerTestHelper.MARKET_DIMENSION, "s", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "t", + QueryRunnerTestHelper.MARKET_DIMENSION, "t", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "u", + QueryRunnerTestHelper.MARKET_DIMENSION, "u", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, @@ -3065,20 +3065,20 @@ public class TopNQueryRunnerTest public void testInvertedTopNLexicographicDimExtraction2() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension( new ExtractionDimensionSpec( - QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, + QueryRunnerTestHelper.MARKET_DIMENSION, new RegexDimExtractionFn("..(.)", false, null) ) ) .metric(new InvertedTopNMetricSpec(new DimensionTopNMetricSpec(null, StringComparators.LEXICOGRAPHIC))) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -3087,21 +3087,21 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "t", + QueryRunnerTestHelper.MARKET_DIMENSION, "t", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "o", + QueryRunnerTestHelper.MARKET_DIMENSION, "o", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "f", + QueryRunnerTestHelper.MARKET_DIMENSION, "f", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, @@ -3118,20 +3118,20 @@ public class TopNQueryRunnerTest public void testTopNLexicographicDimExtractionWithPreviousStop() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension( new ExtractionDimensionSpec( - QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, + QueryRunnerTestHelper.MARKET_DIMENSION, new RegexDimExtractionFn("(.)", false, null) ) ) .metric(new DimensionTopNMetricSpec("s", StringComparators.LEXICOGRAPHIC)) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + 
.postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -3140,14 +3140,14 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "t", + QueryRunnerTestHelper.MARKET_DIMENSION, "t", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "u", + QueryRunnerTestHelper.MARKET_DIMENSION, "u", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, @@ -3164,12 +3164,12 @@ public class TopNQueryRunnerTest public void testTopNLexicographicDimExtractionWithSortingPreservedAndPreviousStop() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension( new ExtractionDimensionSpec( - QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, + QueryRunnerTestHelper.MARKET_DIMENSION, new DimExtractionFn() { @Override @@ -3200,9 +3200,9 @@ public class TopNQueryRunnerTest ) .metric(new DimensionTopNMetricSpec("s", StringComparators.LEXICOGRAPHIC)) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -3211,14 +3211,14 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "t", + QueryRunnerTestHelper.MARKET_DIMENSION, "t", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "u", + QueryRunnerTestHelper.MARKET_DIMENSION, "u", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, @@ -3236,20 +3236,20 @@ public class TopNQueryRunnerTest public void testInvertedTopNLexicographicDimExtractionWithPreviousStop() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension( new ExtractionDimensionSpec( - QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, + QueryRunnerTestHelper.MARKET_DIMENSION, new RegexDimExtractionFn("(.)", false, null) ) ) .metric(new InvertedTopNMetricSpec(new DimensionTopNMetricSpec("u", StringComparators.LEXICOGRAPHIC))) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -3258,14 +3258,14 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "t", + QueryRunnerTestHelper.MARKET_DIMENSION, "t", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, 
"uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "s", + QueryRunnerTestHelper.MARKET_DIMENSION, "s", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, @@ -3282,20 +3282,20 @@ public class TopNQueryRunnerTest public void testInvertedTopNLexicographicDimExtractionWithPreviousStop2() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension( new ExtractionDimensionSpec( - QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, + QueryRunnerTestHelper.MARKET_DIMENSION, new RegexDimExtractionFn("..(.)", false, null) ) ) .metric(new InvertedTopNMetricSpec(new DimensionTopNMetricSpec("p", StringComparators.LEXICOGRAPHIC))) .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -3304,14 +3304,14 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "o", + QueryRunnerTestHelper.MARKET_DIMENSION, "o", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "f", + QueryRunnerTestHelper.MARKET_DIMENSION, "f", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, @@ -3355,17 +3355,17 @@ public class TopNQueryRunnerTest }; final TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .metric("rows") .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .dimension( new ExtractionDimensionSpec( - QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, + QueryRunnerTestHelper.MARKET_DIMENSION, nullStringDimExtraction ) ) @@ -3378,21 +3378,21 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "spot", + QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), QueryRunnerTestHelper.orderedMap( - QueryRunnerTestHelper.marketDimension, null, + QueryRunnerTestHelper.MARKET_DIMENSION, null, "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "upfront", + QueryRunnerTestHelper.MARKET_DIMENSION, "upfront", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, @@ -3443,17 +3443,17 @@ public class TopNQueryRunnerTest }; final TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - 
.granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .metric("rows") .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .dimension( new ExtractionDimensionSpec( - QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, + QueryRunnerTestHelper.MARKET_DIMENSION, emptyStringDimExtraction ) ) @@ -3466,21 +3466,21 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "spot", + QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), QueryRunnerTestHelper.orderedMap( - QueryRunnerTestHelper.marketDimension, "", + QueryRunnerTestHelper.MARKET_DIMENSION, "", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "upfront", + QueryRunnerTestHelper.MARKET_DIMENSION, "upfront", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, @@ -3499,14 +3499,14 @@ public class TopNQueryRunnerTest { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) - .metric(new InvertedTopNMetricSpec(new NumericTopNMetricSpec(QueryRunnerTestHelper.indexMetric))) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) + .metric(new InvertedTopNMetricSpec(new NumericTopNMetricSpec(QueryRunnerTestHelper.INDEX_METRIC))) .threshold(3) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -3515,21 +3515,21 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "spot", + QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, "uniques", QueryRunnerTestHelper.UNIQUES_9 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "upfront", + QueryRunnerTestHelper.MARKET_DIMENSION, "upfront", "rows", 4L, "index", 4875.669692D, "addRowsIndexConstant", 4880.669692D, "uniques", QueryRunnerTestHelper.UNIQUES_2 ), ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "total_market", + QueryRunnerTestHelper.MARKET_DIMENSION, "total_market", "rows", 4L, "index", 5351.814783D, "addRowsIndexConstant", 5356.814783D, @@ -3546,17 +3546,17 @@ public class TopNQueryRunnerTest public void testTopNQueryByComplexMetric() { ImmutableList aggregatorDimensionSpecs = ImmutableList.of(new DefaultDimensionSpec( - QueryRunnerTestHelper.qualityDimension, - QueryRunnerTestHelper.qualityDimension + QueryRunnerTestHelper.QUALITY_DIMENSION, + QueryRunnerTestHelper.QUALITY_DIMENSION )); TopNQuery query = new TopNQueryBuilder() - 
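
// InvertedTopNMetricSpec simply flips the wrapped metric's ordering, so the
// numeric spec over "index" with threshold 3 returns the three smallest index
// sums -- spot (2231.87...) first, total_market last, exactly as the expected
// maps show. Equivalent ordering in plain Java:
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;

class BottomNSketch {
  static <T> List<T> bottomN(List<T> rows, Comparator<T> byMetric, int n) {
    // ascending sort + limit == inverted ("bottom") top N
    return rows.stream().sorted(byMetric).limit(n).collect(Collectors.toList());
  }
}
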
.dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) .metric(new NumericTopNMetricSpec("numVals")) .threshold(10) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(duplicateAggregators( new CardinalityAggregatorFactory("numVals", aggregatorDimensionSpecs, false), new CardinalityAggregatorFactory("numVals1", aggregatorDimensionSpecs, false) @@ -3597,23 +3597,23 @@ public class TopNQueryRunnerTest String helloJsFn = "function(str) { return 'hello' }"; ExtractionFn helloFn = new JavaScriptExtractionFn(helloJsFn, false, JavaScriptConfig.getEnabledInstance()); - DimensionSpec dimSpec = new ExtractionDimensionSpec(QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension, + DimensionSpec dimSpec = new ExtractionDimensionSpec(QueryRunnerTestHelper.MARKET_DIMENSION, + QueryRunnerTestHelper.MARKET_DIMENSION, helloFn); ImmutableList aggregatorDimensionSpecs = ImmutableList.of(new ExtractionDimensionSpec( - QueryRunnerTestHelper.qualityDimension, - QueryRunnerTestHelper.qualityDimension, + QueryRunnerTestHelper.QUALITY_DIMENSION, + QueryRunnerTestHelper.QUALITY_DIMENSION, helloFn )); TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension(dimSpec) .metric(new NumericTopNMetricSpec("numVals")) .threshold(10) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(duplicateAggregators( new CardinalityAggregatorFactory("numVals", aggregatorDimensionSpecs, false), new CardinalityAggregatorFactory("numVals1", aggregatorDimensionSpecs, false) @@ -3644,12 +3644,12 @@ public class TopNQueryRunnerTest public void testTopNDependentPostAgg() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) .metric(QueryRunnerTestHelper.dependentPostAggMetric) .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( @@ -3662,8 +3662,8 @@ public class TopNQueryRunnerTest ) ) .postAggregators( - QueryRunnerTestHelper.addRowsIndexConstant, - QueryRunnerTestHelper.dependentPostAgg, + QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT, + QueryRunnerTestHelper.DEPENDENT_POST_AGG, QueryRunnerTestHelper.hyperUniqueFinalizingPostAgg ) .build(); @@ -3674,7 +3674,7 @@ public class TopNQueryRunnerTest new TopNResultValue( Arrays.>asList( ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "total_market") + .put(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market") .put("rows", 186L) .put("index", 215679.82879638672D) .put("addRowsIndexConstant", 215866.82879638672D) @@ -3683,12 +3683,12 @@ public class TopNQueryRunnerTest .put("maxIndex", 1743.92175D) .put("minIndex", 792.3260498046875D) .put( - QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + 
QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, QueryRunnerTestHelper.UNIQUES_2 + 1.0 ) .build(), ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "upfront") + .put(QueryRunnerTestHelper.MARKET_DIMENSION, "upfront") .put("rows", 186L) .put("index", 192046.1060180664D) .put("addRowsIndexConstant", 192233.1060180664D) @@ -3697,19 +3697,19 @@ public class TopNQueryRunnerTest .put("maxIndex", 1870.061029D) .put("minIndex", 545.9906005859375D) .put( - QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, QueryRunnerTestHelper.UNIQUES_2 + 1.0 ) .build(), ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "spot") + .put(QueryRunnerTestHelper.MARKET_DIMENSION, "spot") .put("rows", 837L) .put("index", 95606.57232284546D) .put("addRowsIndexConstant", 96444.57232284546D) .put(QueryRunnerTestHelper.dependentPostAggMetric, 97282.57232284546D) .put("uniques", QueryRunnerTestHelper.UNIQUES_9) .put( - QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric, + QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC, QueryRunnerTestHelper.UNIQUES_9 + 1.0 ) .put("maxIndex", 277.273533D) @@ -3726,12 +3726,12 @@ public class TopNQueryRunnerTest public void testTopNBySegmentResults() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) .metric(QueryRunnerTestHelper.dependentPostAggMetric) .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( @@ -3744,15 +3744,15 @@ public class TopNQueryRunnerTest ) ) .postAggregators( - QueryRunnerTestHelper.addRowsIndexConstant, - QueryRunnerTestHelper.dependentPostAgg + QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT, + QueryRunnerTestHelper.DEPENDENT_POST_AGG ) .context(ImmutableMap.of("finalize", true, "bySegment", true)) .build(); TopNResultValue topNResult = new TopNResultValue( Arrays.>asList( ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "total_market") + .put(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market") .put("rows", 186L) .put("index", 215679.82879638672D) .put("addRowsIndexConstant", 215866.82879638672D) @@ -3762,7 +3762,7 @@ public class TopNQueryRunnerTest .put("minIndex", 792.3260498046875D) .build(), ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "upfront") + .put(QueryRunnerTestHelper.MARKET_DIMENSION, "upfront") .put("rows", 186L) .put("index", 192046.1060180664D) .put("addRowsIndexConstant", 192233.1060180664D) @@ -3772,7 +3772,7 @@ public class TopNQueryRunnerTest .put("minIndex", 545.9906005859375D) .build(), ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "spot") + .put(QueryRunnerTestHelper.MARKET_DIMENSION, "spot") .put("rows", 837L) .put("index", 95606.57232284546D) .put("addRowsIndexConstant", 96444.57232284546D) @@ -3790,7 +3790,7 @@ public class TopNQueryRunnerTest DateTimes.of("2011-01-12T00:00:00.000Z"), new BySegmentResultValueClass<>( Collections.singletonList(new Result<>(DateTimes.of("2011-01-12T00:00:00.000Z"), topNResult)), - QueryRunnerTestHelper.segmentId.toString(), + QueryRunnerTestHelper.SEGMENT_ID.toString(), 
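
// The hand-built expected maps obey two arithmetic identities that make them
// easy to audit: addRowsIndexConstant = index + rows + 1, and the finalizing
// post-agg value is uniques + 1.0 (spelled out literally above). For example:
class PostAggArithmeticSketch {
  public static void main(String[] args) {
    double index = 215679.82879638672D;
    long rows = 186L;
    // prints 215866.82879638672, matching the "total_market" map above
    System.out.println(index + rows + 1);
  }
}
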
Intervals.of("1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z") ) ) @@ -3805,15 +3805,15 @@ public class TopNQueryRunnerTest public void testTopNWithTimeColumn() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .intervals(QueryRunnerTestHelper.firstToThird) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators( - QueryRunnerTestHelper.rowsCount, - QueryRunnerTestHelper.jsCountIfTimeGreaterThan, - QueryRunnerTestHelper.__timeLongSum + QueryRunnerTestHelper.ROWS_COUNT, + QueryRunnerTestHelper.JS_COUNT_IF_TIME_GREATER_THAN, + QueryRunnerTestHelper.TIME_LONG_SUM ) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) .metric("ntimestamps") .threshold(3) .build(); @@ -3861,8 +3861,8 @@ public class TopNQueryRunnerTest public void testTopNTimeExtraction() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension( new ExtractionDimensionSpec( ColumnHolder.TIME_COLUMN_NAME, @@ -3872,12 +3872,12 @@ public class TopNQueryRunnerTest ) .metric("index") .threshold(2) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( - QueryRunnerTestHelper.rowsCount, - QueryRunnerTestHelper.indexDoubleSum + QueryRunnerTestHelper.ROWS_COUNT, + QueryRunnerTestHelper.INDEX_DOUBLE_SUM ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -3908,12 +3908,12 @@ public class TopNQueryRunnerTest public void testTopNOverNullDimension() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension("null_column") - .metric(QueryRunnerTestHelper.indexMetric) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( @@ -3925,7 +3925,7 @@ public class TopNQueryRunnerTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); Map map = new HashMap<>(); @@ -3953,15 +3953,15 @@ public class TopNQueryRunnerTest public void testTopNOverNullDimensionWithFilter() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension("null_column") .filters( new SelectorDimFilter("null_column", null, null) ) - .metric(QueryRunnerTestHelper.indexMetric) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( @@ -3973,7 +3973,7 @@ public class TopNQueryRunnerTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + 
.postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); Map map = new HashMap<>(); @@ -4001,12 +4001,12 @@ public class TopNQueryRunnerTest public void testTopNOverPartialNullDimension() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) .granularity(Granularities.ALL) .dimension("partial_null_column") - .metric(QueryRunnerTestHelper.uniqueMetric) + .metric(QueryRunnerTestHelper.UNIQUE_METRIC) .threshold(1000) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) .build(); @@ -4038,13 +4038,13 @@ public class TopNQueryRunnerTest public void testTopNOverPartialNullDimensionWithFilterOnNullValue() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) .granularity(Granularities.ALL) .dimension("partial_null_column") - .metric(QueryRunnerTestHelper.uniqueMetric) + .metric(QueryRunnerTestHelper.UNIQUE_METRIC) .filters(new SelectorDimFilter("partial_null_column", null, null)) .threshold(1000) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) .build(); @@ -4070,13 +4070,13 @@ public class TopNQueryRunnerTest public void testTopNOverPartialNullDimensionWithFilterOnNOTNullValue() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) .granularity(Granularities.ALL) .dimension("partial_null_column") - .metric(QueryRunnerTestHelper.uniqueMetric) + .metric(QueryRunnerTestHelper.UNIQUE_METRIC) .filters(new SelectorDimFilter("partial_null_column", "value", null)) .threshold(1000) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) .build(); @@ -4102,14 +4102,14 @@ public class TopNQueryRunnerTest public void testAlphaNumericTopNWithNullPreviousStop() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) .granularity(Granularities.ALL) - .dimension(QueryRunnerTestHelper.marketDimension) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) .metric(new DimensionTopNMetricSpec(null, StringComparators.ALPHANUMERIC)) .threshold(2) - .intervals(QueryRunnerTestHelper.secondOnly) + .intervals(QueryRunnerTestHelper.SECOND_ONLY) .aggregators(duplicateAggregators( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new CountAggregatorFactory("rows1") )) .build(); @@ -4141,14 +4141,14 @@ public class TopNQueryRunnerTest public void testNumericDimensionTopNWithNullPreviousStop() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) .granularity(Granularities.ALL) - .dimension(QueryRunnerTestHelper.marketDimension) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) .metric(new DimensionTopNMetricSpec(null, StringComparators.NUMERIC)) .threshold(2) - .intervals(QueryRunnerTestHelper.secondOnly) + .intervals(QueryRunnerTestHelper.SECOND_ONLY) .aggregators(duplicateAggregators( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new CountAggregatorFactory("rows1") )) .build(); @@ -4185,17 +4185,17 @@ public class TopNQueryRunnerTest MapLookupExtractor mapLookupExtractor = new 
MapLookupExtractor(extractionMap, false); LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, false); - TopNQuery query = new TopNQueryBuilder().dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) + TopNQuery query = new TopNQueryBuilder().dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) .metric("rows") .threshold(3) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .filters( new ExtractionDimFilter( - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, "spot0", lookupExtractionFn, null @@ -4209,7 +4209,7 @@ public class TopNQueryRunnerTest new TopNResultValue( Collections.>singletonList( ImmutableMap.of( - QueryRunnerTestHelper.marketDimension, "spot", + QueryRunnerTestHelper.MARKET_DIMENSION, "spot", "rows", 18L, "index", 2231.876812D, "addRowsIndexConstant", 2250.876812D, @@ -4241,12 +4241,12 @@ public class TopNQueryRunnerTest } DimFilter extractionFilter = new ExtractionDimFilter("null_column", "NULL", lookupExtractionFn, null); TopNQueryBuilder topNQueryBuilder = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension("null_column") - .metric(QueryRunnerTestHelper.indexMetric) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( @@ -4261,7 +4261,7 @@ public class TopNQueryRunnerTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant); + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT); TopNQuery topNQueryWithNULLValueExtraction = topNQueryBuilder .filters(extractionFilter) .build(); @@ -4321,12 +4321,12 @@ public class TopNQueryRunnerTest } DimFilter extractionFilter = new ExtractionDimFilter("null_column", "NULL", lookupExtractionFn, null); TopNQueryBuilder topNQueryBuilder = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension("null_column") - .metric(QueryRunnerTestHelper.indexMetric) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( @@ -4341,7 +4341,7 @@ public class TopNQueryRunnerTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant); + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT); TopNQuery topNQueryWithNULLValueExtraction = topNQueryBuilder .filters(extractionFilter) .build(); @@ -4374,12 +4374,12 @@ public class TopNQueryRunnerTest public void testFullOnTopNFloatColumn() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(new 
DefaultDimensionSpec(QueryRunnerTestHelper.indexMetric, "index_alias", ValueType.FLOAT)) - .metric(QueryRunnerTestHelper.indexMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(new DefaultDimensionSpec(QueryRunnerTestHelper.INDEX_METRIC, "index_alias", ValueType.FLOAT)) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( @@ -4391,7 +4391,7 @@ public class TopNQueryRunnerTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -4401,7 +4401,7 @@ public class TopNQueryRunnerTest Arrays.>asList( ImmutableMap.builder() .put("index_alias", 1000.0f) - .put(QueryRunnerTestHelper.indexMetric, 2000.0D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 2000.0D) .put("rows", 2L) .put("addRowsIndexConstant", 2003.0D) .put("uniques", QueryRunnerTestHelper.UNIQUES_2) @@ -4410,7 +4410,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("index_alias", 1870.061029f) - .put(QueryRunnerTestHelper.indexMetric, 1870.061029D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 1870.061029D) .put("rows", 1L) .put("addRowsIndexConstant", 1872.06103515625D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4419,7 +4419,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("index_alias", 1862.737933f) - .put(QueryRunnerTestHelper.indexMetric, 1862.737933D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 1862.737933D) .put("rows", 1L) .put("addRowsIndexConstant", 1864.7379150390625D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4428,7 +4428,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("index_alias", 1743.92175f) - .put(QueryRunnerTestHelper.indexMetric, 1743.92175D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 1743.92175D) .put("rows", 1L) .put("addRowsIndexConstant", 1745.9217529296875D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4449,12 +4449,12 @@ public class TopNQueryRunnerTest ExtractionFn jsExtractionFn = new JavaScriptExtractionFn(jsFn, false, JavaScriptConfig.getEnabledInstance()); TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(new ExtractionDimensionSpec(QueryRunnerTestHelper.indexMetric, "index_alias", jsExtractionFn)) - .metric(QueryRunnerTestHelper.indexMetric) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(new ExtractionDimensionSpec(QueryRunnerTestHelper.INDEX_METRIC, "index_alias", jsExtractionFn)) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( @@ -4466,7 +4466,7 @@ public class TopNQueryRunnerTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -4476,7 +4476,7 @@ public class TopNQueryRunnerTest Arrays.>asList( ImmutableMap.builder() .put("index_alias", "super-1000") - .put(QueryRunnerTestHelper.indexMetric, 2000.0D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 
2000.0D) .put("rows", 2L) .put("addRowsIndexConstant", 2003.0D) .put("uniques", QueryRunnerTestHelper.UNIQUES_2) @@ -4485,7 +4485,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("index_alias", "super-1870.061029") - .put(QueryRunnerTestHelper.indexMetric, 1870.061029D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 1870.061029D) .put("rows", 1L) .put("addRowsIndexConstant", 1872.06103515625D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4494,7 +4494,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("index_alias", "super-1862.737933") - .put(QueryRunnerTestHelper.indexMetric, 1862.737933D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 1862.737933D) .put("rows", 1L) .put("addRowsIndexConstant", 1864.7379150390625D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4503,7 +4503,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("index_alias", "super-1743.92175") - .put(QueryRunnerTestHelper.indexMetric, 1743.92175D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 1743.92175D) .put("rows", 1L) .put("addRowsIndexConstant", 1745.9217529296875D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4521,12 +4521,12 @@ public class TopNQueryRunnerTest public void testFullOnTopNFloatColumnAsString() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension(new DefaultDimensionSpec("qualityFloat", "qf_alias")) .metric("maxIndex") .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( @@ -4538,7 +4538,7 @@ public class TopNQueryRunnerTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -4548,7 +4548,7 @@ public class TopNQueryRunnerTest Arrays.>asList( ImmutableMap.builder() .put("qf_alias", "14000.0") - .put(QueryRunnerTestHelper.indexMetric, 217725.41940800005D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 217725.41940800005D) .put("rows", 279L) .put("addRowsIndexConstant", 218005.41940800005D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4557,7 +4557,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("qf_alias", "16000.0") - .put(QueryRunnerTestHelper.indexMetric, 210865.67977600006D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 210865.67977600006D) .put("rows", 279L) .put("addRowsIndexConstant", 211145.67977600006D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4566,7 +4566,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("qf_alias", "10000.0") - .put(QueryRunnerTestHelper.indexMetric, 12270.807093D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 12270.807093D) .put("rows", 93L) .put("addRowsIndexConstant", 12364.807093D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4575,7 +4575,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("qf_alias", "12000.0") - .put(QueryRunnerTestHelper.indexMetric, 12086.472791D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 12086.472791D) .put("rows", 93L) .put("addRowsIndexConstant", 12180.472791D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4593,12 +4593,12 @@ public class TopNQueryRunnerTest public void testFullOnTopNLongColumn() { TopNQuery 
query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension(new DefaultDimensionSpec("qualityLong", "ql_alias", ValueType.LONG)) .metric("maxIndex") .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( @@ -4610,7 +4610,7 @@ public class TopNQueryRunnerTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -4620,7 +4620,7 @@ public class TopNQueryRunnerTest Arrays.>asList( ImmutableMap.builder() .put("ql_alias", 1400L) - .put(QueryRunnerTestHelper.indexMetric, 217725.41940800005D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 217725.41940800005D) .put("rows", 279L) .put("addRowsIndexConstant", 218005.41940800005D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4629,7 +4629,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("ql_alias", 1600L) - .put(QueryRunnerTestHelper.indexMetric, 210865.67977600006D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 210865.67977600006D) .put("rows", 279L) .put("addRowsIndexConstant", 211145.67977600006D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4638,7 +4638,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("ql_alias", 1000L) - .put(QueryRunnerTestHelper.indexMetric, 12270.807093D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 12270.807093D) .put("rows", 93L) .put("addRowsIndexConstant", 12364.807093D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4647,7 +4647,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("ql_alias", 1200L) - .put(QueryRunnerTestHelper.indexMetric, 12086.472791D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 12086.472791D) .put("rows", 93L) .put("addRowsIndexConstant", 12180.472791D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4665,12 +4665,12 @@ public class TopNQueryRunnerTest public void testFullOnTopNLongVirtualColumn() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension(new DefaultDimensionSpec("ql_expr", "ql_alias", ValueType.LONG)) .metric("maxIndex") .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( @@ -4682,7 +4682,7 @@ public class TopNQueryRunnerTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .virtualColumns(new ExpressionVirtualColumn("ql_expr", "qualityLong", ValueType.LONG, ExprMacroTable.nil())) .build(); @@ -4693,7 +4693,7 @@ public class TopNQueryRunnerTest Arrays.>asList( ImmutableMap.builder() .put("ql_alias", 1400L) - .put(QueryRunnerTestHelper.indexMetric, 217725.41940800005D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 217725.41940800005D) .put("rows", 279L) .put("addRowsIndexConstant", 218005.41940800005D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4702,7 +4702,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("ql_alias", 1600L) - 
.put(QueryRunnerTestHelper.indexMetric, 210865.67977600006D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 210865.67977600006D) .put("rows", 279L) .put("addRowsIndexConstant", 211145.67977600006D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4711,7 +4711,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("ql_alias", 1000L) - .put(QueryRunnerTestHelper.indexMetric, 12270.807093D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 12270.807093D) .put("rows", 93L) .put("addRowsIndexConstant", 12364.807093D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4720,7 +4720,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("ql_alias", 1200L) - .put(QueryRunnerTestHelper.indexMetric, 12086.472791D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 12086.472791D) .put("rows", 93L) .put("addRowsIndexConstant", 12180.472791D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4738,8 +4738,8 @@ public class TopNQueryRunnerTest public void testTopNStringVirtualColumn() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .virtualColumns( new ExpressionVirtualColumn( "vc", @@ -4751,9 +4751,9 @@ public class TopNQueryRunnerTest .dimension("vc") .metric("rows") .threshold(4) - .intervals(QueryRunnerTestHelper.firstToThird) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators(commonAggregators) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -4796,12 +4796,12 @@ public class TopNQueryRunnerTest ExtractionFn jsExtractionFn = new JavaScriptExtractionFn(jsFn, false, JavaScriptConfig.getEnabledInstance()); TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension(new ExtractionDimensionSpec("qualityLong", "ql_alias", jsExtractionFn)) .metric("maxIndex") .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( @@ -4813,7 +4813,7 @@ public class TopNQueryRunnerTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -4823,7 +4823,7 @@ public class TopNQueryRunnerTest Arrays.>asList( ImmutableMap.builder() .put("ql_alias", "super-1400") - .put(QueryRunnerTestHelper.indexMetric, 217725.41940800005D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 217725.41940800005D) .put("rows", 279L) .put("addRowsIndexConstant", 218005.41940800005D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4832,7 +4832,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("ql_alias", "super-1600") - .put(QueryRunnerTestHelper.indexMetric, 210865.67977600006D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 210865.67977600006D) .put("rows", 279L) .put("addRowsIndexConstant", 211145.67977600006D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4841,7 +4841,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("ql_alias", "super-1000") - .put(QueryRunnerTestHelper.indexMetric, 
12270.807093D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 12270.807093D) .put("rows", 93L) .put("addRowsIndexConstant", 12364.807093D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4850,7 +4850,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("ql_alias", "super-1200") - .put(QueryRunnerTestHelper.indexMetric, 12086.472791D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 12086.472791D) .put("rows", 93L) .put("addRowsIndexConstant", 12180.472791D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4868,12 +4868,12 @@ public class TopNQueryRunnerTest public void testFullOnTopNLongColumnAsString() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension(new DefaultDimensionSpec("qualityLong", "ql_alias")) .metric("maxIndex") .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( @@ -4885,7 +4885,7 @@ public class TopNQueryRunnerTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -4895,7 +4895,7 @@ public class TopNQueryRunnerTest Arrays.>asList( ImmutableMap.builder() .put("ql_alias", "1400") - .put(QueryRunnerTestHelper.indexMetric, 217725.41940800005D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 217725.41940800005D) .put("rows", 279L) .put("addRowsIndexConstant", 218005.41940800005D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4904,7 +4904,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("ql_alias", "1600") - .put(QueryRunnerTestHelper.indexMetric, 210865.67977600006D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 210865.67977600006D) .put("rows", 279L) .put("addRowsIndexConstant", 211145.67977600006D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4913,7 +4913,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("ql_alias", "1000") - .put(QueryRunnerTestHelper.indexMetric, 12270.807093D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 12270.807093D) .put("rows", 93L) .put("addRowsIndexConstant", 12364.807093D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4922,7 +4922,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("ql_alias", "1200") - .put(QueryRunnerTestHelper.indexMetric, 12086.472791D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 12086.472791D) .put("rows", 93L) .put("addRowsIndexConstant", 12180.472791D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4940,12 +4940,12 @@ public class TopNQueryRunnerTest public void testFullOnTopNNumericStringColumnAsLong() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension(new DefaultDimensionSpec("qualityNumericString", "qns_alias", ValueType.LONG)) .metric("maxIndex") .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( @@ -4957,7 +4957,7 @@ public class TopNQueryRunnerTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + 
.postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -4967,7 +4967,7 @@ public class TopNQueryRunnerTest Arrays.>asList( ImmutableMap.builder() .put("qns_alias", 140000L) - .put(QueryRunnerTestHelper.indexMetric, 217725.41940800005D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 217725.41940800005D) .put("rows", 279L) .put("addRowsIndexConstant", 218005.41940800005D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4976,7 +4976,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("qns_alias", 160000L) - .put(QueryRunnerTestHelper.indexMetric, 210865.67977600006D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 210865.67977600006D) .put("rows", 279L) .put("addRowsIndexConstant", 211145.67977600006D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4985,7 +4985,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("qns_alias", 100000L) - .put(QueryRunnerTestHelper.indexMetric, 12270.807093D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 12270.807093D) .put("rows", 93L) .put("addRowsIndexConstant", 12364.807093D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -4994,7 +4994,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("qns_alias", 120000L) - .put(QueryRunnerTestHelper.indexMetric, 12086.472791D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 12086.472791D) .put("rows", 93L) .put("addRowsIndexConstant", 12180.472791D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -5012,12 +5012,12 @@ public class TopNQueryRunnerTest public void testFullOnTopNNumericStringColumnAsFloat() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension(new DefaultDimensionSpec("qualityNumericString", "qns_alias", ValueType.FLOAT)) .metric("maxIndex") .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( @@ -5029,7 +5029,7 @@ public class TopNQueryRunnerTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -5039,7 +5039,7 @@ public class TopNQueryRunnerTest Arrays.>asList( ImmutableMap.builder() .put("qns_alias", 140000.0f) - .put(QueryRunnerTestHelper.indexMetric, 217725.41940800005D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 217725.41940800005D) .put("rows", 279L) .put("addRowsIndexConstant", 218005.41940800005D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -5048,7 +5048,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("qns_alias", 160000.0f) - .put(QueryRunnerTestHelper.indexMetric, 210865.67977600006D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 210865.67977600006D) .put("rows", 279L) .put("addRowsIndexConstant", 211145.67977600006D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -5057,7 +5057,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("qns_alias", 100000.0f) - .put(QueryRunnerTestHelper.indexMetric, 12270.807093D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 12270.807093D) .put("rows", 93L) .put("addRowsIndexConstant", 12364.807093D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -5066,7 +5066,7 @@ public class TopNQueryRunnerTest 
.build(), ImmutableMap.builder() .put("qns_alias", 120000.0f) - .put(QueryRunnerTestHelper.indexMetric, 12086.472791D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 12086.472791D) .put("rows", 93L) .put("addRowsIndexConstant", 12180.472791D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -5084,12 +5084,12 @@ public class TopNQueryRunnerTest public void testFullOnTopNLongTimeColumn() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension(new DefaultDimensionSpec(ColumnHolder.TIME_COLUMN_NAME, "time_alias", ValueType.LONG)) .metric("maxIndex") .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( @@ -5101,7 +5101,7 @@ public class TopNQueryRunnerTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -5111,7 +5111,7 @@ public class TopNQueryRunnerTest Arrays.>asList( ImmutableMap.builder() .put("time_alias", 1296345600000L) - .put(QueryRunnerTestHelper.indexMetric, 5497.331253051758D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 5497.331253051758D) .put("rows", 13L) .put("addRowsIndexConstant", 5511.331253051758D) .put("uniques", QueryRunnerTestHelper.UNIQUES_9) @@ -5120,7 +5120,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("time_alias", 1298678400000L) - .put(QueryRunnerTestHelper.indexMetric, 6541.463027954102D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 6541.463027954102D) .put("rows", 13L) .put("addRowsIndexConstant", 6555.463027954102D) .put("uniques", QueryRunnerTestHelper.UNIQUES_9) @@ -5129,7 +5129,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("time_alias", 1301529600000L) - .put(QueryRunnerTestHelper.indexMetric, 6814.467971801758D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 6814.467971801758D) .put("rows", 13L) .put("addRowsIndexConstant", 6828.467971801758D) .put("uniques", QueryRunnerTestHelper.UNIQUES_9) @@ -5138,7 +5138,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("time_alias", 1294876800000L) - .put(QueryRunnerTestHelper.indexMetric, 6077.949111938477D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 6077.949111938477D) .put("rows", 13L) .put("addRowsIndexConstant", 6091.949111938477D) .put("uniques", QueryRunnerTestHelper.UNIQUES_9) @@ -5156,12 +5156,12 @@ public class TopNQueryRunnerTest public void testSortOnDoubleAsLong() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension(new DefaultDimensionSpec("index", "index_alias", ValueType.LONG)) .metric(new DimensionTopNMetricSpec(null, StringComparators.NUMERIC)) .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .build(); List> expectedResults = Collections.singletonList( @@ -5192,12 +5192,12 @@ public class TopNQueryRunnerTest public void testSortOnTimeAsLong() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + 
.dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension(new DefaultDimensionSpec("__time", "__time_alias", ValueType.LONG)) .metric(new DimensionTopNMetricSpec(null, StringComparators.NUMERIC)) .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .build(); List> expectedResults = Collections.singletonList( @@ -5228,12 +5228,12 @@ public class TopNQueryRunnerTest public void testSortOnStringAsDouble() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension(new DefaultDimensionSpec("market", "alias", ValueType.DOUBLE)) .metric(new DimensionTopNMetricSpec(null, StringComparators.NUMERIC)) .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .build(); final Map nullAliasMap = new HashMap<>(); @@ -5252,12 +5252,12 @@ public class TopNQueryRunnerTest public void testSortOnDoubleAsDouble() { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension(new DefaultDimensionSpec("index", "index_alias", ValueType.DOUBLE)) .metric(new DimensionTopNMetricSpec(null, StringComparators.NUMERIC)) .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .build(); List> expectedResults = Collections.singletonList( @@ -5291,12 +5291,12 @@ public class TopNQueryRunnerTest ExtractionFn jsExtractionFn = new JavaScriptExtractionFn(jsFn, false, JavaScriptConfig.getEnabledInstance()); TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension(new ExtractionDimensionSpec(ColumnHolder.TIME_COLUMN_NAME, "time_alias", jsExtractionFn)) .metric("maxIndex") .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( @@ -5308,7 +5308,7 @@ public class TopNQueryRunnerTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -5318,7 +5318,7 @@ public class TopNQueryRunnerTest Arrays.>asList( ImmutableMap.builder() .put("time_alias", "super-1296345600000") - .put(QueryRunnerTestHelper.indexMetric, 5497.331253051758D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 5497.331253051758D) .put("rows", 13L) .put("addRowsIndexConstant", 5511.331253051758D) .put("uniques", QueryRunnerTestHelper.UNIQUES_9) @@ -5327,7 +5327,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("time_alias", "super-1298678400000") - .put(QueryRunnerTestHelper.indexMetric, 6541.463027954102D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 6541.463027954102D) .put("rows", 13L) .put("addRowsIndexConstant", 6555.463027954102D) .put("uniques", QueryRunnerTestHelper.UNIQUES_9) @@ -5336,7 +5336,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("time_alias", 
"super-1301529600000") - .put(QueryRunnerTestHelper.indexMetric, 6814.467971801758D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 6814.467971801758D) .put("rows", 13L) .put("addRowsIndexConstant", 6828.467971801758D) .put("uniques", QueryRunnerTestHelper.UNIQUES_9) @@ -5345,7 +5345,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("time_alias", "super-1294876800000") - .put(QueryRunnerTestHelper.indexMetric, 6077.949111938477D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 6077.949111938477D) .put("rows", 13L) .put("addRowsIndexConstant", 6091.949111938477D) .put("uniques", QueryRunnerTestHelper.UNIQUES_9) @@ -5366,16 +5366,16 @@ public class TopNQueryRunnerTest ExtractionFn jsExtractionFn = new JavaScriptExtractionFn(jsFn, false, JavaScriptConfig.getEnabledInstance()); TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension(new ExtractionDimensionSpec( - QueryRunnerTestHelper.marketDimension, - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, + QueryRunnerTestHelper.MARKET_DIMENSION, jsExtractionFn )) - .metric(QueryRunnerTestHelper.indexMetric) + .metric(QueryRunnerTestHelper.INDEX_METRIC) .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( @@ -5387,11 +5387,11 @@ public class TopNQueryRunnerTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); Map expectedMap = new HashMap<>(); - expectedMap.put(QueryRunnerTestHelper.marketDimension, null); + expectedMap.put(QueryRunnerTestHelper.MARKET_DIMENSION, null); expectedMap.put("rows", 1209L); expectedMap.put("index", 503332.5071372986D); expectedMap.put("addRowsIndexConstant", 504542.5071372986D); @@ -5418,12 +5418,12 @@ public class TopNQueryRunnerTest ExtractionFn strlenFn = StrlenExtractionFn.instance(); TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(new ExtractionDimensionSpec(QueryRunnerTestHelper.qualityDimension, "alias", ValueType.LONG, strlenFn)) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(new ExtractionDimensionSpec(QueryRunnerTestHelper.QUALITY_DIMENSION, "alias", ValueType.LONG, strlenFn)) .metric("maxIndex") .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( @@ -5435,7 +5435,7 @@ public class TopNQueryRunnerTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -5445,7 +5445,7 @@ public class TopNQueryRunnerTest Arrays.>asList( ImmutableMap.builder() .put("alias", 9L) - .put(QueryRunnerTestHelper.indexMetric, 217725.41940800005D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 217725.41940800005D) .put("rows", 279L) .put("addRowsIndexConstant", 218005.41940800005D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -5454,7 +5454,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("alias", 7L) - 
.put(QueryRunnerTestHelper.indexMetric, 210865.67977600006D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 210865.67977600006D) .put("rows", 279L) .put("addRowsIndexConstant", 211145.67977600006D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -5463,7 +5463,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("alias", 10L) - .put(QueryRunnerTestHelper.indexMetric, 20479.497562408447D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 20479.497562408447D) .put("rows", 186L) .put("addRowsIndexConstant", 20666.497562408447D) .put("uniques", QueryRunnerTestHelper.UNIQUES_2) @@ -5472,7 +5472,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("alias", 13L) - .put(QueryRunnerTestHelper.indexMetric, 12086.472791D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 12086.472791D) .put("rows", 93L) .put("addRowsIndexConstant", 12180.472791D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -5496,12 +5496,12 @@ public class TopNQueryRunnerTest ); TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension(filteredSpec) .metric("maxIndex") .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( @@ -5513,7 +5513,7 @@ public class TopNQueryRunnerTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -5523,7 +5523,7 @@ public class TopNQueryRunnerTest Arrays.>asList( ImmutableMap.builder() .put("qns_alias", 140000L) - .put(QueryRunnerTestHelper.indexMetric, 217725.41940800005D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 217725.41940800005D) .put("rows", 279L) .put("addRowsIndexConstant", 218005.41940800005D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -5532,7 +5532,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("qns_alias", 160000L) - .put(QueryRunnerTestHelper.indexMetric, 210865.67977600006D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 210865.67977600006D) .put("rows", 279L) .put("addRowsIndexConstant", 211145.67977600006D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -5541,7 +5541,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("qns_alias", 120000L) - .put(QueryRunnerTestHelper.indexMetric, 12086.472791D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 12086.472791D) .put("rows", 93L) .put("addRowsIndexConstant", 12180.472791D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -5565,12 +5565,12 @@ public class TopNQueryRunnerTest ); TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) .dimension(filteredSpec) .metric("maxIndex") .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators( Lists.newArrayList( Iterables.concat( @@ -5582,7 +5582,7 @@ public class TopNQueryRunnerTest ) ) ) - .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant) + .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT) .build(); List> expectedResults = Collections.singletonList( @@ -5592,7 +5592,7 @@ 
public class TopNQueryRunnerTest Arrays.>asList( ImmutableMap.builder() .put("ql_alias", 1400L) - .put(QueryRunnerTestHelper.indexMetric, 217725.41940800005D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 217725.41940800005D) .put("rows", 279L) .put("addRowsIndexConstant", 218005.41940800005D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -5601,7 +5601,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("ql_alias", 1600L) - .put(QueryRunnerTestHelper.indexMetric, 210865.67977600006D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 210865.67977600006D) .put("rows", 279L) .put("addRowsIndexConstant", 211145.67977600006D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -5610,7 +5610,7 @@ public class TopNQueryRunnerTest .build(), ImmutableMap.builder() .put("ql_alias", 1200L) - .put(QueryRunnerTestHelper.indexMetric, 12086.472791D) + .put(QueryRunnerTestHelper.INDEX_METRIC, 12086.472791D) .put("rows", 93L) .put("addRowsIndexConstant", 12180.472791D) .put("uniques", QueryRunnerTestHelper.UNIQUES_1) @@ -5629,16 +5629,16 @@ public class TopNQueryRunnerTest { List>> aggregations = new ArrayList<>(); aggregations.add(new Pair<>( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, Longs.asList(186L, 186L, 837L) )); Pair> indexAggregation = new Pair<>( - QueryRunnerTestHelper.indexDoubleSum, + QueryRunnerTestHelper.INDEX_DOUBLE_SUM, Doubles.asList(215679.82879638672D, 192046.1060180664D, 95606.57232284546D) ); aggregations.add(indexAggregation); aggregations.add(new Pair<>( - QueryRunnerTestHelper.qualityUniques, + QueryRunnerTestHelper.QUALITY_UNIQUES, Doubles.asList(QueryRunnerTestHelper.UNIQUES_2, QueryRunnerTestHelper.UNIQUES_2, QueryRunnerTestHelper.UNIQUES_9) )); aggregations.add(new Pair<>( @@ -5676,11 +5676,11 @@ public class TopNQueryRunnerTest boolean hasIndexAggregator = aggregationCombination.stream().anyMatch(agg -> "index".equals(agg.lhs.getName())); boolean hasRowsAggregator = aggregationCombination.stream().anyMatch(agg -> "rows".equals(agg.lhs.getName())); TopNQueryBuilder queryBuilder = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.allGran) - .dimension(QueryRunnerTestHelper.marketDimension) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.ALL_GRAN) + .dimension(QueryRunnerTestHelper.MARKET_DIMENSION) .threshold(4) - .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC) .aggregators(aggregationCombination.stream().map(agg -> agg.lhs).collect(Collectors.toList())); String metric; if (hasIndexAggregator) { @@ -5690,16 +5690,16 @@ public class TopNQueryRunnerTest } queryBuilder.metric(metric); if (hasIndexAggregator && hasRowsAggregator) { - queryBuilder.postAggregators(QueryRunnerTestHelper.addRowsIndexConstant); + queryBuilder.postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT); } TopNQuery query = queryBuilder.build(); ImmutableMap.Builder row1 = ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "total_market"); + .put(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market"); ImmutableMap.Builder row2 = ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "upfront"); + .put(QueryRunnerTestHelper.MARKET_DIMENSION, "upfront"); ImmutableMap.Builder row3 = ImmutableMap.builder() - .put(QueryRunnerTestHelper.marketDimension, "spot"); + .put(QueryRunnerTestHelper.MARKET_DIMENSION, "spot"); if (hasIndexAggregator && hasRowsAggregator) { 
 row1.put("addRowsIndexConstant", 215866.82879638672D);
 row2.put("addRowsIndexConstant", 192233.1060180664D);
@@ -5728,11 +5728,11 @@ public class TopNQueryRunnerTest
 {
   // this tests the stack overflow issue from https://github.com/apache/incubator-druid/issues/4628
   TopNQuery query = new TopNQueryBuilder()
-      .dataSource(QueryRunnerTestHelper.dataSource)
-      .granularity(QueryRunnerTestHelper.allGran)
-      .dimension(QueryRunnerTestHelper.marketDimension, "Market")
+      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+      .granularity(QueryRunnerTestHelper.ALL_GRAN)
+      .dimension(QueryRunnerTestHelper.MARKET_DIMENSION, "Market")
       .filters(new BoundDimFilter(
-          QueryRunnerTestHelper.indexMetric,
+          QueryRunnerTestHelper.INDEX_METRIC,
           "0",
           "46.64980229268867",
           true,
@@ -5743,7 +5743,7 @@ public class TopNQueryRunnerTest
       ))
       .metric("Count")
       .threshold(5)
-      .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+      .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
       .aggregators(new LongSumAggregatorFactory("Count", "qualityLong"))
       .build();
@@ -5763,8 +5763,8 @@ public class TopNQueryRunnerTest
 public void testTopNWithNonBitmapFilter()
 {
   TopNQuery query = new TopNQueryBuilder()
-      .dataSource(QueryRunnerTestHelper.dataSource)
-      .granularity(QueryRunnerTestHelper.allGran)
+      .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+      .granularity(QueryRunnerTestHelper.ALL_GRAN)
       .filters(new BoundDimFilter(
           ColumnHolder.TIME_COLUMN_NAME,
           "0",
@@ -5775,10 +5775,10 @@ public class TopNQueryRunnerTest
           null,
           StringComparators.NUMERIC
       ))
-      .dimension(QueryRunnerTestHelper.marketDimension)
+      .dimension(QueryRunnerTestHelper.MARKET_DIMENSION)
       .metric("count")
       .threshold(4)
-      .intervals(QueryRunnerTestHelper.firstToThird)
+      .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD)
       .aggregators(new DoubleSumAggregatorFactory("count", "qualityDouble"))
       .build();
diff --git a/processing/src/test/java/org/apache/druid/query/topn/TopNQueryTest.java b/processing/src/test/java/org/apache/druid/query/topn/TopNQueryTest.java
index 65496170dfe..e987f45c228 100644
--- a/processing/src/test/java/org/apache/druid/query/topn/TopNQueryTest.java
+++ b/processing/src/test/java/org/apache/druid/query/topn/TopNQueryTest.java
@@ -40,22 +40,22 @@ import java.io.IOException;
 public class TopNQueryTest
 {
-  private static final ObjectMapper jsonMapper = TestHelper.makeJsonMapper();
+  private static final ObjectMapper JSON_MAPPER = TestHelper.makeJsonMapper();

   @Test
   public void testQuerySerialization() throws IOException
   {
     Query query = new TopNQueryBuilder()
-        .dataSource(QueryRunnerTestHelper.dataSource)
-        .granularity(QueryRunnerTestHelper.allGran)
-        .dimension(QueryRunnerTestHelper.marketDimension)
-        .metric(QueryRunnerTestHelper.indexMetric)
+        .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+        .granularity(QueryRunnerTestHelper.ALL_GRAN)
+        .dimension(QueryRunnerTestHelper.MARKET_DIMENSION)
+        .metric(QueryRunnerTestHelper.INDEX_METRIC)
         .threshold(4)
-        .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+        .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
         .aggregators(
             Lists.newArrayList(
                 Iterables.concat(
-                    QueryRunnerTestHelper.commonDoubleAggregators,
+                    QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS,
                     Lists.newArrayList(
                         new DoubleMaxAggregatorFactory("maxIndex", "index"),
                         new DoubleMinAggregatorFactory("minIndex", "index")
@@ -63,11 +63,11 @@ public class TopNQueryTest
                 )
             )
         )
-        .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant)
+        .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
         .build();

-    String json = jsonMapper.writeValueAsString(query);
-    Query serdeQuery = jsonMapper.readValue(json, Query.class);
+    String json = JSON_MAPPER.writeValueAsString(query);
+    Query serdeQuery = JSON_MAPPER.readValue(json, Query.class);
     Assert.assertEquals(query, serdeQuery);
   }
@@ -77,28 +77,22 @@ public class TopNQueryTest
   public void testQuerySerdeWithLookupExtractionFn() throws IOException
   {
     final TopNQuery expectedQuery = new TopNQueryBuilder()
-        .dataSource(QueryRunnerTestHelper.dataSource)
-        .granularity(QueryRunnerTestHelper.allGran)
+        .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+        .granularity(QueryRunnerTestHelper.ALL_GRAN)
         .dimension(
             new ExtractionDimensionSpec(
-                QueryRunnerTestHelper.marketDimension,
-                QueryRunnerTestHelper.marketDimension,
-                new LookupExtractionFn(
-                    new MapLookupExtractor(ImmutableMap.of("foo", "bar"), false),
-                    true,
-                    null,
-                    false,
-                    false
-                )
+                QueryRunnerTestHelper.MARKET_DIMENSION,
+                QueryRunnerTestHelper.MARKET_DIMENSION,
+                new LookupExtractionFn(new MapLookupExtractor(ImmutableMap.of("foo", "bar"), false), true, null, false, false)
             )
         )
-        .metric(new NumericTopNMetricSpec(QueryRunnerTestHelper.indexMetric))
+        .metric(new NumericTopNMetricSpec(QueryRunnerTestHelper.INDEX_METRIC))
         .threshold(2)
-        .intervals(QueryRunnerTestHelper.fullOnIntervalSpec.getIntervals())
+        .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC.getIntervals())
         .aggregators(
             Lists.newArrayList(
                 Iterables.concat(
-                    QueryRunnerTestHelper.commonDoubleAggregators,
+                    QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS,
                     Lists.newArrayList(
                         new DoubleMaxAggregatorFactory("maxIndex", "index"),
                         new DoubleMinAggregatorFactory("minIndex", "index")
@@ -107,21 +101,21 @@ public class TopNQueryTest
                 )
             )
         )
         .build();
-    final String str = jsonMapper.writeValueAsString(expectedQuery);
-    Assert.assertEquals(expectedQuery, jsonMapper.readValue(str, TopNQuery.class));
+    final String str = JSON_MAPPER.writeValueAsString(expectedQuery);
+    Assert.assertEquals(expectedQuery, JSON_MAPPER.readValue(str, TopNQuery.class));
   }

   @Test
   public void testQuerySerdeWithAlphaNumericTopNMetricSpec() throws IOException
   {
     TopNQuery expectedQuery = new TopNQueryBuilder()
-        .dataSource(QueryRunnerTestHelper.dataSource)
-        .granularity(QueryRunnerTestHelper.allGran)
-        .dimension(new LegacyDimensionSpec(QueryRunnerTestHelper.marketDimension))
+        .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+        .granularity(QueryRunnerTestHelper.ALL_GRAN)
+        .dimension(new LegacyDimensionSpec(QueryRunnerTestHelper.MARKET_DIMENSION))
         .metric(new DimensionTopNMetricSpec(null, StringComparators.ALPHANUMERIC))
         .threshold(2)
-        .intervals(QueryRunnerTestHelper.fullOnIntervalSpec.getIntervals())
-        .aggregators(QueryRunnerTestHelper.rowsCount)
+        .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC.getIntervals())
+        .aggregators(QueryRunnerTestHelper.ROWS_COUNT)
         .build();
     String jsonQuery = "{\n"
                        + "  \"queryType\": \"topN\",\n"
@@ -143,9 +137,9 @@ public class TopNQueryTest
                        + "    \"1970-01-01T00:00:00.000Z/2020-01-01T00:00:00.000Z\"\n"
                        + "  ]\n"
                        + "}";
-    TopNQuery actualQuery = jsonMapper.readValue(
-        jsonMapper.writeValueAsString(
-            jsonMapper.readValue(
+    TopNQuery actualQuery = JSON_MAPPER.readValue(
+        JSON_MAPPER.writeValueAsString(
+            JSON_MAPPER.readValue(
                 jsonQuery,
                 TopNQuery.class
             )
diff --git a/processing/src/test/java/org/apache/druid/query/topn/TopNUnionQueryTest.java b/processing/src/test/java/org/apache/druid/query/topn/TopNUnionQueryTest.java
index 897b4832858..42e6a8e8244 100644
--- a/processing/src/test/java/org/apache/druid/query/topn/TopNUnionQueryTest.java
+++ b/processing/src/test/java/org/apache/druid/query/topn/TopNUnionQueryTest.java
@@ -48,12 +48,12 @@ import java.util.Map;
 @RunWith(Parameterized.class)
 public class TopNUnionQueryTest
 {
-  private static final Closer resourceCloser = Closer.create();
+  private static final Closer RESOURCE_CLOSER = Closer.create();

   @AfterClass
   public static void teardown() throws IOException
   {
-    resourceCloser.close();
+    RESOURCE_CLOSER.close();
   }

   @Parameterized.Parameters(name = "{0}")
@@ -104,16 +104,16 @@ public class TopNUnionQueryTest
   public void testTopNUnionQuery()
   {
     TopNQuery query = new TopNQueryBuilder()
-        .dataSource(QueryRunnerTestHelper.unionDataSource)
-        .granularity(QueryRunnerTestHelper.allGran)
-        .dimension(QueryRunnerTestHelper.marketDimension)
+        .dataSource(QueryRunnerTestHelper.UNION_DATA_SOURCE)
+        .granularity(QueryRunnerTestHelper.ALL_GRAN)
+        .dimension(QueryRunnerTestHelper.MARKET_DIMENSION)
         .metric(QueryRunnerTestHelper.dependentPostAggMetric)
         .threshold(4)
-        .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+        .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
         .aggregators(
             Lists.newArrayList(
                 Iterables.concat(
-                    QueryRunnerTestHelper.commonDoubleAggregators,
+                    QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS,
                     Lists.newArrayList(
                         new DoubleMaxAggregatorFactory("maxIndex", "index"),
                         new DoubleMinAggregatorFactory("minIndex", "index")
@@ -122,8 +122,8 @@ public class TopNUnionQueryTest
                 )
             )
         )
         .postAggregators(
-            QueryRunnerTestHelper.addRowsIndexConstant,
-            QueryRunnerTestHelper.dependentPostAgg,
+            QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT,
+            QueryRunnerTestHelper.DEPENDENT_POST_AGG,
             QueryRunnerTestHelper.hyperUniqueFinalizingPostAgg
         )
         .build();
@@ -134,7 +134,7 @@ public class TopNUnionQueryTest
         new TopNResultValue(
             Arrays.<Map<String, Object>>asList(
                 ImmutableMap.<String, Object>builder()
-                    .put(QueryRunnerTestHelper.marketDimension, "total_market")
+                    .put(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market")
                     .put("rows", 744L)
                     .put("index", 862719.3151855469D)
                     .put("addRowsIndexConstant", 863464.3151855469D)
@@ -143,12 +143,12 @@ public class TopNUnionQueryTest
                     .put("maxIndex", 1743.9217529296875D)
                     .put("minIndex", 792.3260498046875D)
                     .put(
-                        QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric,
+                        QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC,
                         QueryRunnerTestHelper.UNIQUES_2 + 1.0
                     )
                     .build(),
                 ImmutableMap.<String, Object>builder()
-                    .put(QueryRunnerTestHelper.marketDimension, "upfront")
+                    .put(QueryRunnerTestHelper.MARKET_DIMENSION, "upfront")
                     .put("rows", 744L)
                     .put("index", 768184.4240722656D)
                     .put("addRowsIndexConstant", 768929.4240722656D)
@@ -157,19 +157,19 @@ public class TopNUnionQueryTest
                     .put("maxIndex", 1870.06103515625D)
                     .put("minIndex", 545.9906005859375D)
                     .put(
-                        QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric,
+                        QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC,
                         QueryRunnerTestHelper.UNIQUES_2 + 1.0
                     )
                     .build(),
                 ImmutableMap.<String, Object>builder()
-                    .put(QueryRunnerTestHelper.marketDimension, "spot")
+                    .put(QueryRunnerTestHelper.MARKET_DIMENSION, "spot")
                     .put("rows", 3348L)
                     .put("index", 382426.28929138184D)
                     .put("addRowsIndexConstant", 385775.28929138184D)
                     .put(QueryRunnerTestHelper.dependentPostAggMetric, 389124.28929138184D)
                     .put("uniques", QueryRunnerTestHelper.UNIQUES_9)
                     .put(
-                        QueryRunnerTestHelper.hyperUniqueFinalizingPostAggMetric,
+                        QueryRunnerTestHelper.HYPER_UNIQUE_FINALIZING_POST_AGG_METRIC,
                         QueryRunnerTestHelper.UNIQUES_9 + 1.0
                     )
                     .put("maxIndex", 277.2735290527344D)
diff --git a/processing/src/test/java/org/apache/druid/segment/AppendTest.java b/processing/src/test/java/org/apache/druid/segment/AppendTest.java
index 7bea453e658..e64b06122da 100644
--- a/processing/src/test/java/org/apache/druid/segment/AppendTest.java
+++ b/processing/src/test/java/org/apache/druid/segment/AppendTest.java
@@ -87,7 +87,7 @@ public class AppendTest
   };

   final String dataSource = "testing";
-  final Granularity allGran = Granularities.ALL;
+  final Granularity ALL_GRAN = Granularities.ALL;
   final String marketDimension = "market";
   final String qualityDimension = "quality";
   final String placementDimension = "placement";
@@ -554,7 +554,7 @@ public class AppendTest
     TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
         .dataSource(dataSource)
-        .granularity(allGran)
+        .granularity(ALL_GRAN)
         .intervals(fullOnInterval)
         .filters(marketDimension, "breakstuff")
         .aggregators(
@@ -578,7 +578,7 @@ public class AppendTest
   {
     return Druids.newTimeseriesQueryBuilder()
         .dataSource(dataSource)
-        .granularity(allGran)
+        .granularity(ALL_GRAN)
         .intervals(fullOnInterval)
         .aggregators(
             Lists.newArrayList(
@@ -599,7 +599,7 @@ public class AppendTest
   {
     return Druids.newTimeseriesQueryBuilder()
         .dataSource(dataSource)
-        .granularity(allGran)
+        .granularity(ALL_GRAN)
         .intervals(fullOnInterval)
         .filters(
             new OrDimFilter(
@@ -626,7 +626,7 @@ public class AppendTest
   {
     return new TopNQueryBuilder()
         .dataSource(dataSource)
-        .granularity(allGran)
+        .granularity(ALL_GRAN)
         .dimension(marketDimension)
         .metric(indexMetric)
         .threshold(3)
@@ -650,7 +650,7 @@ public class AppendTest
   {
     return new TopNQueryBuilder()
         .dataSource(dataSource)
-        .granularity(allGran)
+        .granularity(ALL_GRAN)
         .dimension(marketDimension)
         .metric(indexMetric)
         .threshold(3)
@@ -680,7 +680,7 @@ public class AppendTest
   {
     return Druids.newSearchQueryBuilder()
         .dataSource(dataSource)
-        .granularity(allGran)
+        .granularity(ALL_GRAN)
         .intervals(fullOnInterval)
         .query("a")
         .build();
@@ -691,7 +691,7 @@ public class AppendTest
     return Druids.newSearchQueryBuilder()
         .dataSource(dataSource)
         .filters(new NotDimFilter(new SelectorDimFilter(marketDimension, "spot", null)))
-        .granularity(allGran)
+        .granularity(ALL_GRAN)
         .intervals(fullOnInterval)
         .query("a")
         .build();
   }
diff --git a/processing/src/test/java/org/apache/druid/segment/SchemalessIndexTest.java b/processing/src/test/java/org/apache/druid/segment/SchemalessIndexTest.java
index 43a9ba59c2a..0cf4d0dd134 100644
--- a/processing/src/test/java/org/apache/druid/segment/SchemalessIndexTest.java
+++ b/processing/src/test/java/org/apache/druid/segment/SchemalessIndexTest.java
@@ -67,9 +67,9 @@ import java.util.Map;
 public class SchemalessIndexTest
 {
   private static final Logger log = new Logger(SchemalessIndexTest.class);
-  private static final ObjectMapper jsonMapper = new DefaultObjectMapper();
+  private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper();

-  private static final String testFile = "druid.sample.json";
+  private static final String TEST_FILE = "druid.sample.json";
   private static final String TIMESTAMP = "timestamp";
   private static final List<String> METRICS = Collections.singletonList("index");
   private static final AggregatorFactory[] METRIC_AGGS = new AggregatorFactory[]{
@@ -82,13 +82,13 @@ public class SchemalessIndexTest
       new CountAggregatorFactory("count")
   };

-  private static final IndexSpec indexSpec = new IndexSpec();
+  private static final IndexSpec INDEX_SPEC = new IndexSpec();

-  private static final List<Map<String, Object>> events = new ArrayList<>();
+  private static final List<Map<String, Object>> EVENTS = new ArrayList<>();

-  private static final Map<Integer, Map<Integer, QueryableIndex>> incrementalIndexes = new HashMap<>();
-  private static final Map<Integer, Map<Integer, QueryableIndex>> mergedIndexes = new HashMap<>();
-  private static final List<QueryableIndex> rowPersistedIndexes = new ArrayList<>();
+  private static final Map<Integer, Map<Integer, QueryableIndex>> INCREMENTAL_INDEXES = new HashMap<>();
+  private static final Map<Integer, Map<Integer, QueryableIndex>> MERGED_INDEXES = new HashMap<>();
+  private static final List<QueryableIndex> ROW_PERSISTED_INDEXES = new ArrayList<>();

   private static IncrementalIndex index = null;
   private static QueryableIndex mergedIndex = null;
@@ -113,7 +113,7 @@ public class SchemalessIndexTest
       return index;
     }

-    index = makeIncrementalIndex(testFile, METRIC_AGGS);
+    index = makeIncrementalIndex(TEST_FILE, METRIC_AGGS);

     return index;
   }
@@ -122,11 +122,11 @@ public class SchemalessIndexTest
   public static QueryableIndex getIncrementalIndex(int index1, int index2)
   {
     synchronized (log) {
-      if (events.isEmpty()) {
+      if (EVENTS.isEmpty()) {
         makeEvents();
       }

-      Map<Integer, QueryableIndex> entry = incrementalIndexes.get(index1);
+      Map<Integer, QueryableIndex> entry = INCREMENTAL_INDEXES.get(index1);
       if (entry != null) {
         QueryableIndex index = entry.get(index2);
         if (index != null) {
@@ -134,13 +134,13 @@ public class SchemalessIndexTest
         }
       } else {
         entry = new HashMap<>();
-        incrementalIndexes.put(index1, entry);
+        INCREMENTAL_INDEXES.put(index1, entry);
       }

       IncrementalIndex theIndex = null;

       int count = 0;
-      for (final Map<String, Object> event : events) {
+      for (final Map<String, Object> event : EVENTS) {
         if (count != index1 && count != index2) {
           count++;
           continue;
         }
@@ -208,8 +208,8 @@ public class SchemalessIndexTest
       mergedFile.mkdirs();
       mergedFile.deleteOnExit();

-      indexMerger.persist(top, topFile, indexSpec, null);
-      indexMerger.persist(bottom, bottomFile, indexSpec, null);
+      indexMerger.persist(top, topFile, INDEX_SPEC, null);
+      indexMerger.persist(bottom, bottomFile, INDEX_SPEC, null);

       mergedIndex = indexIO.loadIndex(
           indexMerger.mergeQueryableIndex(
@@ -217,7 +217,7 @@ public class SchemalessIndexTest
               true,
               METRIC_AGGS,
               mergedFile,
-              indexSpec,
+              INDEX_SPEC,
               null
           )
       );
@@ -234,11 +234,11 @@ public class SchemalessIndexTest
   public QueryableIndex getMergedIncrementalIndex(int index1, int index2)
   {
     synchronized (log) {
-      if (rowPersistedIndexes.isEmpty()) {
+      if (ROW_PERSISTED_INDEXES.isEmpty()) {
         makeRowPersistedIndexes();
       }

-      Map<Integer, QueryableIndex> entry = mergedIndexes.get(index1);
+      Map<Integer, QueryableIndex> entry = MERGED_INDEXES.get(index1);
       if (entry != null) {
         QueryableIndex index = entry.get(index2);
         if (index != null) {
@@ -246,7 +246,7 @@ public class SchemalessIndexTest
         }
       } else {
         entry = new HashMap<>();
-        mergedIndexes.put(index1, entry);
+        MERGED_INDEXES.put(index1, entry);
       }

       try {
@@ -260,11 +260,11 @@ public class SchemalessIndexTest
         QueryableIndex index = indexIO.loadIndex(
             indexMerger.mergeQueryableIndex(
-                Arrays.asList(rowPersistedIndexes.get(index1), rowPersistedIndexes.get(index2)),
+                Arrays.asList(ROW_PERSISTED_INDEXES.get(index1), ROW_PERSISTED_INDEXES.get(index2)),
                 true,
                 METRIC_AGGS,
                 mergedFile,
-                indexSpec,
+                INDEX_SPEC,
                 null
             )
         );
@@ -282,7 +282,7 @@ public class SchemalessIndexTest
   public QueryableIndex getMergedIncrementalIndex(int[] indexes)
   {
     synchronized (log) {
-      if (rowPersistedIndexes.isEmpty()) {
+      if (ROW_PERSISTED_INDEXES.isEmpty()) {
         makeRowPersistedIndexes();
       }
@@ -297,11 +297,11 @@ public class SchemalessIndexTest
       List<QueryableIndex> indexesToMerge = new ArrayList<>();
       for (int index : indexes) {
-        indexesToMerge.add(rowPersistedIndexes.get(index));
+        indexesToMerge.add(ROW_PERSISTED_INDEXES.get(index));
       }

       return indexIO.loadIndex(
-          indexMerger.mergeQueryableIndex(indexesToMerge, true, METRIC_AGGS, mergedFile, indexSpec, null)
+          indexMerger.mergeQueryableIndex(indexesToMerge, true, METRIC_AGGS, mergedFile, INDEX_SPEC, null)
       );
     }
     catch (IOException e) {
@@ -335,13 +335,13 @@ public class SchemalessIndexTest
   private static void makeEvents()
   {
-    URL resource = TestIndex.class.getClassLoader().getResource(testFile);
+    URL resource = TestIndex.class.getClassLoader().getResource(TEST_FILE);
     String filename = resource.getFile();
     log.info("Realtime loading index file[%s]", filename);
     try {
-      for (Object obj : jsonMapper.readValue(new File(filename), List.class)) {
-        final Map<String, Object> event = jsonMapper.convertValue(obj, Map.class);
-        events.add(event);
+      for (Object obj : JSON_MAPPER.readValue(new File(filename), List.class)) {
+        final Map<String, Object> event = JSON_MAPPER.convertValue(obj, Map.class);
+        EVENTS.add(event);
       }
     }
     catch (Exception e) {
@@ -353,11 +353,11 @@
   {
     synchronized (log) {
       try {
-        if (events.isEmpty()) {
+        if (EVENTS.isEmpty()) {
           makeEvents();
         }

-        for (final Map<String, Object> event : events) {
+        for (final Map<String, Object> event : EVENTS) {
           final long timestamp = new DateTime(event.get(TIMESTAMP), ISOChronology.getInstanceUTC()).getMillis();
           final List<String> dims = new ArrayList<>();
@@ -387,8 +387,8 @@ public class SchemalessIndexTest
           tmpFile.mkdirs();
           tmpFile.deleteOnExit();

-          indexMerger.persist(rowIndex, tmpFile, indexSpec, null);
-          rowPersistedIndexes.add(indexIO.loadIndex(tmpFile));
+          indexMerger.persist(rowIndex, tmpFile, INDEX_SPEC, null);
+          ROW_PERSISTED_INDEXES.add(indexIO.loadIndex(tmpFile));
         }
       }
       catch (IOException e) {
@@ -416,9 +416,9 @@ public class SchemalessIndexTest
         .buildOnheap();

     try {
-      final List events = jsonMapper.readValue(new File(filename), List.class);
+      final List events = JSON_MAPPER.readValue(new File(filename), List.class);
       for (Object obj : events) {
-        final Map<String, Object> event = jsonMapper.convertValue(obj, Map.class);
+        final Map<String, Object> event = JSON_MAPPER.convertValue(obj, Map.class);

         final List<String> dims = new ArrayList<>();
         for (Map.Entry<String, Object> entry : event.entrySet()) {
@@ -453,7 +453,7 @@ public class SchemalessIndexTest
       theFile.mkdirs();
      theFile.deleteOnExit();
       filesToMap.add(theFile);
-      indexMerger.persist(index, theFile, indexSpec, null);
+      indexMerger.persist(index, theFile, INDEX_SPEC, null);
     }

     return filesToMap;
@@ -520,7 +520,7 @@ public class SchemalessIndexTest
         )
     );

-    return indexIO.loadIndex(indexMerger.append(adapters, null, mergedFile, indexSpec, null));
+    return indexIO.loadIndex(indexMerger.append(adapters, null, mergedFile, INDEX_SPEC, null));
   }
   catch (IOException e) {
     throw new RuntimeException(e);
@@ -561,7 +561,7 @@ public class SchemalessIndexTest
             true,
             METRIC_AGGS,
             mergedFile,
-            indexSpec,
+            INDEX_SPEC,
             null
         )
     );
diff --git a/processing/src/test/java/org/apache/druid/segment/SchemalessTestFullTest.java b/processing/src/test/java/org/apache/druid/segment/SchemalessTestFullTest.java
index a5487bcf67c..eb266304a88 100644
--- a/processing/src/test/java/org/apache/druid/segment/SchemalessTestFullTest.java
+++ b/processing/src/test/java/org/apache/druid/segment/SchemalessTestFullTest.java
@@ -89,7 +89,7 @@ public class SchemalessTestFullTest
   final SchemalessIndexTest schemalessIndexTest;
   final String dataSource = "testing";
-  final Granularity allGran = Granularities.ALL;
+  final Granularity ALL_GRAN = Granularities.ALL;
   final String marketDimension = "market";
   final String qualityDimension = "quality";
   final String placementDimension = "placement";
@@ -1448,7 +1448,7 @@ public class SchemalessTestFullTest
   {
     TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
         .dataSource(dataSource)
-        .granularity(allGran)
+        .granularity(ALL_GRAN)
         .intervals(fullOnInterval)
.aggregators( Lists.newArrayList( @@ -1477,7 +1477,7 @@ public class SchemalessTestFullTest { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource(dataSource) - .granularity(allGran) + .granularity(ALL_GRAN) .intervals(fullOnInterval) .filters(marketDimension, "spot") .aggregators( @@ -1505,7 +1505,7 @@ public class SchemalessTestFullTest { TopNQuery query = new TopNQueryBuilder() .dataSource(dataSource) - .granularity(allGran) + .granularity(ALL_GRAN) .dimension(marketDimension) .metric(indexMetric) .threshold(3) @@ -1536,7 +1536,7 @@ public class SchemalessTestFullTest { TopNQuery query = new TopNQueryBuilder() .dataSource(dataSource) - .granularity(allGran) + .granularity(ALL_GRAN) .dimension(marketDimension) .filters(marketDimension, "spot") .metric(indexMetric) @@ -1565,7 +1565,7 @@ public class SchemalessTestFullTest { SearchQuery query = Druids.newSearchQueryBuilder() .dataSource(dataSource) - .granularity(allGran) + .granularity(ALL_GRAN) .intervals(fullOnInterval) .query("a") .build(); @@ -1579,7 +1579,7 @@ public class SchemalessTestFullTest { SearchQuery query = Druids.newSearchQueryBuilder() .dataSource(dataSource) - .granularity(allGran) + .granularity(ALL_GRAN) .filters(marketDimension, "spot") .intervals(fullOnInterval) .query("a") diff --git a/processing/src/test/java/org/apache/druid/segment/SchemalessTestSimpleTest.java b/processing/src/test/java/org/apache/druid/segment/SchemalessTestSimpleTest.java index 4cea909617c..590ba5b5995 100644 --- a/processing/src/test/java/org/apache/druid/segment/SchemalessTestSimpleTest.java +++ b/processing/src/test/java/org/apache/druid/segment/SchemalessTestSimpleTest.java @@ -91,7 +91,7 @@ public class SchemalessTestSimpleTest } final String dataSource = "testing"; - final Granularity allGran = Granularities.ALL; + final Granularity ALL_GRAN = Granularities.ALL; final String marketDimension = "market"; final String qualityDimension = "quality"; final String placementDimension = "placement"; @@ -128,7 +128,7 @@ public class SchemalessTestSimpleTest { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() .dataSource(dataSource) - .granularity(allGran) + .granularity(ALL_GRAN) .intervals(fullOnInterval) .aggregators( Lists.newArrayList( @@ -172,7 +172,7 @@ public class SchemalessTestSimpleTest { TopNQuery query = new TopNQueryBuilder() .dataSource(dataSource) - .granularity(allGran) + .granularity(ALL_GRAN) .dimension(marketDimension) .metric(indexMetric) .threshold(3) @@ -245,7 +245,7 @@ public class SchemalessTestSimpleTest { SearchQuery query = Druids.newSearchQueryBuilder() .dataSource(dataSource) - .granularity(allGran) + .granularity(ALL_GRAN) .intervals(fullOnInterval) .query("a") .build(); diff --git a/processing/src/test/java/org/apache/druid/segment/TestIndex.java b/processing/src/test/java/org/apache/druid/segment/TestIndex.java index 556ea2b4f2a..9470b59903d 100644 --- a/processing/src/test/java/org/apache/druid/segment/TestIndex.java +++ b/processing/src/test/java/org/apache/druid/segment/TestIndex.java @@ -153,7 +153,7 @@ public class TestIndex new DoubleMaxAggregatorFactory(DOUBLE_METRICS[2], VIRTUAL_COLUMNS.getVirtualColumns()[0].getOutputName()), new HyperUniquesAggregatorFactory("quality_uniques", "quality") }; - private static final IndexSpec indexSpec = new IndexSpec(); + private static final IndexSpec INDEX_SPEC = new IndexSpec(); private static final IndexMerger INDEX_MERGER = TestHelper.getTestIndexMergerV9(OffHeapMemorySegmentWriteOutMediumFactory.instance()); @@ -200,8 +200,8 @@ public class 
TestIndex mergedFile.mkdirs(); mergedFile.deleteOnExit(); - INDEX_MERGER.persist(top, DATA_INTERVAL, topFile, indexSpec, null); - INDEX_MERGER.persist(bottom, DATA_INTERVAL, bottomFile, indexSpec, null); + INDEX_MERGER.persist(top, DATA_INTERVAL, topFile, INDEX_SPEC, null); + INDEX_MERGER.persist(bottom, DATA_INTERVAL, bottomFile, INDEX_SPEC, null); return INDEX_IO.loadIndex( INDEX_MERGER.mergeQueryableIndex( @@ -209,7 +209,7 @@ public class TestIndex true, METRIC_AGGS, mergedFile, - indexSpec, + INDEX_SPEC, null ) ); @@ -376,7 +376,7 @@ public class TestIndex someTmpFile.mkdirs(); someTmpFile.deleteOnExit(); - INDEX_MERGER.persist(index, someTmpFile, indexSpec, null); + INDEX_MERGER.persist(index, someTmpFile, INDEX_SPEC, null); return INDEX_IO.loadIndex(someTmpFile); } catch (IOException e) { diff --git a/processing/src/test/java/org/apache/druid/segment/data/BitmapCreationBenchmark.java b/processing/src/test/java/org/apache/druid/segment/data/BitmapCreationBenchmark.java index 386acd6b431..ba524d8ce5d 100644 --- a/processing/src/test/java/org/apache/druid/segment/data/BitmapCreationBenchmark.java +++ b/processing/src/test/java/org/apache/druid/segment/data/BitmapCreationBenchmark.java @@ -71,11 +71,11 @@ public class BitmapCreationBenchmark extends AbstractBenchmark factory = serdeFactory.getBitmapFactory(); } - private static final int numBits = 100000; + private static final int NUM_BITS = 100000; static Random random; - static int[] randIndex = new int[numBits]; + static int[] randIndex = new int[NUM_BITS]; @AfterClass public static void cleanupAfterClass() @@ -89,12 +89,12 @@ public class BitmapCreationBenchmark extends AbstractBenchmark @BeforeClass public static void setupBeforeClass() { - for (int i = 0; i < numBits; ++i) { + for (int i = 0; i < NUM_BITS; ++i) { randIndex[i] = i; } // Random seed chosen by hitting keyboard with BOTH hands... multiple times! 
 random = new Random(78591378);
-    for (int i = 0; i < numBits; ++i) {
+    for (int i = 0; i < NUM_BITS; ++i) {
       int idex = random.nextInt(randIndex.length);
       int swap = randIndex[i];
       randIndex[i] = randIndex[idex];
@@ -111,7 +111,7 @@ public class BitmapCreationBenchmark extends AbstractBenchmark
   public void setup()
   {
     baseMutableBitmap = factory.makeEmptyMutableBitmap();
-    for (int i = 0; i < numBits; ++i) {
+    for (int i = 0; i < NUM_BITS; ++i) {
       baseMutableBitmap.add(i);
     }
     baseImmutableBitmap = factory.makeImmutableBitmap(baseMutableBitmap);
@@ -125,10 +125,10 @@ public class BitmapCreationBenchmark extends AbstractBenchmark
   public void testLinearAddition()
   {
     MutableBitmap mutableBitmap = factory.makeEmptyMutableBitmap();
-    for (int i = 0; i < numBits; ++i) {
+    for (int i = 0; i < NUM_BITS; ++i) {
       mutableBitmap.add(i);
     }
-    Assert.assertEquals(numBits, mutableBitmap.size());
+    Assert.assertEquals(NUM_BITS, mutableBitmap.size());
   }

   @BenchmarkOptions(warmupRounds = 10, benchmarkRounds = 10)
@@ -139,7 +139,7 @@
     for (int i : randIndex) {
       mutableBitmap.add(i);
     }
-    Assert.assertEquals(numBits, mutableBitmap.size());
+    Assert.assertEquals(NUM_BITS, mutableBitmap.size());
   }

   @BenchmarkOptions(warmupRounds = 10, benchmarkRounds = 1000)
@@ -147,10 +147,10 @@
   public void testLinearAdditionDescending()
   {
     MutableBitmap mutableBitmap = factory.makeEmptyMutableBitmap();
-    for (int i = numBits - 1; i >= 0; --i) {
+    for (int i = NUM_BITS - 1; i >= 0; --i) {
       mutableBitmap.add(i);
     }
-    Assert.assertEquals(numBits, mutableBitmap.size());
+    Assert.assertEquals(NUM_BITS, mutableBitmap.size());
   }

@@ -168,7 +168,7 @@ public class BitmapCreationBenchmark extends AbstractBenchmark
   public void testFromImmutableByteArray()
   {
     ImmutableBitmap immutableBitmap = factory.mapImmutableBitmap(baseByteBuffer);
-    Assert.assertEquals(numBits, immutableBitmap.size());
+    Assert.assertEquals(NUM_BITS, immutableBitmap.size());
   }
 }
diff --git a/processing/src/test/java/org/apache/druid/segment/data/CompressedLongsAutoEncodingSerdeTest.java b/processing/src/test/java/org/apache/druid/segment/data/CompressedLongsAutoEncodingSerdeTest.java
index 8fe0bf8ac3a..07a24c3f558 100644
--- a/processing/src/test/java/org/apache/druid/segment/data/CompressedLongsAutoEncodingSerdeTest.java
+++ b/processing/src/test/java/org/apache/druid/segment/data/CompressedLongsAutoEncodingSerdeTest.java
@@ -41,7 +41,7 @@ public class CompressedLongsAutoEncodingSerdeTest
   public static Iterable<Object[]> compressionStrategies()
   {
     List<Object[]> data = new ArrayList<>();
-    for (long bpv : bitsPerValueParameters) {
+    for (long bpv : BITS_PER_VALUE_PARAMETERS) {
       for (CompressionStrategy strategy : CompressionStrategy.values()) {
         data.add(new Object[]{bpv, strategy, ByteOrder.BIG_ENDIAN});
         data.add(new Object[]{bpv, strategy, ByteOrder.LITTLE_ENDIAN});
@@ -50,7 +50,7 @@
     return data;
   }

-  private static final long[] bitsPerValueParameters = new long[]{1, 2, 4, 7, 11, 14, 18, 23, 31, 39, 46, 55, 62};
+  private static final long[] BITS_PER_VALUE_PARAMETERS = new long[]{1, 2, 4, 7, 11, 14, 18, 23, 31, 39, 46, 55, 62};
   protected final CompressionFactory.LongEncodingStrategy encodingStrategy = CompressionFactory.LongEncodingStrategy.AUTO;
   protected final CompressionStrategy compressionStrategy;
diff --git a/processing/src/test/java/org/apache/druid/segment/data/IncrementalIndexTest.java
b/processing/src/test/java/org/apache/druid/segment/data/IncrementalIndexTest.java index 319e66c81b9..b254811094b 100644 --- a/processing/src/test/java/org/apache/druid/segment/data/IncrementalIndexTest.java +++ b/processing/src/test/java/org/apache/druid/segment/data/IncrementalIndexTest.java @@ -99,12 +99,12 @@ public class IncrementalIndexTest IncrementalIndex createIndex(AggregatorFactory[] aggregatorFactories); } - private static final Closer resourceCloser = Closer.create(); + private static final Closer RESOURCE_CLOSER = Closer.create(); @AfterClass public static void teardown() throws IOException { - resourceCloser.close(); + RESOURCE_CLOSER.close(); } private final IndexCreator indexCreator; @@ -126,7 +126,7 @@ public class IncrementalIndexTest "OffheapIncrementalIndex-bufferPool", () -> ByteBuffer.allocate(256 * 1024) ); - resourceCloser.register(pool1); + RESOURCE_CLOSER.register(pool1); params.add( new Object[] { (IndexCreator) factories -> new Builder() @@ -140,7 +140,7 @@ public class IncrementalIndexTest "OffheapIncrementalIndex-bufferPool", () -> ByteBuffer.allocate(256 * 1024) ); - resourceCloser.register(pool2); + RESOURCE_CLOSER.register(pool2); params.add( new Object[] { (IndexCreator) factories -> new Builder() @@ -160,7 +160,7 @@ public class IncrementalIndexTest public static AggregatorFactory[] getDefaultCombiningAggregatorFactories() { - return defaultCombiningAggregatorFactories; + return DEFAULT_COMBINING_AGGREGATOR_FACTORIES; } public static IncrementalIndex createIndex( @@ -169,7 +169,7 @@ public class IncrementalIndexTest ) { if (null == aggregatorFactories) { - aggregatorFactories = defaultAggregatorFactories; + aggregatorFactories = DEFAULT_AGGREGATOR_FACTORIES; } return new IncrementalIndex.Builder() @@ -186,7 +186,7 @@ public class IncrementalIndexTest public static IncrementalIndex createIndex(AggregatorFactory[] aggregatorFactories) { if (null == aggregatorFactories) { - aggregatorFactories = defaultAggregatorFactories; + aggregatorFactories = DEFAULT_AGGREGATOR_FACTORIES; } return new IncrementalIndex.Builder() @@ -198,7 +198,7 @@ public class IncrementalIndexTest public static IncrementalIndex createNoRollupIndex(AggregatorFactory[] aggregatorFactories) { if (null == aggregatorFactories) { - aggregatorFactories = defaultAggregatorFactories; + aggregatorFactories = DEFAULT_AGGREGATOR_FACTORIES; } return new IncrementalIndex.Builder() @@ -250,21 +250,21 @@ public class IncrementalIndexTest return new MapBasedInputRow(timestamp, dimensionList, builder.build()); } - private static final AggregatorFactory[] defaultAggregatorFactories = new AggregatorFactory[]{ + private static final AggregatorFactory[] DEFAULT_AGGREGATOR_FACTORIES = new AggregatorFactory[]{ new CountAggregatorFactory( "count" ) }; - private static final AggregatorFactory[] defaultCombiningAggregatorFactories = new AggregatorFactory[]{ - defaultAggregatorFactories[0].getCombiningFactory() + private static final AggregatorFactory[] DEFAULT_COMBINING_AGGREGATOR_FACTORIES = new AggregatorFactory[]{ + DEFAULT_AGGREGATOR_FACTORIES[0].getCombiningFactory() }; @Test public void testCaseSensitivity() throws Exception { long timestamp = System.currentTimeMillis(); - IncrementalIndex index = closerRule.closeLater(indexCreator.createIndex(defaultAggregatorFactories)); + IncrementalIndex index = closerRule.closeLater(indexCreator.createIndex(DEFAULT_AGGREGATOR_FACTORIES)); populateIndex(timestamp, index); Assert.assertEquals(Arrays.asList("dim1", "dim2"), index.getDimensionNames()); @@ -675,7 +675,7 @@ 
public class IncrementalIndexTest @Test public void testConcurrentAdd() throws Exception { - final IncrementalIndex index = closerRule.closeLater(indexCreator.createIndex(defaultAggregatorFactories)); + final IncrementalIndex index = closerRule.closeLater(indexCreator.createIndex(DEFAULT_AGGREGATOR_FACTORIES)); final int threadCount = 10; final int elementsPerThread = 200; final int dimensionCount = 5; diff --git a/processing/src/test/java/org/apache/druid/segment/data/IndexedIntsTest.java b/processing/src/test/java/org/apache/druid/segment/data/IndexedIntsTest.java index c0c978382af..f39a03a45b1 100644 --- a/processing/src/test/java/org/apache/druid/segment/data/IndexedIntsTest.java +++ b/processing/src/test/java/org/apache/druid/segment/data/IndexedIntsTest.java @@ -32,7 +32,7 @@ import java.util.Collection; @RunWith(Parameterized.class) public class IndexedIntsTest { - private static final int[] array = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}; + private static final int[] ARRAY = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10}; private final IndexedInts indexed; @@ -41,8 +41,8 @@ public class IndexedIntsTest { return Arrays.asList( new Object[][]{ - {VSizeColumnarInts.fromArray(array)}, - {new ArrayBasedIndexedInts(array)} + {VSizeColumnarInts.fromArray(ARRAY)}, + {new ArrayBasedIndexedInts(ARRAY)} } ); } @@ -57,9 +57,9 @@ public class IndexedIntsTest @Test public void testSanity() { - Assert.assertEquals(array.length, indexed.size()); - for (int i = 0; i < array.length; i++) { - Assert.assertEquals(array[i], indexed.get(i)); + Assert.assertEquals(ARRAY.length, indexed.size()); + for (int i = 0; i < ARRAY.length; i++) { + Assert.assertEquals(ARRAY[i], indexed.get(i)); } } } diff --git a/processing/src/test/java/org/apache/druid/segment/filter/InvalidFilteringTest.java b/processing/src/test/java/org/apache/druid/segment/filter/InvalidFilteringTest.java index bbd3fd7271e..7b0f40503eb 100644 --- a/processing/src/test/java/org/apache/druid/segment/filter/InvalidFilteringTest.java +++ b/processing/src/test/java/org/apache/druid/segment/filter/InvalidFilteringTest.java @@ -64,20 +64,20 @@ public class InvalidFilteringTest extends BaseFilterTest ) ); - private static final InputRow row0 = PARSER.parseBatch(ImmutableMap.of("ts", 1L, "dim0", "1", "dim1", "", "dim2", ImmutableList.of("a", "b"))).get(0); - private static final InputRow row1 = PARSER.parseBatch(ImmutableMap.of("ts", 2L, "dim0", "2", "dim1", "10", "dim2", ImmutableList.of())).get(0); - private static final InputRow row2 = PARSER.parseBatch(ImmutableMap.of("ts", 3L, "dim0", "3", "dim1", "2", "dim2", ImmutableList.of(""))).get(0); - private static final InputRow row3 = PARSER.parseBatch(ImmutableMap.of("ts", 4L, "dim0", "4", "dim1", "1", "dim2", ImmutableList.of("a"))).get(0); - private static final InputRow row4 = PARSER.parseBatch(ImmutableMap.of("ts", 5L, "dim0", "5", "dim1", "def", "dim2", ImmutableList.of("c"))).get(0); - private static final InputRow row5 = PARSER.parseBatch(ImmutableMap.of("ts", 6L, "dim0", "6", "dim1", "abc")).get(0); + private static final InputRow ROW0 = PARSER.parseBatch(ImmutableMap.of("ts", 1L, "dim0", "1", "dim1", "", "dim2", ImmutableList.of("a", "b"))).get(0); + private static final InputRow ROW1 = PARSER.parseBatch(ImmutableMap.of("ts", 2L, "dim0", "2", "dim1", "10", "dim2", ImmutableList.of())).get(0); + private static final InputRow ROW2 = PARSER.parseBatch(ImmutableMap.of("ts", 3L, "dim0", "3", "dim1", "2", "dim2", ImmutableList.of(""))).get(0); + private static final InputRow ROW3 = 
PARSER.parseBatch(ImmutableMap.of("ts", 4L, "dim0", "4", "dim1", "1", "dim2", ImmutableList.of("a"))).get(0);
+  private static final InputRow ROW4 = PARSER.parseBatch(ImmutableMap.of("ts", 5L, "dim0", "5", "dim1", "def", "dim2", ImmutableList.of("c"))).get(0);
+  private static final InputRow ROW5 = PARSER.parseBatch(ImmutableMap.of("ts", 6L, "dim0", "6", "dim1", "abc")).get(0);

   private static final List<InputRow> ROWS = ImmutableList.of(
-      row0,
-      row1,
-      row2,
-      row3,
-      row4,
-      row5
+      ROW0,
+      ROW1,
+      ROW2,
+      ROW3,
+      ROW4,
+      ROW5
   );

   public InvalidFilteringTest(
diff --git a/processing/src/test/java/org/apache/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java b/processing/src/test/java/org/apache/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java
index 924ab996c8f..17607728b73 100644
--- a/processing/src/test/java/org/apache/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java
+++ b/processing/src/test/java/org/apache/druid/segment/incremental/OnheapIncrementalIndexBenchmark.java
@@ -81,13 +81,13 @@ import java.util.concurrent.atomic.AtomicLong;
 public class OnheapIncrementalIndexBenchmark extends AbstractBenchmark
 {
   private static AggregatorFactory[] factories;
-  static final int dimensionCount = 5;
+  static final int DIMENSION_COUNT = 5;

   static {
-    final ArrayList<AggregatorFactory> ingestAggregatorFactories = new ArrayList<>(dimensionCount + 1);
+    final ArrayList<AggregatorFactory> ingestAggregatorFactories = new ArrayList<>(DIMENSION_COUNT + 1);
     ingestAggregatorFactories.add(new CountAggregatorFactory("rows"));
-    for (int i = 0; i < dimensionCount; ++i) {
+    for (int i = 0; i < DIMENSION_COUNT; ++i) {
       ingestAggregatorFactories.add(
           new LongSumAggregatorFactory(
               StringUtils.format("sumResult%s", i),
@@ -302,9 +302,9 @@ public class OnheapIncrementalIndexBenchmark extends AbstractBenchmark
         true,
         elementsPerThread * taskCount
     );
-    final ArrayList<AggregatorFactory> queryAggregatorFactories = new ArrayList<>(dimensionCount + 1);
+    final ArrayList<AggregatorFactory> queryAggregatorFactories = new ArrayList<>(DIMENSION_COUNT + 1);
     queryAggregatorFactories.add(new CountAggregatorFactory("rows"));
-    for (int i = 0; i < dimensionCount; ++i) {
+    for (int i = 0; i < DIMENSION_COUNT; ++i) {
       queryAggregatorFactories.add(
           new LongSumAggregatorFactory(
               StringUtils.format("sumResult%s", i),
@@ -362,7 +362,7 @@ public class OnheapIncrementalIndexBenchmark extends AbstractBenchmark
       currentlyRunning.incrementAndGet();
       try {
         for (int i = 0; i < elementsPerThread; i++) {
-          incrementalIndex.add(getLongRow(timestamp + i, 1, dimensionCount));
+          incrementalIndex.add(getLongRow(timestamp + i, 1, DIMENSION_COUNT));
         }
       }
       catch (IndexSizeExceededException e) {
@@ -428,7 +428,7 @@ public class OnheapIncrementalIndexBenchmark extends AbstractBenchmark
     final int expectedVal = elementsPerThread * taskCount;
     for (Result result : results) {
       Assert.assertEquals(elementsPerThread, result.getValue().getLongMetric("rows").intValue());
-      for (int i = 0; i < dimensionCount; ++i) {
+      for (int i = 0; i < DIMENSION_COUNT; ++i) {
         Assert.assertEquals(
             StringUtils.format("Failed long sum on dimension %d", i),
             expectedVal,
diff --git a/processing/src/test/java/org/apache/druid/segment/virtual/DummyStringVirtualColumnTest.java b/processing/src/test/java/org/apache/druid/segment/virtual/DummyStringVirtualColumnTest.java
index b189bf02c3f..d2b3ccddeda 100644
--- a/processing/src/test/java/org/apache/druid/segment/virtual/DummyStringVirtualColumnTest.java
+++ b/processing/src/test/java/org/apache/druid/segment/virtual/DummyStringVirtualColumnTest.java
@@ -66,11 +66,11 @@ public class
DummyStringVirtualColumnTest { QueryableIndexSegment queryableIndexSegment = new QueryableIndexSegment( TestIndex.getMMappedTestIndex(), - SegmentId.dummy(QueryRunnerTestHelper.dataSource) + SegmentId.dummy(QueryRunnerTestHelper.DATA_SOURCE) ); IncrementalIndexSegment incrementalIndexSegment = new IncrementalIndexSegment( TestIndex.getIncrementalTestIndex(), - SegmentId.dummy(QueryRunnerTestHelper.dataSource) + SegmentId.dummy(QueryRunnerTestHelper.DATA_SOURCE) ); mmappedSegments = Lists.newArrayList(queryableIndexSegment, queryableIndexSegment); @@ -242,10 +242,10 @@ public class DummyStringVirtualColumnTest private void testGroupBy(List segments, boolean enableRowBasedMethods, boolean enableColumnBasedMethods) { GroupByQuery query = new GroupByQuery.Builder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setGranularity(Granularities.ALL) .setVirtualColumns( - new DummyStringVirtualColumn(QueryRunnerTestHelper.marketDimension, VSTRING_DIM, + new DummyStringVirtualColumn(QueryRunnerTestHelper.MARKET_DIMENSION, VSTRING_DIM, enableRowBasedMethods, enableColumnBasedMethods, false, true ) @@ -276,11 +276,11 @@ public class DummyStringVirtualColumnTest ) { GroupByQuery query = new GroupByQuery.Builder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setGranularity(Granularities.ALL) .setVirtualColumns( new DummyStringVirtualColumn( - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, VSTRING_DIM, enableRowBasedMethods, enableColumnBasedMethods, @@ -313,11 +313,11 @@ public class DummyStringVirtualColumnTest ) { GroupByQuery query = new GroupByQuery.Builder() - .setDataSource(QueryRunnerTestHelper.dataSource) + .setDataSource(QueryRunnerTestHelper.DATA_SOURCE) .setGranularity(Granularities.ALL) .setVirtualColumns( new DummyStringVirtualColumn( - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, VSTRING_DIM, enableRowBasedMethods, enableColumnBasedMethods, @@ -349,14 +349,14 @@ public class DummyStringVirtualColumnTest ) { TopNQuery query = new TopNQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) .granularity(Granularities.ALL) .dimension(VSTRING_DIM) .metric(COUNT) .threshold(1) .aggregators(new CountAggregatorFactory(COUNT)) .virtualColumns(new DummyStringVirtualColumn( - QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.MARKET_DIMENSION, VSTRING_DIM, enableRowBasedMethods, enableColumnBasedMethods, diff --git a/server/src/main/java/org/apache/druid/client/cache/MemcacheClientPool.java b/server/src/main/java/org/apache/druid/client/cache/MemcacheClientPool.java index 60b5f22d00a..b076e935672 100644 --- a/server/src/main/java/org/apache/druid/client/cache/MemcacheClientPool.java +++ b/server/src/main/java/org/apache/druid/client/cache/MemcacheClientPool.java @@ -43,11 +43,11 @@ final class MemcacheClientPool implements Supplier, Class> mappings = + private static final Map, Class> MAPPINGS = ImmutableMap., Class>builder() .put(TimeseriesQuery.class, TimeseriesQueryRunnerFactory.class) .put(SearchQuery.class, SearchQueryRunnerFactory.class) @@ -80,7 +80,7 @@ public class QueryRunnerFactoryModule extends QueryToolChestModule binder ); - for (Map.Entry, Class> entry : mappings.entrySet()) { + for (Map.Entry, Class> entry : MAPPINGS.entrySet()) { queryFactoryBinder.addBinding(entry.getKey()).to(entry.getValue()); 
binder.bind(entry.getValue()).in(LazySingleton.class); } diff --git a/server/src/main/java/org/apache/druid/initialization/Initialization.java b/server/src/main/java/org/apache/druid/initialization/Initialization.java index d64fc7408e7..760d9ad7836 100644 --- a/server/src/main/java/org/apache/druid/initialization/Initialization.java +++ b/server/src/main/java/org/apache/druid/initialization/Initialization.java @@ -90,9 +90,9 @@ import java.util.concurrent.ConcurrentHashMap; public class Initialization { private static final Logger log = new Logger(Initialization.class); - private static final ConcurrentHashMap loadersMap = new ConcurrentHashMap<>(); + private static final ConcurrentHashMap LOADERS_MAP = new ConcurrentHashMap<>(); - private static final ConcurrentHashMap, Collection> extensionsMap = new ConcurrentHashMap<>(); + private static final ConcurrentHashMap, Collection> EXTENSIONS_MAP = new ConcurrentHashMap<>(); /** * @param clazz service class @@ -103,7 +103,7 @@ public class Initialization public static Collection getLoadedImplementations(Class clazz) { @SuppressWarnings("unchecked") - Collection retVal = (Collection) extensionsMap.get(clazz); + Collection retVal = (Collection) EXTENSIONS_MAP.get(clazz); if (retVal == null) { return new HashSet<>(); } @@ -113,13 +113,13 @@ public class Initialization @VisibleForTesting static void clearLoadedImplementations() { - extensionsMap.clear(); + EXTENSIONS_MAP.clear(); } @VisibleForTesting static Map getLoadersMap() { - return loadersMap; + return LOADERS_MAP; } /** @@ -138,7 +138,7 @@ public class Initialization { // It's not clear whether we should recompute modules even if they have been computed already for the serviceClass, // but that's how it used to be an preserving the old behaviour here. 
- Collection modules = extensionsMap.compute( + Collection modules = EXTENSIONS_MAP.compute( serviceClass, (serviceC, ignored) -> new ServiceLoadingFromExtensions<>(config, serviceC).implsToLoad ); @@ -292,7 +292,7 @@ public class Initialization */ public static URLClassLoader getClassLoaderForExtension(File extension, boolean useExtensionClassloaderFirst) { - return loadersMap.computeIfAbsent( + return LOADERS_MAP.computeIfAbsent( extension, theExtension -> makeClassLoaderForExtension(theExtension, useExtensionClassloaderFirst) ); diff --git a/server/src/main/java/org/apache/druid/segment/indexing/RealtimeTuningConfig.java b/server/src/main/java/org/apache/druid/segment/indexing/RealtimeTuningConfig.java index c38469fdfc1..c62c52bff5e 100644 --- a/server/src/main/java/org/apache/druid/segment/indexing/RealtimeTuningConfig.java +++ b/server/src/main/java/org/apache/druid/segment/indexing/RealtimeTuningConfig.java @@ -42,18 +42,18 @@ import java.io.File; */ public class RealtimeTuningConfig implements TuningConfig, AppenderatorConfig { - private static final int defaultMaxRowsInMemory = TuningConfig.DEFAULT_MAX_ROWS_IN_MEMORY; - private static final Period defaultIntermediatePersistPeriod = new Period("PT10M"); - private static final Period defaultWindowPeriod = new Period("PT10M"); - private static final VersioningPolicy defaultVersioningPolicy = new IntervalStartVersioningPolicy(); - private static final RejectionPolicyFactory defaultRejectionPolicyFactory = new ServerTimeRejectionPolicyFactory(); - private static final int defaultMaxPendingPersists = 0; - private static final ShardSpec defaultShardSpec = new NumberedShardSpec(0, 1); - private static final IndexSpec defaultIndexSpec = new IndexSpec(); - private static final Boolean defaultReportParseExceptions = Boolean.FALSE; - private static final long defaultHandoffConditionTimeout = 0; - private static final long defaultAlertTimeout = 0; - private static final String defaultDedupColumn = null; + private static final int DEFAULT_MAX_ROWS_IN_MEMORY = TuningConfig.DEFAULT_MAX_ROWS_IN_MEMORY; + private static final Period DEFAULT_INTERMEDIATE_PERSIST_PERIOD = new Period("PT10M"); + private static final Period DEFAULT_WINDOW_PERIOD = new Period("PT10M"); + private static final VersioningPolicy DEFAULT_VERSIONING_POLICY = new IntervalStartVersioningPolicy(); + private static final RejectionPolicyFactory DEFAULT_REJECTION_POLICY_FACTORY = new ServerTimeRejectionPolicyFactory(); + private static final int DEFAULT_MAX_PENDING_PERSISTS = 0; + private static final ShardSpec DEFAULT_SHARD_SPEC = new NumberedShardSpec(0, 1); + private static final IndexSpec DEFAULT_INDEX_SPEC = new IndexSpec(); + private static final Boolean DEFAULT_REPORT_PARSE_EXCEPTIONS = Boolean.FALSE; + private static final long DEFAULT_HANDOFF_CONDITION_TIMEOUT = 0; + private static final long DEFAULT_ALERT_TIMEOUT = 0; + private static final String DEFAULT_DEDUP_COLUMN = null; private static File createNewBasePersistDirectory() { @@ -72,25 +72,25 @@ public class RealtimeTuningConfig implements TuningConfig, AppenderatorConfig public static RealtimeTuningConfig makeDefaultTuningConfig(final @Nullable File basePersistDirectory) { return new RealtimeTuningConfig( - defaultMaxRowsInMemory, + DEFAULT_MAX_ROWS_IN_MEMORY, 0L, - defaultIntermediatePersistPeriod, - defaultWindowPeriod, + DEFAULT_INTERMEDIATE_PERSIST_PERIOD, + DEFAULT_WINDOW_PERIOD, basePersistDirectory == null ? 
createNewBasePersistDirectory() : basePersistDirectory, - defaultVersioningPolicy, - defaultRejectionPolicyFactory, - defaultMaxPendingPersists, - defaultShardSpec, - defaultIndexSpec, - defaultIndexSpec, + DEFAULT_VERSIONING_POLICY, + DEFAULT_REJECTION_POLICY_FACTORY, + DEFAULT_MAX_PENDING_PERSISTS, + DEFAULT_SHARD_SPEC, + DEFAULT_INDEX_SPEC, + DEFAULT_INDEX_SPEC, true, 0, 0, - defaultReportParseExceptions, - defaultHandoffConditionTimeout, - defaultAlertTimeout, + DEFAULT_REPORT_PARSE_EXCEPTIONS, + DEFAULT_HANDOFF_CONDITION_TIMEOUT, + DEFAULT_ALERT_TIMEOUT, null, - defaultDedupColumn + DEFAULT_DEDUP_COLUMN ); } @@ -139,38 +139,38 @@ public class RealtimeTuningConfig implements TuningConfig, AppenderatorConfig @JsonProperty("dedupColumn") @Nullable String dedupColumn ) { - this.maxRowsInMemory = maxRowsInMemory == null ? defaultMaxRowsInMemory : maxRowsInMemory; + this.maxRowsInMemory = maxRowsInMemory == null ? DEFAULT_MAX_ROWS_IN_MEMORY : maxRowsInMemory; // initializing this to 0, it will be lazily initialized to a value // @see server.src.main.java.org.apache.druid.segment.indexing.TuningConfigs#getMaxBytesInMemoryOrDefault(long) this.maxBytesInMemory = maxBytesInMemory == null ? 0 : maxBytesInMemory; this.intermediatePersistPeriod = intermediatePersistPeriod == null - ? defaultIntermediatePersistPeriod + ? DEFAULT_INTERMEDIATE_PERSIST_PERIOD : intermediatePersistPeriod; - this.windowPeriod = windowPeriod == null ? defaultWindowPeriod : windowPeriod; + this.windowPeriod = windowPeriod == null ? DEFAULT_WINDOW_PERIOD : windowPeriod; this.basePersistDirectory = basePersistDirectory == null ? createNewBasePersistDirectory() : basePersistDirectory; - this.versioningPolicy = versioningPolicy == null ? defaultVersioningPolicy : versioningPolicy; + this.versioningPolicy = versioningPolicy == null ? DEFAULT_VERSIONING_POLICY : versioningPolicy; this.rejectionPolicyFactory = rejectionPolicyFactory == null - ? defaultRejectionPolicyFactory + ? DEFAULT_REJECTION_POLICY_FACTORY : rejectionPolicyFactory; - this.maxPendingPersists = maxPendingPersists == null ? defaultMaxPendingPersists : maxPendingPersists; - this.shardSpec = shardSpec == null ? defaultShardSpec : shardSpec; - this.indexSpec = indexSpec == null ? defaultIndexSpec : indexSpec; + this.maxPendingPersists = maxPendingPersists == null ? DEFAULT_MAX_PENDING_PERSISTS : maxPendingPersists; + this.shardSpec = shardSpec == null ? DEFAULT_SHARD_SPEC : shardSpec; + this.indexSpec = indexSpec == null ? DEFAULT_INDEX_SPEC : indexSpec; this.indexSpecForIntermediatePersists = indexSpecForIntermediatePersists == null ? this.indexSpec : indexSpecForIntermediatePersists; this.mergeThreadPriority = mergeThreadPriority; this.persistThreadPriority = persistThreadPriority; this.reportParseExceptions = reportParseExceptions == null - ? defaultReportParseExceptions + ? DEFAULT_REPORT_PARSE_EXCEPTIONS : reportParseExceptions; this.handoffConditionTimeout = handoffConditionTimeout == null - ? defaultHandoffConditionTimeout + ? DEFAULT_HANDOFF_CONDITION_TIMEOUT : handoffConditionTimeout; Preconditions.checkArgument(this.handoffConditionTimeout >= 0, "handoffConditionTimeout must be >= 0"); - this.alertTimeout = alertTimeout == null ? defaultAlertTimeout : alertTimeout; + this.alertTimeout = alertTimeout == null ? DEFAULT_ALERT_TIMEOUT : alertTimeout; Preconditions.checkArgument(this.alertTimeout >= 0, "alertTimeout must be >= 0"); this.segmentWriteOutMediumFactory = segmentWriteOutMediumFactory; - this.dedupColumn = dedupColumn == null ? 
defaultDedupColumn : dedupColumn; + this.dedupColumn = dedupColumn == null ? DEFAULT_DEDUP_COLUMN : dedupColumn; } @Override diff --git a/server/src/main/java/org/apache/druid/segment/realtime/firehose/IngestSegmentFirehose.java b/server/src/main/java/org/apache/druid/segment/realtime/firehose/IngestSegmentFirehose.java index d81afe17e4b..bcffe5a3c7b 100644 --- a/server/src/main/java/org/apache/druid/segment/realtime/firehose/IngestSegmentFirehose.java +++ b/server/src/main/java/org/apache/druid/segment/realtime/firehose/IngestSegmentFirehose.java @@ -133,7 +133,7 @@ public class IngestSegmentFirehose implements Firehose { final Map theEvent = Maps.newLinkedHashMap(); final long timestamp = timestampColumnSelector.getLong(); - theEvent.put(EventHolder.timestampKey, DateTimes.utc(timestamp)); + theEvent.put(EventHolder.TIMESTAMP_KEY, DateTimes.utc(timestamp)); for (Map.Entry dimSelector : dimSelectors.entrySet()) { diff --git a/server/src/main/java/org/apache/druid/segment/realtime/plumber/FlushingPlumberSchool.java b/server/src/main/java/org/apache/druid/segment/realtime/plumber/FlushingPlumberSchool.java index 1c7d9827d84..51249275064 100644 --- a/server/src/main/java/org/apache/druid/segment/realtime/plumber/FlushingPlumberSchool.java +++ b/server/src/main/java/org/apache/druid/segment/realtime/plumber/FlushingPlumberSchool.java @@ -46,7 +46,7 @@ import java.util.concurrent.ExecutorService; */ public class FlushingPlumberSchool extends RealtimePlumberSchool { - private static final Duration defaultFlushDuration = new Duration("PT1H"); + private static final Duration DEFAULT_FLUSH_DURATION = new Duration("PT1H"); private final Duration flushDuration; @@ -92,7 +92,7 @@ public class FlushingPlumberSchool extends RealtimePlumberSchool objectMapper ); - this.flushDuration = flushDuration == null ? defaultFlushDuration : flushDuration; + this.flushDuration = flushDuration == null ? 
DEFAULT_FLUSH_DURATION : flushDuration; this.emitter = emitter; this.conglomerate = conglomerate; this.segmentAnnouncer = segmentAnnouncer; diff --git a/server/src/main/java/org/apache/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactory.java b/server/src/main/java/org/apache/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactory.java index 76530494667..19fb37e3441 100644 --- a/server/src/main/java/org/apache/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactory.java +++ b/server/src/main/java/org/apache/druid/segment/realtime/plumber/MessageTimeRejectionPolicyFactory.java @@ -38,7 +38,7 @@ public class MessageTimeRejectionPolicyFactory implements RejectionPolicyFactory private static class MessageTimeRejectionPolicy implements RejectionPolicy { - private static final AtomicLongFieldUpdater maxTimestampUpdater = + private static final AtomicLongFieldUpdater MAX_TIMESTAMP_UPDATER = AtomicLongFieldUpdater.newUpdater(MessageTimeRejectionPolicy.class, "maxTimestamp"); private final long windowMillis; private final Period windowPeriod; @@ -76,7 +76,7 @@ public class MessageTimeRejectionPolicyFactory implements RejectionPolicyFactory if (timestamp <= currentMaxTimestamp) { return currentMaxTimestamp; } - } while (!maxTimestampUpdater.compareAndSet(this, currentMaxTimestamp, timestamp)); + } while (!MAX_TIMESTAMP_UPDATER.compareAndSet(this, currentMaxTimestamp, timestamp)); return timestamp; } diff --git a/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerModule.java b/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerModule.java index 7c1ec8b56f7..8d02098a5ea 100644 --- a/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerModule.java +++ b/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerModule.java @@ -97,7 +97,7 @@ public class JettyServerModule extends JerseyServletModule { private static final Logger log = new Logger(JettyServerModule.class); - private static final AtomicInteger activeConnections = new AtomicInteger(); + private static final AtomicInteger ACTIVE_CONNECTIONS = new AtomicInteger(); private static final String HTTP_1_1_STRING = "HTTP/1.1"; @Override @@ -337,7 +337,7 @@ public class JettyServerModule extends JerseyServletModule List monitoredConnFactories = new ArrayList<>(); for (ConnectionFactory cf : connector.getConnectionFactories()) { - monitoredConnFactories.add(new JettyMonitoringConnectionFactory(cf, activeConnections)); + monitoredConnFactories.add(new JettyMonitoringConnectionFactory(cf, ACTIVE_CONNECTIONS)); } connector.setConnectionFactories(monitoredConnFactories); } @@ -478,7 +478,7 @@ public class JettyServerModule extends JerseyServletModule { final ServiceMetricEvent.Builder builder = new ServiceMetricEvent.Builder(); MonitorUtils.addDimensionsToBuilder(builder, dimensions); - emitter.emit(builder.build("jetty/numOpenConnections", activeConnections.get())); + emitter.emit(builder.build("jetty/numOpenConnections", ACTIVE_CONNECTIONS.get())); return true; } } diff --git a/server/src/test/java/org/apache/druid/client/CachingQueryRunnerTest.java b/server/src/test/java/org/apache/druid/client/CachingQueryRunnerTest.java index b350d68dbaa..8fd83381f0b 100644 --- a/server/src/test/java/org/apache/druid/client/CachingQueryRunnerTest.java +++ b/server/src/test/java/org/apache/druid/client/CachingQueryRunnerTest.java @@ -98,7 +98,7 @@ public class CachingQueryRunnerTest new LongSumAggregatorFactory("impers", "imps") ); - private 
static final Object[] objects = new Object[]{ + private static final Object[] OBJECTS = new Object[]{ DateTimes.of("2011-01-05"), "a", 50, 4994, "b", 50, 4993, "c", 50, 4992, DateTimes.of("2011-01-06"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, DateTimes.of("2011-01-07"), "a", 50, 4991, "b", 50, 4990, "c", 50, 4989, @@ -128,8 +128,8 @@ public class CachingQueryRunnerTest @Test public void testCloseAndPopulate() throws Exception { - List expectedRes = makeTopNResults(false, objects); - List expectedCacheRes = makeTopNResults(true, objects); + List expectedRes = makeTopNResults(false, OBJECTS); + List expectedCacheRes = makeTopNResults(true, OBJECTS); TopNQueryBuilder builder = new TopNQueryBuilder() .dataSource("ds") @@ -154,17 +154,17 @@ public class CachingQueryRunnerTest { for (boolean descending : new boolean[]{false, true}) { TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() - .dataSource(QueryRunnerTestHelper.dataSource) - .granularity(QueryRunnerTestHelper.dayGran) - .intervals(QueryRunnerTestHelper.firstToThird) + .dataSource(QueryRunnerTestHelper.DATA_SOURCE) + .granularity(QueryRunnerTestHelper.DAY_GRAN) + .intervals(QueryRunnerTestHelper.FIRST_TO_THIRD) .aggregators( Arrays.asList( - QueryRunnerTestHelper.rowsCount, + QueryRunnerTestHelper.ROWS_COUNT, new LongSumAggregatorFactory( "idx", "index" ), - QueryRunnerTestHelper.qualityUniques + QueryRunnerTestHelper.QUALITY_UNIQUES ) ) .descending(descending) diff --git a/server/src/test/java/org/apache/druid/client/cache/CacheConfigTest.java b/server/src/test/java/org/apache/druid/client/cache/CacheConfigTest.java index c0c4204a072..d0ed06d2d6e 100644 --- a/server/src/test/java/org/apache/druid/client/cache/CacheConfigTest.java +++ b/server/src/test/java/org/apache/druid/client/cache/CacheConfigTest.java @@ -45,7 +45,7 @@ public class CacheConfigTest static Injector injector; static JsonConfigurator configurator; JsonConfigProvider configProvider; - private static final String propertyPrefix = "org.apache.druid.collections.test.cache"; + private static final String PROPERTY_PREFIX = "org.apache.druid.collections.test.cache"; @BeforeClass public static void populateStatics() @@ -66,7 +66,7 @@ public class CacheConfigTest @Override public void configure(Binder binder) { - JsonConfigProvider.bind(binder, propertyPrefix, CacheConfig.class); + JsonConfigProvider.bind(binder, PROPERTY_PREFIX, CacheConfig.class); } } @@ -76,16 +76,16 @@ public class CacheConfigTest public void setupTest() { properties.clear(); - configProvider = JsonConfigProvider.of(propertyPrefix, CacheConfig.class); + configProvider = JsonConfigProvider.of(PROPERTY_PREFIX, CacheConfig.class); } @Test public void testInjection1() { - properties.put(propertyPrefix + ".numBackgroundThreads", "5"); - properties.put(propertyPrefix + ".populateCache", "true"); - properties.put(propertyPrefix + ".useCache", "true"); - properties.put(propertyPrefix + ".unCacheable", "[\"a\",\"b\"]"); + properties.put(PROPERTY_PREFIX + ".numBackgroundThreads", "5"); + properties.put(PROPERTY_PREFIX + ".populateCache", "true"); + properties.put(PROPERTY_PREFIX + ".useCache", "true"); + properties.put(PROPERTY_PREFIX + ".unCacheable", "[\"a\",\"b\"]"); configProvider.inject(properties, configurator); CacheConfig config = configProvider.get().get(); @@ -98,9 +98,9 @@ public class CacheConfigTest @Test public void testInjection2() { - properties.put(propertyPrefix + ".numBackgroundThreads", "99"); - properties.put(propertyPrefix + ".populateCache", "false"); - properties.put(propertyPrefix 
+ ".useCache", "false"); + properties.put(PROPERTY_PREFIX + ".numBackgroundThreads", "99"); + properties.put(PROPERTY_PREFIX + ".populateCache", "false"); + properties.put(PROPERTY_PREFIX + ".useCache", "false"); configProvider.inject(properties, configurator); CacheConfig config = configProvider.get().get(); @@ -113,7 +113,7 @@ public class CacheConfigTest @Test(expected = ProvisionException.class) public void testValidationError() { - properties.put(propertyPrefix + ".numBackgroundThreads", "-1"); + properties.put(PROPERTY_PREFIX + ".numBackgroundThreads", "-1"); configProvider.inject(properties, configurator); CacheConfig config = configProvider.get().get(); @@ -124,7 +124,7 @@ public class CacheConfigTest @Test(expected = ProvisionException.class) public void testValidationInsaneError() { - properties.put(propertyPrefix + ".numBackgroundThreads", "BABBA YAGA"); + properties.put(PROPERTY_PREFIX + ".numBackgroundThreads", "BABBA YAGA"); configProvider.inject(properties, configurator); CacheConfig config = configProvider.get().get(); throw new IllegalStateException("Should have already failed"); @@ -133,7 +133,7 @@ public class CacheConfigTest @Test(expected = ProvisionException.class) public void testTRUE() { - properties.put(propertyPrefix + ".populateCache", "TRUE"); + properties.put(PROPERTY_PREFIX + ".populateCache", "TRUE"); configProvider.inject(properties, configurator); CacheConfig config = configProvider.get().get(); throw new IllegalStateException("Should have already failed"); @@ -142,7 +142,7 @@ public class CacheConfigTest @Test(expected = ProvisionException.class) public void testFALSE() { - properties.put(propertyPrefix + ".populateCache", "FALSE"); + properties.put(PROPERTY_PREFIX + ".populateCache", "FALSE"); configProvider.inject(properties, configurator); CacheConfig config = configProvider.get().get(); throw new IllegalStateException("Should have already failed"); @@ -152,7 +152,7 @@ public class CacheConfigTest @Test(expected = ProvisionException.class) public void testFaLse() { - properties.put(propertyPrefix + ".populateCache", "FaLse"); + properties.put(PROPERTY_PREFIX + ".populateCache", "FaLse"); configProvider.inject(properties, configurator); CacheConfig config = configProvider.get().get(); throw new IllegalStateException("Should have already failed"); diff --git a/server/src/test/java/org/apache/druid/client/client/BatchServerInventoryViewTest.java b/server/src/test/java/org/apache/druid/client/client/BatchServerInventoryViewTest.java index d2de54fa827..7e1ae4e2bf4 100644 --- a/server/src/test/java/org/apache/druid/client/client/BatchServerInventoryViewTest.java +++ b/server/src/test/java/org/apache/druid/client/client/BatchServerInventoryViewTest.java @@ -81,10 +81,10 @@ import java.util.concurrent.atomic.AtomicInteger; */ public class BatchServerInventoryViewTest { - private static final String testBasePath = "/test"; + private static final String TEST_BASE_PATH = "/test"; public static final DateTime SEGMENT_INTERVAL_START = DateTimes.of("2013-01-01"); public static final int INITIAL_SEGMENTS = 100; - private static final Timing timing = new Timing(); + private static final Timing TIMING = new Timing(); private TestingCluster testingCluster; private CuratorFramework cf; @@ -113,7 +113,7 @@ public class BatchServerInventoryViewTest .build(); cf.start(); cf.blockUntilConnected(); - cf.create().creatingParentsIfNeeded().forPath(testBasePath); + cf.create().creatingParentsIfNeeded().forPath(TEST_BASE_PATH); jsonMapper = TestHelper.makeJsonMapper(); @@ -138,7 
+138,7 @@ public class BatchServerInventoryViewTest @Override public String getBase() { - return testBasePath; + return TEST_BASE_PATH; } }; @@ -176,7 +176,7 @@ public class BatchServerInventoryViewTest @Override public String getBase() { - return testBasePath; + return TEST_BASE_PATH; } }, cf, @@ -192,7 +192,7 @@ public class BatchServerInventoryViewTest @Override public String getBase() { - return testBasePath; + return TEST_BASE_PATH; } }, cf, @@ -368,7 +368,7 @@ public class BatchServerInventoryViewTest testSegments.remove(segment2); waitForSync(filteredBatchServerInventoryView, testSegments); - timing.forWaiting().awaitLatch(removeCallbackLatch); + TIMING.forWaiting().awaitLatch(removeCallbackLatch); EasyMock.verify(callback); } @@ -390,7 +390,7 @@ public class BatchServerInventoryViewTest private static void waitForSync(BatchServerInventoryView batchServerInventoryView, Set testSegments) throws Exception { - final Timing forWaitingTiming = timing.forWaiting(); + final Timing forWaitingTiming = TIMING.forWaiting(); Stopwatch stopwatch = Stopwatch.createStarted(); while (Iterables.isEmpty(batchServerInventoryView.getInventory()) || Iterables.size(Iterables.get(batchServerInventoryView.getInventory(), 0).iterateAllSegments()) != @@ -405,7 +405,7 @@ public class BatchServerInventoryViewTest private void waitForUpdateEvents(int count) throws Exception { - final Timing forWaitingTiming = timing.forWaiting(); + final Timing forWaitingTiming = TIMING.forWaiting(); Stopwatch stopwatch = Stopwatch.createStarted(); while (inventoryUpdateCounter.get() != count) { Thread.sleep(100); @@ -469,7 +469,7 @@ public class BatchServerInventoryViewTest @Override public String getBase() { - return testBasePath; + return TEST_BASE_PATH; } }, announcer, diff --git a/server/src/test/java/org/apache/druid/curator/CuratorModuleTest.java b/server/src/test/java/org/apache/druid/curator/CuratorModuleTest.java index de1d5496520..b4ff237d844 100644 --- a/server/src/test/java/org/apache/druid/curator/CuratorModuleTest.java +++ b/server/src/test/java/org/apache/druid/curator/CuratorModuleTest.java @@ -42,9 +42,9 @@ import java.util.Properties; public final class CuratorModuleTest { - private static final String curatorHostKey = CuratorModule.CURATOR_CONFIG_PREFIX + ".host"; + private static final String CURATOR_HOST_KEY = CuratorModule.CURATOR_CONFIG_PREFIX + ".host"; - private static final String exhibitorHostsKey = CuratorModule.EXHIBITOR_CONFIG_PREFIX + ".hosts"; + private static final String EXHIBITOR_HOSTS_KEY = CuratorModule.EXHIBITOR_CONFIG_PREFIX + ".hosts"; @Test public void defaultEnsembleProvider() @@ -66,7 +66,7 @@ public final class CuratorModuleTest public void fixedZkHosts() { Properties props = new Properties(); - props.put(curatorHostKey, "hostA"); + props.put(CURATOR_HOST_KEY, "hostA"); Injector injector = newInjector(props); injector.getInstance(CuratorFramework.class); // initialize related components @@ -85,8 +85,8 @@ public final class CuratorModuleTest public void exhibitorEnsembleProvider() { Properties props = new Properties(); - props.put(curatorHostKey, "hostA"); - props.put(exhibitorHostsKey, "[\"hostB\"]"); + props.put(CURATOR_HOST_KEY, "hostA"); + props.put(EXHIBITOR_HOSTS_KEY, "[\"hostB\"]"); Injector injector = newInjector(props); injector.getInstance(CuratorFramework.class); // initialize related components @@ -101,8 +101,8 @@ public final class CuratorModuleTest public void emptyExhibitorHosts() { Properties props = new Properties(); - props.put(curatorHostKey, "hostB"); - 
props.put(exhibitorHostsKey, "[]"); + props.put(CURATOR_HOST_KEY, "hostB"); + props.put(EXHIBITOR_HOSTS_KEY, "[]"); Injector injector = newInjector(props); injector.getInstance(CuratorFramework.class); // initialize related components diff --git a/server/src/test/java/org/apache/druid/guice/JsonConfigTesterBase.java b/server/src/test/java/org/apache/druid/guice/JsonConfigTesterBase.java index 3bebdff3f9d..d21d9dd4751 100644 --- a/server/src/test/java/org/apache/druid/guice/JsonConfigTesterBase.java +++ b/server/src/test/java/org/apache/druid/guice/JsonConfigTesterBase.java @@ -48,7 +48,7 @@ import java.util.UUID; public abstract class JsonConfigTesterBase { - protected static final String configPrefix = "druid.test.prefix"; + protected static final String CONFIG_PREFIX = "druid.test.prefix"; protected Injector injector; protected final Class clazz = (Class) ((ParameterizedType) getClass().getGenericSuperclass()).getActualTypeArguments()[0]; @@ -60,7 +60,7 @@ public abstract class JsonConfigTesterBase { return StringUtils.format( "%s.%s", - configPrefix, fieldName + CONFIG_PREFIX, fieldName ); } protected static String getPropertyKey(Field field) @@ -84,7 +84,7 @@ public abstract class JsonConfigTesterBase binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/test"); binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0); binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1); - JsonConfigProvider.bind(binder, configPrefix, clazz); + JsonConfigProvider.bind(binder, CONFIG_PREFIX, clazz); } }; @@ -148,7 +148,7 @@ public abstract class JsonConfigTesterBase ImmutableList.of(simpleJsonConfigModule) ); configurator = injector.getBinding(JsonConfigurator.class).getProvider().get(); - configProvider = JsonConfigProvider.of(configPrefix, clazz); + configProvider = JsonConfigProvider.of(CONFIG_PREFIX, clazz); } @Test diff --git a/server/src/test/java/org/apache/druid/initialization/ServerConfigSerdeTest.java b/server/src/test/java/org/apache/druid/initialization/ServerConfigSerdeTest.java index a67899c04aa..f9cbe028c05 100644 --- a/server/src/test/java/org/apache/druid/initialization/ServerConfigSerdeTest.java +++ b/server/src/test/java/org/apache/druid/initialization/ServerConfigSerdeTest.java @@ -26,14 +26,14 @@ import org.junit.Test; public class ServerConfigSerdeTest { - private static final DefaultObjectMapper objectMapper = new DefaultObjectMapper(); + private static final DefaultObjectMapper OBJECT_MAPPER = new DefaultObjectMapper(); @Test public void testSerde() throws Exception { ServerConfig defaultConfig = new ServerConfig(); - String defaultConfigJson = objectMapper.writeValueAsString(defaultConfig); - ServerConfig defaultConfig2 = objectMapper.readValue(defaultConfigJson, ServerConfig.class); + String defaultConfigJson = OBJECT_MAPPER.writeValueAsString(defaultConfig); + ServerConfig defaultConfig2 = OBJECT_MAPPER.readValue(defaultConfigJson, ServerConfig.class); Assert.assertEquals(defaultConfig, defaultConfig2); ServerConfig modifiedConfig = new ServerConfig( @@ -50,8 +50,8 @@ public class ServerConfigSerdeTest defaultConfig.getInflateBufferSize(), defaultConfig.getCompressionLevel() ); - String modifiedConfigJson = objectMapper.writeValueAsString(modifiedConfig); - ServerConfig modifiedConfig2 = objectMapper.readValue(modifiedConfigJson, ServerConfig.class); + String modifiedConfigJson = OBJECT_MAPPER.writeValueAsString(modifiedConfig); + ServerConfig modifiedConfig2 = OBJECT_MAPPER.readValue(modifiedConfigJson, 
ServerConfig.class); Assert.assertEquals(modifiedConfig, modifiedConfig2); Assert.assertEquals(999, modifiedConfig2.getNumThreads()); Assert.assertEquals(888, modifiedConfig2.getQueueSize()); diff --git a/server/src/test/java/org/apache/druid/initialization/ZkPathsConfigTest.java b/server/src/test/java/org/apache/druid/initialization/ZkPathsConfigTest.java index aa81645deea..0d4d26de4e9 100644 --- a/server/src/test/java/org/apache/druid/initialization/ZkPathsConfigTest.java +++ b/server/src/test/java/org/apache/druid/initialization/ZkPathsConfigTest.java @@ -46,21 +46,21 @@ public class ZkPathsConfigTest extends JsonConfigTesterBase { JsonConfigurator configurator = injector.getBinding(JsonConfigurator.class).getProvider().get(); - JsonConfigProvider zkPathsConfig = JsonConfigProvider.of(configPrefix, ZkPathsConfig.class); + JsonConfigProvider zkPathsConfig = JsonConfigProvider.of(CONFIG_PREFIX, ZkPathsConfig.class); testProperties.clear(); String base = UUID.randomUUID().toString(); - testProperties.put(StringUtils.format("%s.base", configPrefix), base); + testProperties.put(StringUtils.format("%s.base", CONFIG_PREFIX), base); zkPathsConfig.inject(testProperties, configurator); propertyValues.clear(); - propertyValues.put(StringUtils.format("%s.base", configPrefix), base); - propertyValues.put(StringUtils.format("%s.propertiesPath", configPrefix), ZKPaths.makePath(base, "properties")); - propertyValues.put(StringUtils.format("%s.announcementsPath", configPrefix), ZKPaths.makePath(base, "announcements")); - propertyValues.put(StringUtils.format("%s.servedSegmentsPath", configPrefix), ZKPaths.makePath(base, "servedSegments")); - propertyValues.put(StringUtils.format("%s.liveSegmentsPath", configPrefix), ZKPaths.makePath(base, "segments")); - propertyValues.put(StringUtils.format("%s.coordinatorPath", configPrefix), ZKPaths.makePath(base, "coordinator")); - propertyValues.put(StringUtils.format("%s.loadQueuePath", configPrefix), ZKPaths.makePath(base, "loadQueue")); - propertyValues.put(StringUtils.format("%s.connectorPath", configPrefix), ZKPaths.makePath(base, "connector")); + propertyValues.put(StringUtils.format("%s.base", CONFIG_PREFIX), base); + propertyValues.put(StringUtils.format("%s.propertiesPath", CONFIG_PREFIX), ZKPaths.makePath(base, "properties")); + propertyValues.put(StringUtils.format("%s.announcementsPath", CONFIG_PREFIX), ZKPaths.makePath(base, "announcements")); + propertyValues.put(StringUtils.format("%s.servedSegmentsPath", CONFIG_PREFIX), ZKPaths.makePath(base, "servedSegments")); + propertyValues.put(StringUtils.format("%s.liveSegmentsPath", CONFIG_PREFIX), ZKPaths.makePath(base, "segments")); + propertyValues.put(StringUtils.format("%s.coordinatorPath", CONFIG_PREFIX), ZKPaths.makePath(base, "coordinator")); + propertyValues.put(StringUtils.format("%s.loadQueuePath", CONFIG_PREFIX), ZKPaths.makePath(base, "loadQueue")); + propertyValues.put(StringUtils.format("%s.connectorPath", CONFIG_PREFIX), ZKPaths.makePath(base, "connector")); ZkPathsConfig zkPathsConfigObj = zkPathsConfig.get().get(); validateEntries(zkPathsConfigObj); diff --git a/server/src/test/java/org/apache/druid/metadata/SQLMetadataConnectorTest.java b/server/src/test/java/org/apache/druid/metadata/SQLMetadataConnectorTest.java index a86c07e1c5f..a6c756b6671 100644 --- a/server/src/test/java/org/apache/druid/metadata/SQLMetadataConnectorTest.java +++ b/server/src/test/java/org/apache/druid/metadata/SQLMetadataConnectorTest.java @@ -43,7 +43,7 @@ public class SQLMetadataConnectorTest private 
TestDerbyConnector connector; private MetadataStorageTablesConfig tablesConfig; - private static final ObjectMapper jsonMapper = new ObjectMapper(); + private static final ObjectMapper JSON_MAPPER = new ObjectMapper(); @Before public void setUp() @@ -205,7 +205,7 @@ public class SQLMetadataConnectorTest String pwd ) throws Exception { - return jsonMapper.readValue( + return JSON_MAPPER.readValue( "{" + "\"createTables\": \"" + createTables + "\"," + "\"host\": \"" + host + "\"," + diff --git a/server/src/test/java/org/apache/druid/metadata/SQLMetadataStorageActionHandlerTest.java b/server/src/test/java/org/apache/druid/metadata/SQLMetadataStorageActionHandlerTest.java index c34ff93ebbc..11ccc19f707 100644 --- a/server/src/test/java/org/apache/druid/metadata/SQLMetadataStorageActionHandlerTest.java +++ b/server/src/test/java/org/apache/druid/metadata/SQLMetadataStorageActionHandlerTest.java @@ -50,7 +50,7 @@ public class SQLMetadataStorageActionHandlerTest @Rule public final ExpectedException thrown = ExpectedException.none(); - private static final ObjectMapper jsonMapper = new DefaultObjectMapper(); + private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper(); private SQLMetadataStorageActionHandler, Map, Map, Map> handler; @Before @@ -69,7 +69,7 @@ public class SQLMetadataStorageActionHandlerTest handler = new DerbyMetadataStorageActionHandler<>( connector, - jsonMapper, + JSON_MAPPER, new MetadataStorageActionHandlerTypes, Map, Map, Map>() { @Override diff --git a/server/src/test/java/org/apache/druid/metadata/SQLMetadataSupervisorManagerTest.java b/server/src/test/java/org/apache/druid/metadata/SQLMetadataSupervisorManagerTest.java index afc5f531cd7..38e65de4fd2 100644 --- a/server/src/test/java/org/apache/druid/metadata/SQLMetadataSupervisorManagerTest.java +++ b/server/src/test/java/org/apache/druid/metadata/SQLMetadataSupervisorManagerTest.java @@ -42,7 +42,7 @@ import java.util.Map; public class SQLMetadataSupervisorManagerTest { - private static final ObjectMapper mapper = new DefaultObjectMapper(); + private static final ObjectMapper MAPPER = new DefaultObjectMapper(); private TestDerbyConnector connector; private MetadataStorageTablesConfig tablesConfig; @@ -54,7 +54,7 @@ public class SQLMetadataSupervisorManagerTest @BeforeClass public static void setupStatic() { - mapper.registerSubtypes(TestSupervisorSpec.class); + MAPPER.registerSubtypes(TestSupervisorSpec.class); } @After @@ -81,7 +81,7 @@ public class SQLMetadataSupervisorManagerTest tablesConfig = derbyConnectorRule.metadataTablesConfigSupplier().get(); connector.createSupervisorsTable(); - supervisorManager = new SQLMetadataSupervisorManager(mapper, connector, Suppliers.ofInstance(tablesConfig)); + supervisorManager = new SQLMetadataSupervisorManager(MAPPER, connector, Suppliers.ofInstance(tablesConfig)); } @Test diff --git a/server/src/test/java/org/apache/druid/query/lookup/LookupListeningAnnouncerConfigTest.java b/server/src/test/java/org/apache/druid/query/lookup/LookupListeningAnnouncerConfigTest.java index cf7573b988f..24e2e60b355 100644 --- a/server/src/test/java/org/apache/druid/query/lookup/LookupListeningAnnouncerConfigTest.java +++ b/server/src/test/java/org/apache/druid/query/lookup/LookupListeningAnnouncerConfigTest.java @@ -40,7 +40,7 @@ import java.util.Properties; public class LookupListeningAnnouncerConfigTest { - private static final String propertyBase = "some.property"; + private static final String PROPERTY_BASE = "some.property"; private final Injector injector = 
Initialization.makeInjectorWithModules( GuiceInjectors.makeStartupInjector(), ImmutableList.of( @@ -76,7 +76,7 @@ public class LookupListeningAnnouncerConfigTest { final JsonConfigurator configurator = injector.getBinding(JsonConfigurator.class).getProvider().get(); final JsonConfigProvider configProvider = JsonConfigProvider.of( - propertyBase, + PROPERTY_BASE, LookupListeningAnnouncerConfig.class ); configProvider.inject(properties, configurator); @@ -89,9 +89,9 @@ public class LookupListeningAnnouncerConfigTest { final String lookupTier = "some_tier"; final JsonConfigurator configurator = injector.getBinding(JsonConfigurator.class).getProvider().get(); - properties.put(propertyBase + ".lookupTier", lookupTier); + properties.put(PROPERTY_BASE + ".lookupTier", lookupTier); final JsonConfigProvider configProvider = JsonConfigProvider.of( - propertyBase, + PROPERTY_BASE, LookupListeningAnnouncerConfig.class ); configProvider.inject(properties, configurator); @@ -103,9 +103,9 @@ public class LookupListeningAnnouncerConfigTest public void testFailsOnEmptyTier() { final JsonConfigurator configurator = injector.getBinding(JsonConfigurator.class).getProvider().get(); - properties.put(propertyBase + ".lookupTier", ""); + properties.put(PROPERTY_BASE + ".lookupTier", ""); final JsonConfigProvider configProvider = JsonConfigProvider.of( - propertyBase, + PROPERTY_BASE, LookupListeningAnnouncerConfig.class ); configProvider.inject(properties, configurator); @@ -117,9 +117,9 @@ public class LookupListeningAnnouncerConfigTest public void testDatasourceInjection() { final JsonConfigurator configurator = injector.getBinding(JsonConfigurator.class).getProvider().get(); - properties.put(propertyBase + ".lookupTierIsDatasource", "true"); + properties.put(PROPERTY_BASE + ".lookupTierIsDatasource", "true"); final JsonConfigProvider configProvider = JsonConfigProvider.of( - propertyBase, + PROPERTY_BASE, LookupListeningAnnouncerConfig.class ); configProvider.inject(properties, configurator); @@ -132,10 +132,10 @@ public class LookupListeningAnnouncerConfigTest { final String lookupTier = "some_tier"; final JsonConfigurator configurator = injector.getBinding(JsonConfigurator.class).getProvider().get(); - properties.put(propertyBase + ".lookupTier", lookupTier); - properties.put(propertyBase + ".lookupTierIsDatasource", "true"); + properties.put(PROPERTY_BASE + ".lookupTier", lookupTier); + properties.put(PROPERTY_BASE + ".lookupTierIsDatasource", "true"); final JsonConfigProvider configProvider = JsonConfigProvider.of( - propertyBase, + PROPERTY_BASE, LookupListeningAnnouncerConfig.class ); configProvider.inject(properties, configurator); diff --git a/server/src/test/java/org/apache/druid/query/lookup/MapLookupExtractorFactoryTest.java b/server/src/test/java/org/apache/druid/query/lookup/MapLookupExtractorFactoryTest.java index 75be6d48da7..4c7d8e7cd7d 100644 --- a/server/src/test/java/org/apache/druid/query/lookup/MapLookupExtractorFactoryTest.java +++ b/server/src/test/java/org/apache/druid/query/lookup/MapLookupExtractorFactoryTest.java @@ -31,24 +31,24 @@ public class MapLookupExtractorFactoryTest { private static final String KEY = "foo"; private static final String VALUE = "bar"; - private static final MapLookupExtractorFactory factory = new MapLookupExtractorFactory(ImmutableMap.of(KEY, VALUE), true); + private static final MapLookupExtractorFactory FACTORY = new MapLookupExtractorFactory(ImmutableMap.of(KEY, VALUE), true); @Test public void testSimpleExtraction() { - 
Assert.assertEquals(factory.get().apply(KEY), VALUE); - Assert.assertTrue(factory.get().isOneToOne()); + Assert.assertEquals(FACTORY.get().apply(KEY), VALUE); + Assert.assertTrue(FACTORY.get().isOneToOne()); } @Test public void testReplaces() { - Assert.assertFalse(factory.replaces(factory)); - Assert.assertFalse(factory.replaces(new MapLookupExtractorFactory(ImmutableMap.of(KEY, VALUE), true))); - Assert.assertTrue(factory.replaces(new MapLookupExtractorFactory(ImmutableMap.of(KEY, VALUE), false))); - Assert.assertTrue(factory.replaces(new MapLookupExtractorFactory(ImmutableMap.of(KEY + "1", VALUE), true))); - Assert.assertTrue(factory.replaces(new MapLookupExtractorFactory(ImmutableMap.of(KEY, VALUE + "1"), true))); - Assert.assertTrue(factory.replaces(null)); + Assert.assertFalse(FACTORY.replaces(FACTORY)); + Assert.assertFalse(FACTORY.replaces(new MapLookupExtractorFactory(ImmutableMap.of(KEY, VALUE), true))); + Assert.assertTrue(FACTORY.replaces(new MapLookupExtractorFactory(ImmutableMap.of(KEY, VALUE), false))); + Assert.assertTrue(FACTORY.replaces(new MapLookupExtractorFactory(ImmutableMap.of(KEY + "1", VALUE), true))); + Assert.assertTrue(FACTORY.replaces(new MapLookupExtractorFactory(ImmutableMap.of(KEY, VALUE + "1"), true))); + Assert.assertTrue(FACTORY.replaces(null)); } @Test diff --git a/server/src/test/java/org/apache/druid/segment/indexing/granularity/ArbitraryGranularityTest.java b/server/src/test/java/org/apache/druid/segment/indexing/granularity/ArbitraryGranularityTest.java index 4c8b3a36295..d7dff7a31cb 100644 --- a/server/src/test/java/org/apache/druid/segment/indexing/granularity/ArbitraryGranularityTest.java +++ b/server/src/test/java/org/apache/druid/segment/indexing/granularity/ArbitraryGranularityTest.java @@ -34,7 +34,7 @@ import java.util.List; public class ArbitraryGranularityTest { - private static final ObjectMapper jsonMapper = new DefaultObjectMapper(); + private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper(); @Test public void testDefaultQueryGranularity() @@ -186,7 +186,7 @@ public class ArbitraryGranularityTest )); try { - final GranularitySpec rtSpec = jsonMapper.readValue(jsonMapper.writeValueAsString(spec), GranularitySpec.class); + final GranularitySpec rtSpec = JSON_MAPPER.readValue(JSON_MAPPER.writeValueAsString(spec), GranularitySpec.class); Assert.assertEquals("Round-trip", spec.bucketIntervals(), rtSpec.bucketIntervals()); } catch (Exception e) { diff --git a/server/src/test/java/org/apache/druid/segment/indexing/granularity/UniformGranularityTest.java b/server/src/test/java/org/apache/druid/segment/indexing/granularity/UniformGranularityTest.java index 1bf6d159f7d..402f7fd6d3e 100644 --- a/server/src/test/java/org/apache/druid/segment/indexing/granularity/UniformGranularityTest.java +++ b/server/src/test/java/org/apache/druid/segment/indexing/granularity/UniformGranularityTest.java @@ -39,7 +39,7 @@ import java.util.SortedSet; public class UniformGranularityTest { - private static final ObjectMapper jsonMapper = new DefaultObjectMapper(); + private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper(); @Test public void testSimple() @@ -130,7 +130,7 @@ public class UniformGranularityTest ); try { - final GranularitySpec rtSpec = jsonMapper.readValue(jsonMapper.writeValueAsString(spec), GranularitySpec.class); + final GranularitySpec rtSpec = JSON_MAPPER.readValue(JSON_MAPPER.writeValueAsString(spec), GranularitySpec.class); Assert.assertEquals( "Round-trip bucketIntervals", spec.bucketIntervals(), diff --git
a/server/src/test/java/org/apache/druid/segment/realtime/appenderator/CommittedTest.java b/server/src/test/java/org/apache/druid/segment/realtime/appenderator/CommittedTest.java index a59bd830ce6..d95a74dcfbf 100644 --- a/server/src/test/java/org/apache/druid/segment/realtime/appenderator/CommittedTest.java +++ b/server/src/test/java/org/apache/druid/segment/realtime/appenderator/CommittedTest.java @@ -32,7 +32,7 @@ import java.util.Map; public class CommittedTest { - private static final ObjectMapper objectMapper = new DefaultObjectMapper(); + private static final ObjectMapper OBJECT_MAPPER = new DefaultObjectMapper(); private static final SegmentIdWithShardSpec IDENTIFIER_OBJECT1 = new SegmentIdWithShardSpec( "foo", @@ -85,8 +85,8 @@ public class CommittedTest public void testSerde() throws Exception { final Committed committed = fixedInstance(); - final byte[] bytes = objectMapper.writeValueAsBytes(committed); - final Committed committed2 = objectMapper.readValue(bytes, Committed.class); + final byte[] bytes = OBJECT_MAPPER.writeValueAsBytes(committed); + final Committed committed2 = OBJECT_MAPPER.readValue(bytes, Committed.class); Assert.assertEquals("Round trip: overall", committed, committed2); Assert.assertEquals("Round trip: metadata", committed.getMetadata(), committed2.getMetadata()); Assert.assertEquals("Round trip: identifiers", committed.getHydrants().keySet(), committed2.getHydrants().keySet()); diff --git a/server/src/test/java/org/apache/druid/server/AsyncManagementForwardingServletTest.java b/server/src/test/java/org/apache/druid/server/AsyncManagementForwardingServletTest.java index f1bfc03a2ac..5fff015a03b 100644 --- a/server/src/test/java/org/apache/druid/server/AsyncManagementForwardingServletTest.java +++ b/server/src/test/java/org/apache/druid/server/AsyncManagementForwardingServletTest.java @@ -66,8 +66,8 @@ import java.util.zip.Deflater; public class AsyncManagementForwardingServletTest extends BaseJettyTest { - private static final ExpectedRequest coordinatorExpectedRequest = new ExpectedRequest(); - private static final ExpectedRequest overlordExpectedRequest = new ExpectedRequest(); + private static final ExpectedRequest COORDINATOR_EXPECTED_REQUEST = new ExpectedRequest(); + private static final ExpectedRequest OVERLORD_EXPECTED_REQUEST = new ExpectedRequest(); private static int coordinatorPort; private static int overlordPort; @@ -104,8 +104,8 @@ public class AsyncManagementForwardingServletTest extends BaseJettyTest coordinatorPort = SocketUtil.findOpenPortFrom(port + 1); overlordPort = SocketUtil.findOpenPortFrom(coordinatorPort + 1); - coordinator = makeTestServer(coordinatorPort, coordinatorExpectedRequest); - overlord = makeTestServer(overlordPort, overlordExpectedRequest); + coordinator = makeTestServer(coordinatorPort, COORDINATOR_EXPECTED_REQUEST); + overlord = makeTestServer(overlordPort, OVERLORD_EXPECTED_REQUEST); coordinator.start(); overlord.start(); @@ -117,8 +117,8 @@ public class AsyncManagementForwardingServletTest extends BaseJettyTest coordinator.stop(); overlord.stop(); - coordinatorExpectedRequest.reset(); - overlordExpectedRequest.reset(); + COORDINATOR_EXPECTED_REQUEST.reset(); + OVERLORD_EXPECTED_REQUEST.reset(); } @Override @@ -138,183 +138,183 @@ public class AsyncManagementForwardingServletTest extends BaseJettyTest @Test public void testCoordinatorDatasources() throws Exception { - coordinatorExpectedRequest.path = "/druid/coordinator/v1/datasources"; - coordinatorExpectedRequest.method = "GET"; - coordinatorExpectedRequest.headers 
= ImmutableMap.of("Authorization", "Basic bXl1c2VyOm15cGFzc3dvcmQ="); + COORDINATOR_EXPECTED_REQUEST.path = "/druid/coordinator/v1/datasources"; + COORDINATOR_EXPECTED_REQUEST.method = "GET"; + COORDINATOR_EXPECTED_REQUEST.headers = ImmutableMap.of("Authorization", "Basic bXl1c2VyOm15cGFzc3dvcmQ="); HttpURLConnection connection = ((HttpURLConnection) - new URL(StringUtils.format("http://localhost:%d%s", port, coordinatorExpectedRequest.path)) + new URL(StringUtils.format("http://localhost:%d%s", port, COORDINATOR_EXPECTED_REQUEST.path)) .openConnection()); - connection.setRequestMethod(coordinatorExpectedRequest.method); + connection.setRequestMethod(COORDINATOR_EXPECTED_REQUEST.method); - coordinatorExpectedRequest.headers.forEach(connection::setRequestProperty); + COORDINATOR_EXPECTED_REQUEST.headers.forEach(connection::setRequestProperty); Assert.assertEquals(200, connection.getResponseCode()); - Assert.assertTrue("coordinator called", coordinatorExpectedRequest.called); - Assert.assertFalse("overlord called", overlordExpectedRequest.called); + Assert.assertTrue("coordinator called", COORDINATOR_EXPECTED_REQUEST.called); + Assert.assertFalse("overlord called", OVERLORD_EXPECTED_REQUEST.called); } @Test public void testCoordinatorLoadStatus() throws Exception { - coordinatorExpectedRequest.path = "/druid/coordinator/v1/loadstatus"; - coordinatorExpectedRequest.query = "full"; - coordinatorExpectedRequest.method = "GET"; - coordinatorExpectedRequest.headers = ImmutableMap.of("Authorization", "Basic bXl1c2VyOm15cGFzc3dvcmQ="); + COORDINATOR_EXPECTED_REQUEST.path = "/druid/coordinator/v1/loadstatus"; + COORDINATOR_EXPECTED_REQUEST.query = "full"; + COORDINATOR_EXPECTED_REQUEST.method = "GET"; + COORDINATOR_EXPECTED_REQUEST.headers = ImmutableMap.of("Authorization", "Basic bXl1c2VyOm15cGFzc3dvcmQ="); HttpURLConnection connection = ((HttpURLConnection) new URL(StringUtils.format( - "http://localhost:%d%s?%s", port, coordinatorExpectedRequest.path, coordinatorExpectedRequest.query + "http://localhost:%d%s?%s", port, COORDINATOR_EXPECTED_REQUEST.path, COORDINATOR_EXPECTED_REQUEST.query )).openConnection()); - connection.setRequestMethod(coordinatorExpectedRequest.method); + connection.setRequestMethod(COORDINATOR_EXPECTED_REQUEST.method); - coordinatorExpectedRequest.headers.forEach(connection::setRequestProperty); + COORDINATOR_EXPECTED_REQUEST.headers.forEach(connection::setRequestProperty); Assert.assertEquals(200, connection.getResponseCode()); - Assert.assertTrue("coordinator called", coordinatorExpectedRequest.called); - Assert.assertFalse("overlord called", overlordExpectedRequest.called); + Assert.assertTrue("coordinator called", COORDINATOR_EXPECTED_REQUEST.called); + Assert.assertFalse("overlord called", OVERLORD_EXPECTED_REQUEST.called); } @Test public void testCoordinatorEnable() throws Exception { - coordinatorExpectedRequest.path = "/druid/coordinator/v1/datasources/myDatasource"; - coordinatorExpectedRequest.method = "POST"; + COORDINATOR_EXPECTED_REQUEST.path = "/druid/coordinator/v1/datasources/myDatasource"; + COORDINATOR_EXPECTED_REQUEST.method = "POST"; HttpURLConnection connection = ((HttpURLConnection) - new URL(StringUtils.format("http://localhost:%d%s", port, coordinatorExpectedRequest.path)) + new URL(StringUtils.format("http://localhost:%d%s", port, COORDINATOR_EXPECTED_REQUEST.path)) .openConnection()); - connection.setRequestMethod(coordinatorExpectedRequest.method); + connection.setRequestMethod(COORDINATOR_EXPECTED_REQUEST.method); Assert.assertEquals(200, 
connection.getResponseCode()); - Assert.assertTrue("coordinator called", coordinatorExpectedRequest.called); - Assert.assertFalse("overlord called", overlordExpectedRequest.called); + Assert.assertTrue("coordinator called", COORDINATOR_EXPECTED_REQUEST.called); + Assert.assertFalse("overlord called", OVERLORD_EXPECTED_REQUEST.called); } @Test public void testCoordinatorDisable() throws Exception { - coordinatorExpectedRequest.path = "/druid/coordinator/v1/datasources/myDatasource/intervals/2016-06-27_2016-06-28"; - coordinatorExpectedRequest.method = "DELETE"; + COORDINATOR_EXPECTED_REQUEST.path = "/druid/coordinator/v1/datasources/myDatasource/intervals/2016-06-27_2016-06-28"; + COORDINATOR_EXPECTED_REQUEST.method = "DELETE"; HttpURLConnection connection = ((HttpURLConnection) - new URL(StringUtils.format("http://localhost:%d%s", port, coordinatorExpectedRequest.path)) + new URL(StringUtils.format("http://localhost:%d%s", port, COORDINATOR_EXPECTED_REQUEST.path)) .openConnection()); - connection.setRequestMethod(coordinatorExpectedRequest.method); + connection.setRequestMethod(COORDINATOR_EXPECTED_REQUEST.method); Assert.assertEquals(200, connection.getResponseCode()); - Assert.assertTrue("coordinator called", coordinatorExpectedRequest.called); - Assert.assertFalse("overlord called", overlordExpectedRequest.called); + Assert.assertTrue("coordinator called", COORDINATOR_EXPECTED_REQUEST.called); + Assert.assertFalse("overlord called", OVERLORD_EXPECTED_REQUEST.called); } @Test public void testCoordinatorProxyStatus() throws Exception { - coordinatorExpectedRequest.path = "/status"; - coordinatorExpectedRequest.method = "GET"; - coordinatorExpectedRequest.headers = ImmutableMap.of("Authorization", "Basic bXl1c2VyOm15cGFzc3dvcmQ="); + COORDINATOR_EXPECTED_REQUEST.path = "/status"; + COORDINATOR_EXPECTED_REQUEST.method = "GET"; + COORDINATOR_EXPECTED_REQUEST.headers = ImmutableMap.of("Authorization", "Basic bXl1c2VyOm15cGFzc3dvcmQ="); HttpURLConnection connection = ((HttpURLConnection) - new URL(StringUtils.format("http://localhost:%d/proxy/coordinator%s", port, coordinatorExpectedRequest.path)) + new URL(StringUtils.format("http://localhost:%d/proxy/coordinator%s", port, COORDINATOR_EXPECTED_REQUEST.path)) .openConnection()); - connection.setRequestMethod(coordinatorExpectedRequest.method); + connection.setRequestMethod(COORDINATOR_EXPECTED_REQUEST.method); - coordinatorExpectedRequest.headers.forEach(connection::setRequestProperty); + COORDINATOR_EXPECTED_REQUEST.headers.forEach(connection::setRequestProperty); Assert.assertEquals(200, connection.getResponseCode()); - Assert.assertTrue("coordinator called", coordinatorExpectedRequest.called); - Assert.assertFalse("overlord called", overlordExpectedRequest.called); + Assert.assertTrue("coordinator called", COORDINATOR_EXPECTED_REQUEST.called); + Assert.assertFalse("overlord called", OVERLORD_EXPECTED_REQUEST.called); } @Test public void testCoordinatorProxySegments() throws Exception { - coordinatorExpectedRequest.path = "/druid/coordinator/v1/metadata/datasources/myDatasource/segments"; - coordinatorExpectedRequest.method = "POST"; - coordinatorExpectedRequest.headers = ImmutableMap.of("Authorization", "Basic bXl1c2VyOm15cGFzc3dvcmQ="); - coordinatorExpectedRequest.body = "[\"2012-01-01T00:00:00.000/2012-01-03T00:00:00.000\", \"2012-01-05T00:00:00.000/2012-01-07T00:00:00.000\"]"; + COORDINATOR_EXPECTED_REQUEST.path = "/druid/coordinator/v1/metadata/datasources/myDatasource/segments"; + COORDINATOR_EXPECTED_REQUEST.method = "POST"; + 
COORDINATOR_EXPECTED_REQUEST.headers = ImmutableMap.of("Authorization", "Basic bXl1c2VyOm15cGFzc3dvcmQ="); + COORDINATOR_EXPECTED_REQUEST.body = "[\"2012-01-01T00:00:00.000/2012-01-03T00:00:00.000\", \"2012-01-05T00:00:00.000/2012-01-07T00:00:00.000\"]"; HttpURLConnection connection = ((HttpURLConnection) - new URL(StringUtils.format("http://localhost:%d/proxy/coordinator%s", port, coordinatorExpectedRequest.path)) + new URL(StringUtils.format("http://localhost:%d/proxy/coordinator%s", port, COORDINATOR_EXPECTED_REQUEST.path)) .openConnection()); - connection.setRequestMethod(coordinatorExpectedRequest.method); + connection.setRequestMethod(COORDINATOR_EXPECTED_REQUEST.method); - coordinatorExpectedRequest.headers.forEach(connection::setRequestProperty); + COORDINATOR_EXPECTED_REQUEST.headers.forEach(connection::setRequestProperty); connection.setDoOutput(true); OutputStream os = connection.getOutputStream(); - os.write(coordinatorExpectedRequest.body.getBytes(StandardCharsets.UTF_8)); + os.write(COORDINATOR_EXPECTED_REQUEST.body.getBytes(StandardCharsets.UTF_8)); os.close(); Assert.assertEquals(200, connection.getResponseCode()); - Assert.assertTrue("coordinator called", coordinatorExpectedRequest.called); - Assert.assertFalse("overlord called", overlordExpectedRequest.called); + Assert.assertTrue("coordinator called", COORDINATOR_EXPECTED_REQUEST.called); + Assert.assertFalse("overlord called", OVERLORD_EXPECTED_REQUEST.called); } @Test public void testOverlordPostTask() throws Exception { - overlordExpectedRequest.path = "/druid/indexer/v1/task"; - overlordExpectedRequest.method = "POST"; - overlordExpectedRequest.headers = ImmutableMap.of( + OVERLORD_EXPECTED_REQUEST.path = "/druid/indexer/v1/task"; + OVERLORD_EXPECTED_REQUEST.method = "POST"; + OVERLORD_EXPECTED_REQUEST.headers = ImmutableMap.of( "Authorization", "Basic bXl1c2VyOm15cGFzc3dvcmQ=", "Content-Type", "application/json" ); - overlordExpectedRequest.body = "{\"type\": \"index\", \"spec\": \"stuffGoesHere\"}"; + OVERLORD_EXPECTED_REQUEST.body = "{\"type\": \"index\", \"spec\": \"stuffGoesHere\"}"; HttpURLConnection connection = ((HttpURLConnection) - new URL(StringUtils.format("http://localhost:%d%s", port, overlordExpectedRequest.path)) + new URL(StringUtils.format("http://localhost:%d%s", port, OVERLORD_EXPECTED_REQUEST.path)) .openConnection()); - connection.setRequestMethod(overlordExpectedRequest.method); + connection.setRequestMethod(OVERLORD_EXPECTED_REQUEST.method); - overlordExpectedRequest.headers.forEach(connection::setRequestProperty); + OVERLORD_EXPECTED_REQUEST.headers.forEach(connection::setRequestProperty); connection.setDoOutput(true); OutputStream os = connection.getOutputStream(); - os.write(overlordExpectedRequest.body.getBytes(StandardCharsets.UTF_8)); + os.write(OVERLORD_EXPECTED_REQUEST.body.getBytes(StandardCharsets.UTF_8)); os.close(); Assert.assertEquals(200, connection.getResponseCode()); - Assert.assertFalse("coordinator called", coordinatorExpectedRequest.called); - Assert.assertTrue("overlord called", overlordExpectedRequest.called); + Assert.assertFalse("coordinator called", COORDINATOR_EXPECTED_REQUEST.called); + Assert.assertTrue("overlord called", OVERLORD_EXPECTED_REQUEST.called); } @Test public void testOverlordTaskStatus() throws Exception { - overlordExpectedRequest.path = "/druid/indexer/v1/task/myTaskId/status"; - overlordExpectedRequest.method = "GET"; - overlordExpectedRequest.headers = ImmutableMap.of("Authorization", "Basic bXl1c2VyOm15cGFzc3dvcmQ="); + 
OVERLORD_EXPECTED_REQUEST.path = "/druid/indexer/v1/task/myTaskId/status"; + OVERLORD_EXPECTED_REQUEST.method = "GET"; + OVERLORD_EXPECTED_REQUEST.headers = ImmutableMap.of("Authorization", "Basic bXl1c2VyOm15cGFzc3dvcmQ="); HttpURLConnection connection = ((HttpURLConnection) - new URL(StringUtils.format("http://localhost:%d%s", port, overlordExpectedRequest.path)) + new URL(StringUtils.format("http://localhost:%d%s", port, OVERLORD_EXPECTED_REQUEST.path)) .openConnection()); - connection.setRequestMethod(overlordExpectedRequest.method); + connection.setRequestMethod(OVERLORD_EXPECTED_REQUEST.method); - overlordExpectedRequest.headers.forEach(connection::setRequestProperty); + OVERLORD_EXPECTED_REQUEST.headers.forEach(connection::setRequestProperty); Assert.assertEquals(200, connection.getResponseCode()); - Assert.assertFalse("coordinator called", coordinatorExpectedRequest.called); - Assert.assertTrue("overlord called", overlordExpectedRequest.called); + Assert.assertFalse("coordinator called", COORDINATOR_EXPECTED_REQUEST.called); + Assert.assertTrue("overlord called", OVERLORD_EXPECTED_REQUEST.called); } @Test public void testOverlordProxyLeader() throws Exception { - overlordExpectedRequest.path = "/druid/indexer/v1/leader"; - overlordExpectedRequest.method = "GET"; - overlordExpectedRequest.headers = ImmutableMap.of("Authorization", "Basic bXl1c2VyOm15cGFzc3dvcmQ="); + OVERLORD_EXPECTED_REQUEST.path = "/druid/indexer/v1/leader"; + OVERLORD_EXPECTED_REQUEST.method = "GET"; + OVERLORD_EXPECTED_REQUEST.headers = ImmutableMap.of("Authorization", "Basic bXl1c2VyOm15cGFzc3dvcmQ="); HttpURLConnection connection = ((HttpURLConnection) - new URL(StringUtils.format("http://localhost:%d/proxy/overlord%s", port, overlordExpectedRequest.path)) + new URL(StringUtils.format("http://localhost:%d/proxy/overlord%s", port, OVERLORD_EXPECTED_REQUEST.path)) .openConnection()); - connection.setRequestMethod(overlordExpectedRequest.method); + connection.setRequestMethod(OVERLORD_EXPECTED_REQUEST.method); - overlordExpectedRequest.headers.forEach(connection::setRequestProperty); + OVERLORD_EXPECTED_REQUEST.headers.forEach(connection::setRequestProperty); Assert.assertEquals(200, connection.getResponseCode()); - Assert.assertFalse("coordinator called", coordinatorExpectedRequest.called); - Assert.assertTrue("overlord called", overlordExpectedRequest.called); + Assert.assertFalse("coordinator called", COORDINATOR_EXPECTED_REQUEST.called); + Assert.assertTrue("overlord called", OVERLORD_EXPECTED_REQUEST.called); } @Test @@ -325,8 +325,8 @@ public class AsyncManagementForwardingServletTest extends BaseJettyTest connection.setRequestMethod("GET"); Assert.assertEquals(400, connection.getResponseCode()); - Assert.assertFalse("coordinator called", coordinatorExpectedRequest.called); - Assert.assertFalse("overlord called", overlordExpectedRequest.called); + Assert.assertFalse("coordinator called", COORDINATOR_EXPECTED_REQUEST.called); + Assert.assertFalse("overlord called", OVERLORD_EXPECTED_REQUEST.called); } @Test @@ -337,8 +337,8 @@ public class AsyncManagementForwardingServletTest extends BaseJettyTest connection.setRequestMethod("GET"); Assert.assertEquals(404, connection.getResponseCode()); - Assert.assertFalse("coordinator called", coordinatorExpectedRequest.called); - Assert.assertFalse("overlord called", overlordExpectedRequest.called); + Assert.assertFalse("coordinator called", COORDINATOR_EXPECTED_REQUEST.called); + Assert.assertFalse("overlord called", OVERLORD_EXPECTED_REQUEST.called); } private static 
Server makeTestServer(int port, ExpectedRequest expectedRequest) diff --git a/server/src/test/java/org/apache/druid/server/QueryResourceTest.java b/server/src/test/java/org/apache/druid/server/QueryResourceTest.java index d1820cd70f6..af7c228bf06 100644 --- a/server/src/test/java/org/apache/druid/server/QueryResourceTest.java +++ b/server/src/test/java/org/apache/druid/server/QueryResourceTest.java @@ -78,12 +78,12 @@ import java.util.concurrent.Executors; */ public class QueryResourceTest { - private static final QueryToolChestWarehouse warehouse = new MapQueryToolChestWarehouse(ImmutableMap.of()); - private static final ObjectMapper jsonMapper = new DefaultObjectMapper(); - private static final AuthenticationResult authenticationResult = new AuthenticationResult("druid", "druid", null, null); + private static final QueryToolChestWarehouse WAREHOUSE = new MapQueryToolChestWarehouse(ImmutableMap.of()); + private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper(); + private static final AuthenticationResult AUTHENTICATION_RESULT = new AuthenticationResult("druid", "druid", null, null); private final HttpServletRequest testServletRequest = EasyMock.createMock(HttpServletRequest.class); - public static final QuerySegmentWalker testSegmentWalker = new QuerySegmentWalker() + public static final QuerySegmentWalker TEST_SEGMENT_WALKER = new QuerySegmentWalker() { @Override public QueryRunner getQueryRunnerForIntervals(Query query, Iterable intervals) @@ -106,7 +106,7 @@ public class QueryResourceTest }; - private static final ServiceEmitter noopServiceEmitter = new NoopServiceEmitter(); + private static final ServiceEmitter NOOP_SERVICE_EMITTER = new NoopServiceEmitter(); private QueryResource queryResource; private QueryManager queryManager; @@ -115,7 +115,7 @@ public class QueryResourceTest @BeforeClass public static void staticSetup() { - EmittingLogger.registerEmitter(noopServiceEmitter); + EmittingLogger.registerEmitter(NOOP_SERVICE_EMITTER); } @Before @@ -129,24 +129,24 @@ public class QueryResourceTest testRequestLogger = new TestRequestLogger(); queryResource = new QueryResource( new QueryLifecycleFactory( - warehouse, - testSegmentWalker, - new DefaultGenericQueryMetricsFactory(jsonMapper), + WAREHOUSE, + TEST_SEGMENT_WALKER, + new DefaultGenericQueryMetricsFactory(JSON_MAPPER), new NoopServiceEmitter(), testRequestLogger, new AuthConfig(), AuthTestUtils.TEST_AUTHORIZER_MAPPER ), - jsonMapper, - jsonMapper, + JSON_MAPPER, + JSON_MAPPER, queryManager, new AuthConfig(), null, - new DefaultGenericQueryMetricsFactory(jsonMapper) + new DefaultGenericQueryMetricsFactory(JSON_MAPPER) ); } - private static final String simpleTimeSeriesQuery = "{\n" + private static final String SIMPLE_TIMESERIES_QUERY = "{\n" + " \"queryType\": \"timeseries\",\n" + " \"dataSource\": \"mmx_metrics\",\n" + " \"granularity\": \"hour\",\n" @@ -170,7 +170,7 @@ public class QueryResourceTest EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).anyTimes(); EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) - .andReturn(authenticationResult) + .andReturn(AUTHENTICATION_RESULT) .anyTimes(); testServletRequest.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true); @@ -178,7 +178,7 @@ public class QueryResourceTest EasyMock.replay(testServletRequest); Response response = queryResource.doPost( - new ByteArrayInputStream(simpleTimeSeriesQuery.getBytes("UTF-8")), + new 
ByteArrayInputStream(SIMPLE_TIMESERIES_QUERY.getBytes("UTF-8")), null /*pretty*/, testServletRequest ); @@ -198,7 +198,7 @@ public class QueryResourceTest EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).anyTimes(); EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) - .andReturn(authenticationResult) + .andReturn(AUTHENTICATION_RESULT) .anyTimes(); testServletRequest.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true); @@ -210,7 +210,7 @@ public class QueryResourceTest EasyMock.replay(testServletRequest); Response response = queryResource.doPost( - new ByteArrayInputStream(simpleTimeSeriesQuery.getBytes("UTF-8")), + new ByteArrayInputStream(SIMPLE_TIMESERIES_QUERY.getBytes("UTF-8")), null /*pretty*/, testServletRequest ); @@ -233,7 +233,7 @@ public class QueryResourceTest EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).anyTimes(); EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) - .andReturn(authenticationResult) + .andReturn(AUTHENTICATION_RESULT) .anyTimes(); testServletRequest.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true); @@ -245,7 +245,7 @@ public class QueryResourceTest EasyMock.replay(testServletRequest); Response response = queryResource.doPost( - new ByteArrayInputStream(simpleTimeSeriesQuery.getBytes("UTF-8")), + new ByteArrayInputStream(SIMPLE_TIMESERIES_QUERY.getBytes("UTF-8")), null /*pretty*/, testServletRequest ); @@ -273,7 +273,7 @@ public class QueryResourceTest EasyMock.expect(smileRequest.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).anyTimes(); EasyMock.expect(smileRequest.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) - .andReturn(authenticationResult) + .andReturn(AUTHENTICATION_RESULT) .anyTimes(); smileRequest.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true); @@ -284,7 +284,7 @@ public class QueryResourceTest EasyMock.replay(smileRequest); Response response = queryResource.doPost( - new ByteArrayInputStream(simpleTimeSeriesQuery.getBytes("UTF-8")), + new ByteArrayInputStream(SIMPLE_TIMESERIES_QUERY.getBytes("UTF-8")), null /*pretty*/, smileRequest ); @@ -317,7 +317,7 @@ public class QueryResourceTest EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).anyTimes(); EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) - .andReturn(authenticationResult) + .andReturn(AUTHENTICATION_RESULT) .anyTimes(); testServletRequest.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, false); @@ -351,26 +351,26 @@ public class QueryResourceTest queryResource = new QueryResource( new QueryLifecycleFactory( - warehouse, - testSegmentWalker, - new DefaultGenericQueryMetricsFactory(jsonMapper), + WAREHOUSE, + TEST_SEGMENT_WALKER, + new DefaultGenericQueryMetricsFactory(JSON_MAPPER), new NoopServiceEmitter(), testRequestLogger, new AuthConfig(), authMapper ), - jsonMapper, - jsonMapper, + JSON_MAPPER, + JSON_MAPPER, queryManager, new AuthConfig(), authMapper, - new DefaultGenericQueryMetricsFactory(jsonMapper) + new DefaultGenericQueryMetricsFactory(JSON_MAPPER) ); try { queryResource.doPost( - new ByteArrayInputStream(simpleTimeSeriesQuery.getBytes("UTF-8")), + new ByteArrayInputStream(SIMPLE_TIMESERIES_QUERY.getBytes("UTF-8")), null /*pretty*/, testServletRequest ); @@ -387,7 +387,7 @@ public class QueryResourceTest final ByteArrayOutputStream baos = new 
ByteArrayOutputStream(); ((StreamingOutput) response.getEntity()).write(baos); - final List> responses = jsonMapper.readValue( + final List> responses = JSON_MAPPER.readValue( baos.toByteArray(), new TypeReference>>() {} ); @@ -419,7 +419,7 @@ public class QueryResourceTest EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).anyTimes(); EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) - .andReturn(authenticationResult) + .andReturn(AUTHENTICATION_RESULT) .anyTimes(); testServletRequest.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true); @@ -465,20 +465,20 @@ public class QueryResourceTest queryResource = new QueryResource( new QueryLifecycleFactory( - warehouse, - testSegmentWalker, - new DefaultGenericQueryMetricsFactory(jsonMapper), + WAREHOUSE, + TEST_SEGMENT_WALKER, + new DefaultGenericQueryMetricsFactory(JSON_MAPPER), new NoopServiceEmitter(), testRequestLogger, new AuthConfig(), authMapper ), - jsonMapper, - jsonMapper, + JSON_MAPPER, + JSON_MAPPER, queryManager, new AuthConfig(), authMapper, - new DefaultGenericQueryMetricsFactory(jsonMapper) + new DefaultGenericQueryMetricsFactory(JSON_MAPPER) ); final String queryString = "{\"queryType\":\"timeBoundary\", \"dataSource\":\"allow\"," @@ -544,7 +544,7 @@ public class QueryResourceTest EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).anyTimes(); EasyMock.expect(testServletRequest.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT)) - .andReturn(authenticationResult) + .andReturn(AUTHENTICATION_RESULT) .anyTimes(); testServletRequest.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true); @@ -587,20 +587,20 @@ public class QueryResourceTest queryResource = new QueryResource( new QueryLifecycleFactory( - warehouse, - testSegmentWalker, - new DefaultGenericQueryMetricsFactory(jsonMapper), + WAREHOUSE, + TEST_SEGMENT_WALKER, + new DefaultGenericQueryMetricsFactory(JSON_MAPPER), new NoopServiceEmitter(), testRequestLogger, new AuthConfig(), authMapper ), - jsonMapper, - jsonMapper, + JSON_MAPPER, + JSON_MAPPER, queryManager, new AuthConfig(), authMapper, - new DefaultGenericQueryMetricsFactory(jsonMapper) + new DefaultGenericQueryMetricsFactory(JSON_MAPPER) ); final String queryString = "{\"queryType\":\"timeBoundary\", \"dataSource\":\"allow\"," diff --git a/server/src/test/java/org/apache/druid/server/SegmentManagerTest.java b/server/src/test/java/org/apache/druid/server/SegmentManagerTest.java index 465bb73ad49..317e9c7f3ac 100644 --- a/server/src/test/java/org/apache/druid/server/SegmentManagerTest.java +++ b/server/src/test/java/org/apache/druid/server/SegmentManagerTest.java @@ -57,7 +57,7 @@ import java.util.stream.Collectors; public class SegmentManagerTest { - private static final SegmentLoader segmentLoader = new SegmentLoader() + private static final SegmentLoader SEGMENT_LOADER = new SegmentLoader() { @Override public boolean isSegmentLoaded(DataSegment segment) @@ -138,7 +138,7 @@ public class SegmentManagerTest } } - private static final List segments = ImmutableList.of( + private static final List SEGMENTS = ImmutableList.of( new DataSegment( "small_source", Intervals.of("0/1000"), @@ -203,8 +203,8 @@ public class SegmentManagerTest @Before public void setup() { - segmentManager = new SegmentManager(segmentLoader); - executor = Executors.newFixedThreadPool(segments.size()); + segmentManager = new SegmentManager(SEGMENT_LOADER); + executor = 
Executors.newFixedThreadPool(SEGMENTS.size()); } @After @@ -216,7 +216,7 @@ public class SegmentManagerTest @Test public void testLoadSegment() throws ExecutionException, InterruptedException, SegmentLoadingException { - final List> futures = segments.stream() + final List> futures = SEGMENTS.stream() .map( segment -> executor.submit( () -> segmentManager.loadSegment(segment) @@ -228,17 +228,17 @@ public class SegmentManagerTest Assert.assertTrue(eachFuture.get()); } - assertResult(segments); + assertResult(SEGMENTS); } @Test public void testDropSegment() throws SegmentLoadingException, ExecutionException, InterruptedException { - for (DataSegment eachSegment : segments) { + for (DataSegment eachSegment : SEGMENTS) { Assert.assertTrue(segmentManager.loadSegment(eachSegment)); } - final List> futures = ImmutableList.of(segments.get(0), segments.get(2)).stream() + final List> futures = ImmutableList.of(SEGMENTS.get(0), SEGMENTS.get(2)).stream() .map( segment -> executor.submit( () -> { @@ -254,17 +254,17 @@ public class SegmentManagerTest } assertResult( - ImmutableList.of(segments.get(1), segments.get(3), segments.get(4)) + ImmutableList.of(SEGMENTS.get(1), SEGMENTS.get(3), SEGMENTS.get(4)) ); } @Test public void testLoadDropSegment() throws SegmentLoadingException, ExecutionException, InterruptedException { - Assert.assertTrue(segmentManager.loadSegment(segments.get(0))); - Assert.assertTrue(segmentManager.loadSegment(segments.get(2))); + Assert.assertTrue(segmentManager.loadSegment(SEGMENTS.get(0))); + Assert.assertTrue(segmentManager.loadSegment(SEGMENTS.get(2))); - final List> loadFutures = ImmutableList.of(segments.get(1), segments.get(3), segments.get(4)) + final List> loadFutures = ImmutableList.of(SEGMENTS.get(1), SEGMENTS.get(3), SEGMENTS.get(4)) .stream() .map( segment -> executor.submit( @@ -272,7 +272,7 @@ public class SegmentManagerTest ) ) .collect(Collectors.toList()); - final List> dropFutures = ImmutableList.of(segments.get(0), segments.get(2)).stream() + final List> dropFutures = ImmutableList.of(SEGMENTS.get(0), SEGMENTS.get(2)).stream() .map( segment -> executor.submit( () -> { @@ -291,27 +291,27 @@ public class SegmentManagerTest } assertResult( - ImmutableList.of(segments.get(1), segments.get(3), segments.get(4)) + ImmutableList.of(SEGMENTS.get(1), SEGMENTS.get(3), SEGMENTS.get(4)) ); } @Test public void testLoadDuplicatedSegmentsSequentially() throws SegmentLoadingException { - for (DataSegment segment : segments) { + for (DataSegment segment : SEGMENTS) { Assert.assertTrue(segmentManager.loadSegment(segment)); } // try to load an existing segment - Assert.assertFalse(segmentManager.loadSegment(segments.get(0))); + Assert.assertFalse(segmentManager.loadSegment(SEGMENTS.get(0))); - assertResult(segments); + assertResult(SEGMENTS); } @Test public void testLoadDuplicatedSegmentsInParallel() throws ExecutionException, InterruptedException, SegmentLoadingException { - final List> futures = ImmutableList.of(segments.get(0), segments.get(0), segments.get(0)) + final List> futures = ImmutableList.of(SEGMENTS.get(0), SEGMENTS.get(0), SEGMENTS.get(0)) .stream() .map( segment -> executor.submit( @@ -330,18 +330,18 @@ public class SegmentManagerTest Assert.assertEquals(1, numSucceededFutures); Assert.assertEquals(2, numFailedFutures); - assertResult(ImmutableList.of(segments.get(0))); + assertResult(ImmutableList.of(SEGMENTS.get(0))); } @Test public void testNonExistingSegmentsSequentially() throws SegmentLoadingException { - 
Assert.assertTrue(segmentManager.loadSegment(segments.get(0))); + Assert.assertTrue(segmentManager.loadSegment(SEGMENTS.get(0))); // try to drop a non-existing segment of different data source - segmentManager.dropSegment(segments.get(2)); + segmentManager.dropSegment(SEGMENTS.get(2)); assertResult( - ImmutableList.of(segments.get(0)) + ImmutableList.of(SEGMENTS.get(0)) ); } @@ -349,8 +349,8 @@ public class SegmentManagerTest public void testNonExistingSegmentsInParallel() throws SegmentLoadingException, ExecutionException, InterruptedException { - segmentManager.loadSegment(segments.get(0)); - final List> futures = ImmutableList.of(segments.get(1), segments.get(2)) + segmentManager.loadSegment(SEGMENTS.get(0)); + final List> futures = ImmutableList.of(SEGMENTS.get(1), SEGMENTS.get(2)) .stream() .map( segment -> executor.submit( @@ -366,16 +366,16 @@ public class SegmentManagerTest future.get(); } - assertResult(ImmutableList.of(segments.get(0))); + assertResult(ImmutableList.of(SEGMENTS.get(0))); } @Test public void testRemoveEmptyTimeline() throws SegmentLoadingException { - segmentManager.loadSegment(segments.get(0)); - assertResult(ImmutableList.of(segments.get(0))); + segmentManager.loadSegment(SEGMENTS.get(0)); + assertResult(ImmutableList.of(SEGMENTS.get(0))); Assert.assertEquals(1, segmentManager.getDataSources().size()); - segmentManager.dropSegment(segments.get(0)); + segmentManager.dropSegment(SEGMENTS.get(0)); Assert.assertEquals(0, segmentManager.getDataSources().size()); } @@ -434,7 +434,7 @@ public class SegmentManagerTest segment.getInterval(), segment.getVersion(), segment.getShardSpec().createChunk( - ReferenceCountingSegment.wrapSegment(segmentLoader.getSegment(segment), segment.getShardSpec()) + ReferenceCountingSegment.wrapSegment(SEGMENT_LOADER.getSegment(segment), segment.getShardSpec()) ) ); } diff --git a/server/src/test/java/org/apache/druid/server/coordination/coordination/BatchDataSegmentAnnouncerTest.java b/server/src/test/java/org/apache/druid/server/coordination/coordination/BatchDataSegmentAnnouncerTest.java index f1a65e7f970..c38f0ad3400 100644 --- a/server/src/test/java/org/apache/druid/server/coordination/coordination/BatchDataSegmentAnnouncerTest.java +++ b/server/src/test/java/org/apache/druid/server/coordination/coordination/BatchDataSegmentAnnouncerTest.java @@ -68,9 +68,9 @@ import java.util.concurrent.atomic.AtomicInteger; */ public class BatchDataSegmentAnnouncerTest { - private static final String testBasePath = "/test"; - private static final String testSegmentsPath = "/test/segments/id"; - private static final Joiner joiner = Joiner.on("/"); + private static final String TEST_BASE_PATH = "/test"; + private static final String TEST_SEGMENTS_PATH = "/test/segments/id"; + private static final Joiner JOINER = Joiner.on("/"); private static final int NUM_THREADS = 4; private TestingCluster testingCluster; @@ -100,7 +100,7 @@ public class BatchDataSegmentAnnouncerTest .build(); cf.start(); cf.blockUntilConnected(); - cf.create().creatingParentsIfNeeded().forPath(testBasePath); + cf.create().creatingParentsIfNeeded().forPath(TEST_BASE_PATH); jsonMapper = TestHelper.makeJsonMapper(); @@ -154,7 +154,7 @@ public class BatchDataSegmentAnnouncerTest @Override public String getBase() { - return testBasePath; + return TEST_BASE_PATH; } }, announcer, @@ -187,17 +187,17 @@ public class BatchDataSegmentAnnouncerTest segmentAnnouncer.announceSegment(firstSegment); - List zNodes = cf.getChildren().forPath(testSegmentsPath); + List zNodes = 
cf.getChildren().forPath(TEST_SEGMENTS_PATH); for (String zNode : zNodes) { - Set segments = segmentReader.read(joiner.join(testSegmentsPath, zNode)); + Set segments = segmentReader.read(JOINER.join(TEST_SEGMENTS_PATH, zNode)); Assert.assertEquals(segments.iterator().next(), firstSegment); } segmentAnnouncer.announceSegment(secondSegment); for (String zNode : zNodes) { - Set segments = segmentReader.read(joiner.join(testSegmentsPath, zNode)); + Set segments = segmentReader.read(JOINER.join(TEST_SEGMENTS_PATH, zNode)); Assert.assertEquals(Sets.newHashSet(firstSegment, secondSegment), segments); } @@ -210,13 +210,13 @@ public class BatchDataSegmentAnnouncerTest segmentAnnouncer.unannounceSegment(firstSegment); for (String zNode : zNodes) { - Set segments = segmentReader.read(joiner.join(testSegmentsPath, zNode)); + Set segments = segmentReader.read(JOINER.join(TEST_SEGMENTS_PATH, zNode)); Assert.assertEquals(segments.iterator().next(), secondSegment); } segmentAnnouncer.unannounceSegment(secondSegment); - Assert.assertTrue(cf.getChildren().forPath(testSegmentsPath).isEmpty()); + Assert.assertTrue(cf.getChildren().forPath(TEST_SEGMENTS_PATH).isEmpty()); snapshot = segmentAnnouncer.getSegmentChangesSince( snapshot.getCounter() @@ -240,10 +240,10 @@ public class BatchDataSegmentAnnouncerTest segmentAnnouncer.announceSegment(firstSegment); - List zNodes = cf.getChildren().forPath(testSegmentsPath); + List zNodes = cf.getChildren().forPath(TEST_SEGMENTS_PATH); for (String zNode : zNodes) { - DataSegment announcedSegment = Iterables.getOnlyElement(segmentReader.read(joiner.join(testSegmentsPath, zNode))); + DataSegment announcedSegment = Iterables.getOnlyElement(segmentReader.read(JOINER.join(TEST_SEGMENTS_PATH, zNode))); Assert.assertEquals(announcedSegment, firstSegment); Assert.assertTrue(announcedSegment.getDimensions().isEmpty()); Assert.assertTrue(announcedSegment.getMetrics().isEmpty()); @@ -251,7 +251,7 @@ public class BatchDataSegmentAnnouncerTest segmentAnnouncer.unannounceSegment(firstSegment); - Assert.assertTrue(cf.getChildren().forPath(testSegmentsPath).isEmpty()); + Assert.assertTrue(cf.getChildren().forPath(TEST_SEGMENTS_PATH).isEmpty()); } @Test @@ -263,17 +263,17 @@ public class BatchDataSegmentAnnouncerTest segmentAnnouncer.announceSegment(firstSegment); - List zNodes = cf.getChildren().forPath(testSegmentsPath); + List zNodes = cf.getChildren().forPath(TEST_SEGMENTS_PATH); for (String zNode : zNodes) { - DataSegment announcedSegment = Iterables.getOnlyElement(segmentReader.read(joiner.join(testSegmentsPath, zNode))); + DataSegment announcedSegment = Iterables.getOnlyElement(segmentReader.read(JOINER.join(TEST_SEGMENTS_PATH, zNode))); Assert.assertEquals(announcedSegment, firstSegment); Assert.assertNull(announcedSegment.getLoadSpec()); } segmentAnnouncer.unannounceSegment(firstSegment); - Assert.assertTrue(cf.getChildren().forPath(testSegmentsPath).isEmpty()); + Assert.assertTrue(cf.getChildren().forPath(TEST_SEGMENTS_PATH).isEmpty()); } @Test @@ -292,12 +292,12 @@ public class BatchDataSegmentAnnouncerTest maxBytesPerNode.set(prevMax); } - List zNodes = cf.getChildren().forPath(testSegmentsPath); + List zNodes = cf.getChildren().forPath(TEST_SEGMENTS_PATH); Assert.assertEquals(20, zNodes.size()); Set segments = Sets.newHashSet(testSegments); for (String zNode : zNodes) { - for (DataSegment segment : segmentReader.read(joiner.join(testSegmentsPath, zNode))) { + for (DataSegment segment : segmentReader.read(JOINER.join(TEST_SEGMENTS_PATH, zNode))) { Assert.assertTrue("Invalid 
segment " + segment, segments.remove(segment)); } } @@ -322,13 +322,13 @@ public class BatchDataSegmentAnnouncerTest { segmentAnnouncer.announceSegments(testSegments); - List zNodes = cf.getChildren().forPath(testSegmentsPath); + List zNodes = cf.getChildren().forPath(TEST_SEGMENTS_PATH); Assert.assertEquals(2, zNodes.size()); Set allSegments = new HashSet<>(); for (String zNode : zNodes) { - allSegments.addAll(segmentReader.read(joiner.join(testSegmentsPath, zNode))); + allSegments.addAll(segmentReader.read(JOINER.join(TEST_SEGMENTS_PATH, zNode))); } Assert.assertEquals(allSegments, testSegments); @@ -344,7 +344,7 @@ public class BatchDataSegmentAnnouncerTest segmentAnnouncer.unannounceSegments(testSegments); - Assert.assertTrue(cf.getChildren().forPath(testSegmentsPath).isEmpty()); + Assert.assertTrue(cf.getChildren().forPath(TEST_SEGMENTS_PATH).isEmpty()); if (testHistory) { snapshot = segmentAnnouncer.getSegmentChangesSince( diff --git a/server/src/test/java/org/apache/druid/server/coordinator/CostBalancerStrategyTest.java b/server/src/test/java/org/apache/druid/server/coordinator/CostBalancerStrategyTest.java index 3074fb2c0fe..1ff0f367d47 100644 --- a/server/src/test/java/org/apache/druid/server/coordinator/CostBalancerStrategyTest.java +++ b/server/src/test/java/org/apache/druid/server/coordinator/CostBalancerStrategyTest.java @@ -48,7 +48,7 @@ import java.util.stream.IntStream; public class CostBalancerStrategyTest { - private static final Interval day = Intervals.of("2015-01-01T00/2015-01-01T01"); + private static final Interval DAY = Intervals.of("2015-01-01T00/2015-01-01T01"); /** * Create Druid cluster with serverCount servers having maxSegments segments each, and 1 server with 98 segment @@ -111,7 +111,7 @@ public class CostBalancerStrategyTest */ public static DataSegment getSegment(int index) { - return getSegment(index, "DUMMY", day); + return getSegment(index, "DUMMY", DAY); } public static DataSegment getSegment(int index, String dataSource, Interval interval) diff --git a/server/src/test/java/org/apache/druid/server/coordinator/DataSourceCompactionConfigTest.java b/server/src/test/java/org/apache/druid/server/coordinator/DataSourceCompactionConfigTest.java index 1f7676940d9..45b3b5a8265 100644 --- a/server/src/test/java/org/apache/druid/server/coordinator/DataSourceCompactionConfigTest.java +++ b/server/src/test/java/org/apache/druid/server/coordinator/DataSourceCompactionConfigTest.java @@ -37,7 +37,7 @@ import java.io.IOException; public class DataSourceCompactionConfigTest { - private static final ObjectMapper objectMapper = new DefaultObjectMapper(); + private static final ObjectMapper OBJECT_MAPPER = new DefaultObjectMapper(); @Rule public final ExpectedException expectedException = ExpectedException.none(); @@ -56,8 +56,8 @@ public class DataSourceCompactionConfigTest null, ImmutableMap.of("key", "val") ); - final String json = objectMapper.writeValueAsString(config); - final DataSourceCompactionConfig fromJson = objectMapper.readValue(json, DataSourceCompactionConfig.class); + final String json = OBJECT_MAPPER.writeValueAsString(config); + final DataSourceCompactionConfig fromJson = OBJECT_MAPPER.readValue(json, DataSourceCompactionConfig.class); Assert.assertEquals(config.getDataSource(), fromJson.getDataSource()); Assert.assertEquals(25, fromJson.getTaskPriority()); @@ -84,8 +84,8 @@ public class DataSourceCompactionConfigTest null, ImmutableMap.of("key", "val") ); - final String json = objectMapper.writeValueAsString(config); - final DataSourceCompactionConfig 
fromJson = objectMapper.readValue(json, DataSourceCompactionConfig.class);
+    final String json = OBJECT_MAPPER.writeValueAsString(config);
+    final DataSourceCompactionConfig fromJson = OBJECT_MAPPER.readValue(json, DataSourceCompactionConfig.class);
 
     Assert.assertEquals(config.getDataSource(), fromJson.getDataSource());
     Assert.assertEquals(25, fromJson.getTaskPriority());
@@ -102,10 +102,10 @@ public class DataSourceCompactionConfigTest
   public void testSerdeUserCompactTuningConfig() throws IOException
   {
     final UserCompactTuningConfig config = new UserCompactTuningConfig(null, null, null, null, null, null);
-    final String json = objectMapper.writeValueAsString(config);
+    final String json = OBJECT_MAPPER.writeValueAsString(config);
     // Check maxRowsPerSegment doesn't exist in the JSON string
     Assert.assertFalse(json.contains("maxRowsPerSegment"));
-    final UserCompactTuningConfig fromJson = objectMapper.readValue(json, UserCompactTuningConfig.class);
+    final UserCompactTuningConfig fromJson = OBJECT_MAPPER.readValue(json, UserCompactTuningConfig.class);
     Assert.assertEquals(config, fromJson);
   }
 
@@ -130,8 +130,8 @@ public class DataSourceCompactionConfigTest
         ),
         ImmutableMap.of("key", "val")
     );
-    final String json = objectMapper.writeValueAsString(config);
-    final DataSourceCompactionConfig fromJson = objectMapper.readValue(json, DataSourceCompactionConfig.class);
+    final String json = OBJECT_MAPPER.writeValueAsString(config);
+    final DataSourceCompactionConfig fromJson = OBJECT_MAPPER.readValue(json, DataSourceCompactionConfig.class);
 
     Assert.assertEquals(config.getDataSource(), fromJson.getDataSource());
     Assert.assertEquals(25, fromJson.getTaskPriority());
@@ -213,8 +213,8 @@ public class DataSourceCompactionConfigTest
         ImmutableMap.of("key", "val")
     );
 
-    final String json = objectMapper.writeValueAsString(config);
-    final DataSourceCompactionConfig fromJson = objectMapper.readValue(json, DataSourceCompactionConfig.class);
+    final String json = OBJECT_MAPPER.writeValueAsString(config);
+    final DataSourceCompactionConfig fromJson = OBJECT_MAPPER.readValue(json, DataSourceCompactionConfig.class);
 
     Assert.assertEquals(config.getDataSource(), fromJson.getDataSource());
     Assert.assertEquals(25, fromJson.getTaskPriority());
@@ -244,8 +244,8 @@ public class DataSourceCompactionConfigTest
         3000L
     );
 
-    final String json = objectMapper.writeValueAsString(tuningConfig);
-    final UserCompactTuningConfig fromJson = objectMapper.readValue(json, UserCompactTuningConfig.class);
+    final String json = OBJECT_MAPPER.writeValueAsString(tuningConfig);
+    final UserCompactTuningConfig fromJson = OBJECT_MAPPER.readValue(json, UserCompactTuningConfig.class);
     Assert.assertEquals(tuningConfig, fromJson);
   }
 }
diff --git a/server/src/test/java/org/apache/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java b/server/src/test/java/org/apache/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java
index bd2aa6eaf14..986ec98d2e3 100644
--- a/server/src/test/java/org/apache/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java
+++ b/server/src/test/java/org/apache/druid/server/coordinator/DiskNormalizedCostBalancerStrategyTest.java
@@ -43,7 +43,7 @@ import java.util.stream.IntStream;
 
 public class DiskNormalizedCostBalancerStrategyTest
 {
-  private static final Interval day = Intervals.of("2015-01-01T00/2015-01-01T01");
+  private static final Interval DAY = Intervals.of("2015-01-01T00/2015-01-01T01");
 
   /**
    * Create Druid cluster with serverCount servers having maxSegments segments each, and 1 server with 98 segment
@@ -105,7 +105,7 @@ public class DiskNormalizedCostBalancerStrategyTest
    */
   public static DataSegment getSegment(int index)
   {
-    return getSegment(index, "DUMMY", day);
+    return getSegment(index, "DUMMY", DAY);
   }
 
   public static DataSegment getSegment(int index, String dataSource, Interval interval)
diff --git a/server/src/test/java/org/apache/druid/server/coordinator/DruidClusterTest.java b/server/src/test/java/org/apache/druid/server/coordinator/DruidClusterTest.java
index 798c89107b2..b1e5dfdaedc 100644
--- a/server/src/test/java/org/apache/druid/server/coordinator/DruidClusterTest.java
+++ b/server/src/test/java/org/apache/druid/server/coordinator/DruidClusterTest.java
@@ -42,7 +42,7 @@ import java.util.stream.Collectors;
 
 public class DruidClusterTest
 {
-  private static final List segments = ImmutableList.of(
+  private static final List SEGMENTS = ImmutableList.of(
       new DataSegment(
           "test",
           Intervals.of("2015-04-12/2015-04-13"),
@@ -67,26 +67,26 @@ public class DruidClusterTest
       )
   );
 
-  private static final Map dataSources = ImmutableMap.of(
-      "src1", new ImmutableDruidDataSource("src1", Collections.emptyMap(), Collections.singletonList(segments.get(0))),
-      "src2", new ImmutableDruidDataSource("src2", Collections.emptyMap(), Collections.singletonList(segments.get(0)))
+  private static final Map DATA_SOURCES = ImmutableMap.of(
+      "src1", new ImmutableDruidDataSource("src1", Collections.emptyMap(), Collections.singletonList(SEGMENTS.get(0))),
+      "src2", new ImmutableDruidDataSource("src2", Collections.emptyMap(), Collections.singletonList(SEGMENTS.get(0)))
   );
 
-  private static final ServerHolder newRealtime = new ServerHolder(
+  private static final ServerHolder NEW_REALTIME = new ServerHolder(
      new ImmutableDruidServer(
          new DruidServerMetadata("name1", "host2", null, 100L, ServerType.REALTIME, "tier1", 0),
          0L,
-         ImmutableMap.of("src1", dataSources.get("src1")),
+         ImmutableMap.of("src1", DATA_SOURCES.get("src1")),
          1
      ),
      new LoadQueuePeonTester()
  );
 
-  private static final ServerHolder newHistorical = new ServerHolder(
+  private static final ServerHolder NEW_HISTORICAL = new ServerHolder(
      new ImmutableDruidServer(
          new DruidServerMetadata("name1", "host2", null, 100L, ServerType.HISTORICAL, "tier1", 0),
          0L,
-         ImmutableMap.of("src1", dataSources.get("src1")),
+         ImmutableMap.of("src1", DATA_SOURCES.get("src1")),
          1
      ),
      new LoadQueuePeonTester()
@@ -104,7 +104,7 @@ public class DruidClusterTest
      new ImmutableDruidServer(
          new DruidServerMetadata("name1", "host1", null, 100L, ServerType.REALTIME, "tier1", 0),
          0L,
-         ImmutableMap.of("src1", dataSources.get("src1")),
+         ImmutableMap.of("src1", DATA_SOURCES.get("src1")),
          1
      ),
      new LoadQueuePeonTester()
@@ -116,7 +116,7 @@ public class DruidClusterTest
      new ImmutableDruidServer(
          new DruidServerMetadata("name1", "host1", null, 100L, ServerType.HISTORICAL, "tier1", 0),
          0L,
-         ImmutableMap.of("src1", dataSources.get("src1")),
+         ImmutableMap.of("src1", DATA_SOURCES.get("src1")),
          1
      ),
      new LoadQueuePeonTester()
@@ -131,11 +131,11 @@ public class DruidClusterTest
     Assert.assertEquals(1, cluster.getHistoricals().values().stream().mapToInt(Collection::size).sum());
     Assert.assertEquals(1, cluster.getRealtimes().size());
 
-    cluster.add(newRealtime);
+    cluster.add(NEW_REALTIME);
     Assert.assertEquals(1, cluster.getHistoricals().values().stream().mapToInt(Collection::size).sum());
     Assert.assertEquals(2, cluster.getRealtimes().size());
 
-    cluster.add(newHistorical);
+    cluster.add(NEW_HISTORICAL);
     Assert.assertEquals(2, cluster.getHistoricals().values().stream().mapToInt(Collection::size).sum());
     Assert.assertEquals(2, cluster.getRealtimes().size());
   }
@@ -143,8 +143,8 @@ public class DruidClusterTest
   @Test
   public void testGetAllServers()
   {
-    cluster.add(newRealtime);
-    cluster.add(newHistorical);
+    cluster.add(NEW_REALTIME);
+    cluster.add(NEW_HISTORICAL);
     final Set expectedRealtimes = cluster.getRealtimes();
     final Map> expectedHistoricals = cluster.getHistoricals();
diff --git a/server/src/test/java/org/apache/druid/server/coordinator/ServerHolderTest.java b/server/src/test/java/org/apache/druid/server/coordinator/ServerHolderTest.java
index 68fca426406..fcebbdee820 100644
--- a/server/src/test/java/org/apache/druid/server/coordinator/ServerHolderTest.java
+++ b/server/src/test/java/org/apache/druid/server/coordinator/ServerHolderTest.java
@@ -37,7 +37,7 @@ import java.util.Map;
 
 public class ServerHolderTest
 {
-  private static final List segments = ImmutableList.of(
+  private static final List SEGMENTS = ImmutableList.of(
       new DataSegment(
           "test",
           Intervals.of("2015-04-12/2015-04-13"),
@@ -62,9 +62,9 @@ public class ServerHolderTest
       )
   );
 
-  private static final Map dataSources = ImmutableMap.of(
-      "src1", new ImmutableDruidDataSource("src1", Collections.emptyMap(), Collections.singletonList(segments.get(0))),
-      "src2", new ImmutableDruidDataSource("src2", Collections.emptyMap(), Collections.singletonList(segments.get(1)))
+  private static final Map DATA_SOURCES = ImmutableMap.of(
+      "src1", new ImmutableDruidDataSource("src1", Collections.emptyMap(), Collections.singletonList(SEGMENTS.get(0))),
+      "src2", new ImmutableDruidDataSource("src2", Collections.emptyMap(), Collections.singletonList(SEGMENTS.get(1)))
   );
 
   @Test
@@ -75,7 +75,7 @@ public class ServerHolderTest
         new ImmutableDruidServer(
             new DruidServerMetadata("name1", "host1", null, 100L, ServerType.HISTORICAL, "tier1", 0),
             0L,
-            ImmutableMap.of("src1", dataSources.get("src1")),
+            ImmutableMap.of("src1", DATA_SOURCES.get("src1")),
            1
        ),
        new LoadQueuePeonTester()
@@ -86,7 +86,7 @@ public class ServerHolderTest
        new ImmutableDruidServer(
            new DruidServerMetadata("name1", "host1", null, 200L, ServerType.HISTORICAL, "tier1", 0),
            100L,
-           ImmutableMap.of("src1", dataSources.get("src1")),
+           ImmutableMap.of("src1", DATA_SOURCES.get("src1")),
            1
        ),
        new LoadQueuePeonTester()
@@ -97,7 +97,7 @@ public class ServerHolderTest
        new ImmutableDruidServer(
            new DruidServerMetadata("name1", "host1", null, 1000L, ServerType.HISTORICAL, "tier1", 0),
            990L,
-           ImmutableMap.of("src1", dataSources.get("src1")),
+           ImmutableMap.of("src1", DATA_SOURCES.get("src1")),
            1
        ),
        new LoadQueuePeonTester()
@@ -108,7 +108,7 @@ public class ServerHolderTest
        new ImmutableDruidServer(
            new DruidServerMetadata("name1", "host1", null, 50L, ServerType.HISTORICAL, "tier1", 0),
            0L,
-           ImmutableMap.of("src1", dataSources.get("src1")),
+           ImmutableMap.of("src1", DATA_SOURCES.get("src1")),
            1
        ),
        new LoadQueuePeonTester()
@@ -126,7 +126,7 @@ public class ServerHolderTest
        new ImmutableDruidServer(
            new DruidServerMetadata("name1", "host1", null, 100L, ServerType.HISTORICAL, "tier1", 0),
            0L,
-           ImmutableMap.of("src1", dataSources.get("src1")),
+           ImmutableMap.of("src1", DATA_SOURCES.get("src1")),
            1
        ),
        new LoadQueuePeonTester()
@@ -136,7 +136,7 @@ public class ServerHolderTest
        new ImmutableDruidServer(
            new DruidServerMetadata("name2", "host1", null, 200L, ServerType.HISTORICAL, "tier1", 0),
            100L,
-           ImmutableMap.of("src1", dataSources.get("src1")),
+           ImmutableMap.of("src1", DATA_SOURCES.get("src1")),
            1
        ),
        new LoadQueuePeonTester()
@@ -146,7 +146,7 @@ public class ServerHolderTest
        new ImmutableDruidServer(
            new DruidServerMetadata("name1", "host2", null, 200L, ServerType.HISTORICAL, "tier1", 0),
            100L,
-           ImmutableMap.of("src1", dataSources.get("src1")),
+           ImmutableMap.of("src1", DATA_SOURCES.get("src1")),
            1
        ),
        new LoadQueuePeonTester()
@@ -156,7 +156,7 @@ public class ServerHolderTest
        new ImmutableDruidServer(
            new DruidServerMetadata("name1", "host1", null, 200L, ServerType.HISTORICAL, "tier2", 0),
            100L,
-           ImmutableMap.of("src1", dataSources.get("src1")),
+           ImmutableMap.of("src1", DATA_SOURCES.get("src1")),
            1
        ),
        new LoadQueuePeonTester()
@@ -166,7 +166,7 @@ public class ServerHolderTest
        new ImmutableDruidServer(
            new DruidServerMetadata("name1", "host1", null, 100L, ServerType.REALTIME, "tier1", 0),
            0L,
-           ImmutableMap.of("src1", dataSources.get("src1")),
+           ImmutableMap.of("src1", DATA_SOURCES.get("src1")),
            1
        ),
        new LoadQueuePeonTester()
diff --git a/server/src/test/java/org/apache/druid/server/coordinator/rules/LoadRuleTest.java b/server/src/test/java/org/apache/druid/server/coordinator/rules/LoadRuleTest.java
index b624cf30dd5..929a9ea1c54 100644
--- a/server/src/test/java/org/apache/druid/server/coordinator/rules/LoadRuleTest.java
+++ b/server/src/test/java/org/apache/druid/server/coordinator/rules/LoadRuleTest.java
@@ -72,15 +72,15 @@ import java.util.concurrent.atomic.AtomicInteger;
 public class LoadRuleTest
 {
   private static final Logger log = new Logger(LoadRuleTest.class);
-  private static final ObjectMapper jsonMapper = new DefaultObjectMapper();
+  private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper();
 
-  private static final ServiceEmitter emitter = new ServiceEmitter(
+  private static final ServiceEmitter EMITTER = new ServiceEmitter(
       "service",
       "host",
       new LoggingEmitter(
           log,
           LoggingEmitter.Level.ERROR,
-          jsonMapper
+          JSON_MAPPER
       )
   );
@@ -94,8 +94,8 @@ public class LoadRuleTest
   @Before
   public void setUp()
   {
-    EmittingLogger.registerEmitter(emitter);
-    emitter.start();
+    EmittingLogger.registerEmitter(EMITTER);
+    EMITTER.start();
     throttler = EasyMock.createMock(ReplicationThrottler.class);
 
     exec = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(1));
@@ -108,7 +108,7 @@ public class LoadRuleTest
   public void tearDown() throws Exception
   {
     exec.shutdown();
-    emitter.close();
+    EMITTER.close();
   }
 
   @Test
@@ -737,11 +737,11 @@ public class LoadRuleTest
     return mockPeon;
   }
 
-  private static final AtomicInteger serverId = new AtomicInteger();
+  private static final AtomicInteger SERVER_ID = new AtomicInteger();
 
   private static DruidServer createServer(String tier)
   {
-    int serverId = LoadRuleTest.serverId.incrementAndGet();
+    int serverId = LoadRuleTest.SERVER_ID.incrementAndGet();
     return new DruidServer(
         "server" + serverId,
         "127.0.0.1:800" + serverId,
diff --git a/server/src/test/java/org/apache/druid/server/coordinator/rules/PeriodDropBeforeRuleTest.java b/server/src/test/java/org/apache/druid/server/coordinator/rules/PeriodDropBeforeRuleTest.java
index c0d64cb11bc..b4aa17d7b45 100644
--- a/server/src/test/java/org/apache/druid/server/coordinator/rules/PeriodDropBeforeRuleTest.java
+++ b/server/src/test/java/org/apache/druid/server/coordinator/rules/PeriodDropBeforeRuleTest.java
@@ -32,7 +32,7 @@ import org.junit.Test;
 
 public class PeriodDropBeforeRuleTest
 {
-  private static final DataSegment.Builder builder = DataSegment.builder()
+  private static final DataSegment.Builder BUILDER = DataSegment.builder()
       .dataSource("test")
       .version(DateTimes.of("2012-12-31T01:00:00").toString())
       .shardSpec(NoneShardSpec.instance());
@@ -60,25 +60,25 @@ public class PeriodDropBeforeRuleTest
 
     Assert.assertTrue(
         rule.appliesTo(
-            builder.interval(new Interval(now.minusDays(3), now.minusDays(2))).build(),
+            BUILDER.interval(new Interval(now.minusDays(3), now.minusDays(2))).build(),
             now
         )
     );
     Assert.assertTrue(
         rule.appliesTo(
-            builder.interval(new Interval(now.minusDays(2), now.minusDays(1))).build(),
+            BUILDER.interval(new Interval(now.minusDays(2), now.minusDays(1))).build(),
             now
         )
     );
     Assert.assertFalse(
         rule.appliesTo(
-            builder.interval(new Interval(now.minusDays(1), now)).build(),
+            BUILDER.interval(new Interval(now.minusDays(1), now)).build(),
             now
         )
     );
     Assert.assertFalse(
         rule.appliesTo(
-            builder.interval(new Interval(now, now.plusDays(1))).build(),
+            BUILDER.interval(new Interval(now, now.plusDays(1))).build(),
             now
         )
     );
diff --git a/server/src/test/java/org/apache/druid/server/coordinator/rules/PeriodDropRuleTest.java b/server/src/test/java/org/apache/druid/server/coordinator/rules/PeriodDropRuleTest.java
index 1e2dafc7dbd..06054a1af87 100644
--- a/server/src/test/java/org/apache/druid/server/coordinator/rules/PeriodDropRuleTest.java
+++ b/server/src/test/java/org/apache/druid/server/coordinator/rules/PeriodDropRuleTest.java
@@ -32,7 +32,7 @@ import org.junit.Test;
  */
 public class PeriodDropRuleTest
 {
-  private static final DataSegment.Builder builder = DataSegment.builder()
+  private static final DataSegment.Builder BUILDER = DataSegment.builder()
       .dataSource("test")
       .version(DateTimes.of("2012-12-31T01:00:00").toString())
       .shardSpec(NoneShardSpec.instance());
@@ -48,7 +48,7 @@ public class PeriodDropRuleTest
 
     Assert.assertTrue(
         rule.appliesTo(
-            builder.interval(
+            BUILDER.interval(
                 new Interval(
                     now.minusDays(2),
                     now.minusDays(1)
@@ -59,7 +59,7 @@ public class PeriodDropRuleTest
     );
     Assert.assertTrue(
         rule.appliesTo(
-            builder.interval(new Interval(now.minusYears(100), now.minusDays(1)))
+            BUILDER.interval(new Interval(now.minusYears(100), now.minusDays(1)))
                    .build(),
             now
         )
@@ -77,28 +77,28 @@ public class PeriodDropRuleTest
 
     Assert.assertTrue(
         rule.appliesTo(
-            builder.interval(new Interval(now.minusWeeks(1), now.minusDays(1)))
+            BUILDER.interval(new Interval(now.minusWeeks(1), now.minusDays(1)))
                    .build(),
             now
         )
     );
     Assert.assertTrue(
         rule.appliesTo(
-            builder.interval(new Interval(now.minusDays(1), now))
+            BUILDER.interval(new Interval(now.minusDays(1), now))
                    .build(),
             now
         )
     );
     Assert.assertFalse(
         rule.appliesTo(
-            builder.interval(new Interval(now.minusYears(1), now.minusDays(1)))
+            BUILDER.interval(new Interval(now.minusYears(1), now.minusDays(1)))
                    .build(),
             now
         )
     );
     Assert.assertFalse(
         rule.appliesTo(
-            builder.interval(new Interval(now.minusMonths(2), now.minusDays(1)))
+            BUILDER.interval(new Interval(now.minusMonths(2), now.minusDays(1)))
                    .build(),
             now
         )
@@ -120,13 +120,13 @@ public class PeriodDropRuleTest
 
     Assert.assertTrue(
         includeFutureRule.appliesTo(
-            builder.interval(new Interval(now.plusDays(1), now.plusDays(2))).build(),
+            BUILDER.interval(new Interval(now.plusDays(1), now.plusDays(2))).build(),
             now
         )
     );
     Assert.assertFalse(
         notIncludeFutureRule.appliesTo(
-            builder.interval(new Interval(now.plusDays(1), now.plusDays(2))).build(),
+            BUILDER.interval(new Interval(now.plusDays(1), now.plusDays(2))).build(),
             now
         )
     );
diff --git a/server/src/test/java/org/apache/druid/server/coordinator/rules/PeriodLoadRuleTest.java b/server/src/test/java/org/apache/druid/server/coordinator/rules/PeriodLoadRuleTest.java
index 53eaa2ef623..91c7510429f 100644
--- a/server/src/test/java/org/apache/druid/server/coordinator/rules/PeriodLoadRuleTest.java
+++ b/server/src/test/java/org/apache/druid/server/coordinator/rules/PeriodLoadRuleTest.java
@@ -37,7 +37,7 @@ import org.junit.Test;
  */
 public class PeriodLoadRuleTest
 {
-  private static final DataSegment.Builder builder = DataSegment.builder()
+  private static final DataSegment.Builder BUILDER = DataSegment.builder()
       .dataSource("test")
       .version(DateTimes.nowUtc().toString())
       .shardSpec(NoneShardSpec.instance());
@@ -52,9 +52,9 @@ public class PeriodLoadRuleTest
         ImmutableMap.of("", 0)
     );
 
-    Assert.assertTrue(rule.appliesTo(builder.interval(Intervals.of("2012-01-01/2012-12-31")).build(), now));
-    Assert.assertTrue(rule.appliesTo(builder.interval(Intervals.of("1000-01-01/2012-12-31")).build(), now));
-    Assert.assertTrue(rule.appliesTo(builder.interval(Intervals.of("0500-01-01/2100-12-31")).build(), now));
+    Assert.assertTrue(rule.appliesTo(BUILDER.interval(Intervals.of("2012-01-01/2012-12-31")).build(), now));
+    Assert.assertTrue(rule.appliesTo(BUILDER.interval(Intervals.of("1000-01-01/2012-12-31")).build(), now));
+    Assert.assertTrue(rule.appliesTo(BUILDER.interval(Intervals.of("0500-01-01/2100-12-31")).build(), now));
   }
 
   @Test
@@ -67,17 +67,17 @@ public class PeriodLoadRuleTest
         ImmutableMap.of("", 0)
     );
 
-    Assert.assertTrue(rule.appliesTo(builder.interval(new Interval(now.minusWeeks(1), now)).build(), now));
+    Assert.assertTrue(rule.appliesTo(BUILDER.interval(new Interval(now.minusWeeks(1), now)).build(), now));
     Assert.assertTrue(
         rule.appliesTo(
-            builder.interval(new Interval(now.minusDays(1), now.plusDays(1)))
+            BUILDER.interval(new Interval(now.minusDays(1), now.plusDays(1)))
                    .build(),
             now
         )
     );
     Assert.assertFalse(
         rule.appliesTo(
-            builder.interval(new Interval(now.plusDays(1), now.plusDays(2)))
+            BUILDER.interval(new Interval(now.plusDays(1), now.plusDays(2)))
                    .build(),
             now
         )
@@ -96,13 +96,13 @@ public class PeriodLoadRuleTest
 
     Assert.assertTrue(
         rule.appliesTo(
-            builder.interval(new Interval(now.minusWeeks(1), now.plusWeeks(1))).build(),
+            BUILDER.interval(new Interval(now.minusWeeks(1), now.plusWeeks(1))).build(),
             now
         )
     );
     Assert.assertTrue(
         rule.appliesTo(
-            builder.interval(
+            BUILDER.interval(
                 new Interval(now.minusMonths(1).minusWeeks(1), now.minusMonths(1).plusWeeks(1))
             ).build(),
             now
@@ -127,13 +127,13 @@ public class PeriodLoadRuleTest
 
     Assert.assertTrue(
         includeFutureRule.appliesTo(
-            builder.interval(new Interval(now.plusDays(1), now.plusDays(2))).build(),
+            BUILDER.interval(new Interval(now.plusDays(1), now.plusDays(2))).build(),
             now
         )
     );
     Assert.assertFalse(
         notIncludeFutureRule.appliesTo(
-            builder.interval(new Interval(now.plusDays(1), now.plusDays(2))).build(),
+            BUILDER.interval(new Interval(now.plusDays(1), now.plusDays(2))).build(),
             now
         )
     );
diff --git a/server/src/test/java/org/apache/druid/server/http/LookupCoordinatorResourceTest.java b/server/src/test/java/org/apache/druid/server/http/LookupCoordinatorResourceTest.java
index d1e673d6502..8c5a7b91c91 100644
--- a/server/src/test/java/org/apache/druid/server/http/LookupCoordinatorResourceTest.java
+++ b/server/src/test/java/org/apache/druid/server/http/LookupCoordinatorResourceTest.java
@@ -49,7 +49,7 @@ import java.util.Set;
 
 public class LookupCoordinatorResourceTest
 {
-  private static final ObjectMapper mapper = new DefaultObjectMapper();
+  private static final ObjectMapper MAPPER = new DefaultObjectMapper();
   private static final String LOOKUP_TIER = "lookupTier";
   private static final String LOOKUP_NAME = "lookupName";
   private static final LookupExtractorFactoryMapContainer SINGLE_LOOKUP = new LookupExtractorFactoryMapContainer(
@@ -69,7 +69,7 @@ public class LookupCoordinatorResourceTest
     @Override
     public InputStream openStream() throws IOException
     {
-      return new ByteArrayInputStream(StringUtils.toUtf8(mapper.writeValueAsString(SINGLE_TIER_MAP)));
+      return new ByteArrayInputStream(StringUtils.toUtf8(MAPPER.writeValueAsString(SINGLE_TIER_MAP)));
     }
   };
   private static final ByteSource EMPTY_MAP_SOURCE = new ByteSource()
@@ -77,7 +77,7 @@ public class LookupCoordinatorResourceTest
     @Override
     public InputStream openStream() throws IOException
     {
-      return new ByteArrayInputStream(StringUtils.toUtf8(mapper.writeValueAsString(SINGLE_LOOKUP)));
+      return new ByteArrayInputStream(StringUtils.toUtf8(MAPPER.writeValueAsString(SINGLE_LOOKUP)));
     }
   };
@@ -100,8 +100,8 @@
     EasyMock.replay(lookupCoordinatorManager);
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     final Response response = lookupCoordinatorResource.getTiers(false);
     Assert.assertEquals(200, response.getStatus());
@@ -118,8 +118,8 @@
     EasyMock.replay(lookupCoordinatorManager);
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     final Response response = lookupCoordinatorResource.getTiers(false);
     Assert.assertEquals(404, response.getStatus());
@@ -136,8 +136,8 @@
     EasyMock.replay(lookupCoordinatorManager);
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     final Response response = lookupCoordinatorResource.getTiers(false);
     Assert.assertEquals(500, response.getStatus());
@@ -156,8 +156,8 @@
     EasyMock.replay(lookupCoordinatorManager);
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     final Response response = lookupCoordinatorResource.getTiers(true);
     Assert.assertEquals(200, response.getStatus());
@@ -178,8 +178,8 @@
     EasyMock.replay(lookupCoordinatorManager);
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     final Response response = lookupCoordinatorResource.getTiers(true);
     Assert.assertEquals(500, response.getStatus());
@@ -202,8 +202,8 @@
     EasyMock.replay(lookupCoordinatorManager);
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     final Response response = lookupCoordinatorResource.getSpecificLookup(LOOKUP_TIER, LOOKUP_NAME);
     Assert.assertEquals(200, response.getStatus());
@@ -219,8 +219,8 @@
     EasyMock.replay(lookupCoordinatorManager);
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     final Response response = lookupCoordinatorResource.getSpecificTier(LOOKUP_TIER, true);
     Assert.assertEquals(200, response.getStatus());
@@ -239,8 +239,8 @@
     EasyMock.replay(lookupCoordinatorManager);
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     final Response response = lookupCoordinatorResource.getSpecificLookup(LOOKUP_TIER, LOOKUP_NAME);
     Assert.assertEquals(404, response.getStatus());
@@ -255,8 +255,8 @@
     EasyMock.replay(lookupCoordinatorManager);
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     Assert.assertEquals(400, lookupCoordinatorResource.getSpecificLookup("foo", null).getStatus());
     Assert.assertEquals(400, lookupCoordinatorResource.getSpecificLookup("foo", "").getStatus());
@@ -277,8 +277,8 @@
     EasyMock.replay(lookupCoordinatorManager);
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     final Response response = lookupCoordinatorResource.getSpecificLookup(LOOKUP_TIER, LOOKUP_NAME);
     Assert.assertEquals(500, response.getStatus());
@@ -308,8 +308,8 @@
 
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     final Response response = lookupCoordinatorResource.deleteTier(
         LOOKUP_TIER,
@@ -351,8 +351,8 @@
 
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     final Response response = lookupCoordinatorResource.deleteLookup(
         LOOKUP_TIER,
@@ -396,8 +396,8 @@
 
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     final Response response = lookupCoordinatorResource.deleteLookup(
         LOOKUP_TIER,
@@ -442,8 +442,8 @@
 
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     final Response response = lookupCoordinatorResource.deleteLookup(
         LOOKUP_TIER,
@@ -472,8 +472,8 @@
     EasyMock.replay(lookupCoordinatorManager);
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     Assert.assertEquals(400, lookupCoordinatorResource.deleteLookup("foo", null, null, null, null).getStatus());
     Assert.assertEquals(400, lookupCoordinatorResource.deleteLookup(null, null, null, null, null).getStatus());
@@ -504,8 +504,8 @@
     EasyMock.replay(lookupCoordinatorManager, request);
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     final Response response = lookupCoordinatorResource.updateAllLookups(
         SINGLE_TIER_MAP_SOURCE.openStream(),
@@ -548,8 +548,8 @@
 
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     final Response response = lookupCoordinatorResource.updateAllLookups(
         SINGLE_TIER_MAP_SOURCE.openStream(),
@@ -592,8 +592,8 @@
 
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     final Response response = lookupCoordinatorResource.updateAllLookups(
         SINGLE_TIER_MAP_SOURCE.openStream(),
@@ -638,8 +638,8 @@
 
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     final Response response = lookupCoordinatorResource.createOrUpdateLookup(
         LOOKUP_TIER,
@@ -686,8 +686,8 @@
 
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     final Response response = lookupCoordinatorResource.createOrUpdateLookup(
         LOOKUP_TIER,
@@ -735,8 +735,8 @@
 
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     final Response response = lookupCoordinatorResource.createOrUpdateLookup(
         LOOKUP_TIER,
@@ -773,8 +773,8 @@
 
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
 
     EasyMock.replay(lookupCoordinatorManager, request);
@@ -824,8 +824,8 @@
     EasyMock.replay(lookupCoordinatorManager);
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     final Response response = lookupCoordinatorResource.getSpecificTier(LOOKUP_TIER, false);
     Assert.assertEquals(200, response.getStatus());
@@ -844,8 +844,8 @@
     EasyMock.replay(lookupCoordinatorManager);
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     final Response response = lookupCoordinatorResource.getSpecificTier(tier, false);
     Assert.assertEquals(404, response.getStatus());
@@ -860,8 +860,8 @@
     EasyMock.replay(lookupCoordinatorManager);
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     final Response response = lookupCoordinatorResource.getSpecificTier(tier, false);
     Assert.assertEquals(400, response.getStatus());
@@ -878,8 +878,8 @@
     EasyMock.replay(lookupCoordinatorManager);
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     final Response response = lookupCoordinatorResource.getSpecificTier(tier, false);
     Assert.assertEquals(404, response.getStatus());
@@ -897,8 +897,8 @@
     EasyMock.replay(lookupCoordinatorManager);
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     final Response response = lookupCoordinatorResource.getSpecificTier(tier, false);
     Assert.assertEquals(500, response.getStatus());
@@ -920,8 +920,8 @@
 
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
 
     final Response response = lookupCoordinatorResource.getAllLookupsStatus(false);
@@ -953,8 +953,8 @@
 
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
 
     final Response response = lookupCoordinatorResource.getLookupStatusForTier(LOOKUP_TIER, false);
@@ -983,8 +983,8 @@
 
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
 
     final Response response = lookupCoordinatorResource.getSpecificLookupStatus(LOOKUP_TIER, LOOKUP_NAME, false);
@@ -1001,8 +1001,8 @@
   {
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         EasyMock.createStrictMock(LookupCoordinatorManager.class),
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
 
     HostAndPort newNode = HostAndPort.fromParts("localhost", 4352);
@@ -1023,8 +1023,8 @@
   {
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         EasyMock.createStrictMock(LookupCoordinatorManager.class),
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
 
     HostAndPort newNode = HostAndPort.fromParts("localhost", 4352);
@@ -1054,8 +1054,8 @@
 
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
 
     final Response response = lookupCoordinatorResource.getAllNodesStatus(false);
@@ -1086,8 +1086,8 @@
 
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
 
     final Response response = lookupCoordinatorResource.getNodesStatusInTier(LOOKUP_TIER);
@@ -1114,8 +1114,8 @@
 
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
 
     final Response response = lookupCoordinatorResource.getSpecificNodeStatus(LOOKUP_TIER, LOOKUP_NODE);
@@ -1160,8 +1160,8 @@
     EasyMock.replay(lookupCoordinatorManager);
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     final Response response = lookupCoordinatorResource.getAllLookupSpecs();
     Assert.assertEquals(Status.OK.getStatusCode(), response.getStatus());
@@ -1181,8 +1181,8 @@
     EasyMock.replay(lookupCoordinatorManager);
     final LookupCoordinatorResource lookupCoordinatorResource = new LookupCoordinatorResource(
         lookupCoordinatorManager,
-        mapper,
-        mapper
+        MAPPER,
+        MAPPER
     );
     final Response response = lookupCoordinatorResource.getAllLookupSpecs();
     Assert.assertEquals(Status.NOT_FOUND.getStatusCode(), response.getStatus());
diff --git a/server/src/test/java/org/apache/druid/server/log/LoggingRequestLoggerTest.java b/server/src/test/java/org/apache/druid/server/log/LoggingRequestLoggerTest.java
index 9d938f012a3..b7e40bfe217 100644
--- a/server/src/test/java/org/apache/druid/server/log/LoggingRequestLoggerTest.java
+++ b/server/src/test/java/org/apache/druid/server/log/LoggingRequestLoggerTest.java
@@ -63,8 +63,8 @@ import java.util.Map;
 // Mostly just test that it doesn't crash
 public class LoggingRequestLoggerTest
 {
-  private static final ObjectMapper mapper = new DefaultObjectMapper();
-  private static final ByteArrayOutputStream baos = new ByteArrayOutputStream();
+  private static final ObjectMapper MAPPER = new DefaultObjectMapper();
+  private static final ByteArrayOutputStream BAOS = new ByteArrayOutputStream();
   private static Appender appender;
 
   final DateTime timestamp = DateTimes.of("2016-01-01T00:00:00Z");
@@ -157,7 +157,7 @@ public class LoggingRequestLoggerTest
     appender = OutputStreamAppender
         .newBuilder()
         .setName("test stream")
-        .setTarget(baos)
+        .setTarget(BAOS)
         .setLayout(JsonLayout.createLayout(false, true, false, true, true, StandardCharsets.UTF_8))
         .build();
     final Logger logger = (Logger)
@@ -169,7 +169,7 @@ public class LoggingRequestLoggerTest
   @After
   public void tearDown()
   {
-    baos.reset();
+    BAOS.reset();
   }
 
   @AfterClass
@@ -193,7 +193,7 @@ public class LoggingRequestLoggerTest
   {
     final LoggingRequestLogger requestLogger = new LoggingRequestLogger(new DefaultObjectMapper(), true, false);
     requestLogger.logNativeQuery(logLine);
-    final Map map = readContextMap(baos.toByteArray());
+    final Map map = readContextMap(BAOS.toByteArray());
     Assert.assertEquals("datasource", map.get("dataSource"));
     Assert.assertEquals("PT86400S", map.get("duration"));
     Assert.assertEquals("false", map.get("hasFilters"));
@@ -209,7 +209,7 @@ public class LoggingRequestLoggerTest
   {
     final LoggingRequestLogger requestLogger = new LoggingRequestLogger(new DefaultObjectMapper(), true, true);
     requestLogger.logNativeQuery(logLine);
-    final Map map = readContextMap(baos.toByteArray());
+    final Map map = readContextMap(BAOS.toByteArray());
     Assert.assertEquals("datasource", map.get("dataSource"));
     Assert.assertEquals("PT86400S", map.get("duration"));
     Assert.assertEquals("false", map.get("hasFilters"));
@@ -230,7 +230,7 @@ public class LoggingRequestLoggerTest
         remoteAddr,
         queryStats
     ));
-    final Map map = readContextMap(baos.toByteArray());
+    final Map map = readContextMap(BAOS.toByteArray());
     Assert.assertEquals("datasource", map.get("dataSource"));
     Assert.assertEquals("PT86400S", map.get("duration"));
     Assert.assertEquals("false", map.get("hasFilters"));
@@ -251,7 +251,7 @@ public class LoggingRequestLoggerTest
         remoteAddr,
         queryStats
     ));
-    final Map map = readContextMap(baos.toByteArray());
+    final Map map = readContextMap(BAOS.toByteArray());
     Assert.assertEquals("datasource", map.get("dataSource"));
     Assert.assertEquals("PT86400S", map.get("duration"));
     Assert.assertEquals("false", map.get("hasFilters"));
@@ -272,7 +272,7 @@ public class LoggingRequestLoggerTest
         remoteAddr,
         queryStats
     ));
-    final Map map = readContextMap(baos.toByteArray());
+    final Map map = readContextMap(BAOS.toByteArray());
     Assert.assertEquals("A,B", map.get("dataSource"));
     Assert.assertEquals("true", map.get("isNested"));
     Assert.assertEquals("PT86400S", map.get("duration"));
@@ -285,7 +285,7 @@ public class LoggingRequestLoggerTest
   private static Map readContextMap(byte[] bytes) throws Exception
   {
-    final Map rawMap = mapper.readValue(bytes, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT);
+    final Map rawMap = MAPPER.readValue(bytes, JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT);
     final Object contextMap = rawMap.get("contextMap");
     if (contextMap == null) {
       return null;
diff --git a/services/src/main/java/org/apache/druid/cli/ExportMetadata.java b/services/src/main/java/org/apache/druid/cli/ExportMetadata.java
index 27f5509570e..30906860e17 100644
--- a/services/src/main/java/org/apache/druid/cli/ExportMetadata.java
+++ b/services/src/main/java/org/apache/druid/cli/ExportMetadata.java
@@ -126,9 +126,9 @@ public class ExportMetadata extends GuiceRunnable
 
   private static final Logger log = new Logger(ExportMetadata.class);
 
-  private static final CSVParser parser = new CSVParser();
+  private static final CSVParser PARSER = new CSVParser();
 
-  private static final ObjectMapper jsonMapper = new DefaultObjectMapper();
+  private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper();
 
   public ExportMetadata()
   {
@@ -189,9 +189,9 @@ public class ExportMetadata extends GuiceRunnable
   public void run()
   {
     InjectableValues.Std injectableValues = new InjectableValues.Std();
-    injectableValues.addValue(ObjectMapper.class, jsonMapper);
+    injectableValues.addValue(ObjectMapper.class, JSON_MAPPER);
     injectableValues.addValue(DataSegment.PruneLoadSpecHolder.class, DataSegment.PruneLoadSpecHolder.DEFAULT);
-    jsonMapper.setInjectableValues(injectableValues);
+    JSON_MAPPER.setInjectableValues(injectableValues);
 
     if (hadoopStorageDirectory != null && newLocalPath != null) {
       throw new IllegalArgumentException(
@@ -269,7 +269,7 @@ public class ExportMetadata extends GuiceRunnable
     ) {
       String line;
       while ((line = reader.readLine()) != null) {
-        String[] parsed = parser.parseLine(line);
+        String[] parsed = PARSER.parseLine(line);
         StringBuilder newLineBuilder = new StringBuilder();
         newLineBuilder.append(parsed[0]).append(","); //dataSource
@@ -300,7 +300,7 @@ public class ExportMetadata extends GuiceRunnable
     ) {
       String line;
      while ((line = reader.readLine()) != null) {
-        String[] parsed = parser.parseLine(line);
+        String[] parsed = PARSER.parseLine(line);
         StringBuilder newLineBuilder = new StringBuilder();
         newLineBuilder.append(parsed[0]).append(","); //id
@@ -331,7 +331,7 @@ public class ExportMetadata extends GuiceRunnable
     ) {
       String line;
       while ((line = reader.readLine()) != null) {
-        String[] parsed = parser.parseLine(line);
+        String[] parsed = PARSER.parseLine(line);
         StringBuilder newLineBuilder = new StringBuilder();
         newLineBuilder.append(parsed[0]).append(","); //name
@@ -360,7 +360,7 @@ public class ExportMetadata extends GuiceRunnable
     ) {
       String line;
       while ((line = reader.readLine()) != null) {
-        String[] parsed = parser.parseLine(line);
+        String[] parsed = PARSER.parseLine(line);
         StringBuilder newLineBuilder = new StringBuilder();
         newLineBuilder.append(parsed[0]).append(","); //id
@@ -392,7 +392,7 @@ public class ExportMetadata extends GuiceRunnable
     ) {
       String line;
       while ((line = reader.readLine()) != null) {
-        String[] parsed = parser.parseLine(line);
+        String[] parsed = PARSER.parseLine(line);
         StringBuilder newLineBuilder = new StringBuilder();
         newLineBuilder.append(parsed[0]).append(","); //id
         newLineBuilder.append(parsed[1]).append(","); //dataSource
@@ -425,7 +425,7 @@ public class ExportMetadata extends GuiceRunnable
       String payload
   ) throws IOException
   {
-    DataSegment segment = jsonMapper.readValue(DatatypeConverter.parseHexBinary(payload), DataSegment.class);
+    DataSegment segment = JSON_MAPPER.readValue(DatatypeConverter.parseHexBinary(payload), DataSegment.class);
     String uniqueId = getUniqueIDFromLocalLoadSpec(segment.getLoadSpec());
     String segmentPath = DataSegmentPusher.getDefaultStorageDirWithExistingUniquePath(segment, uniqueId);
@@ -452,7 +452,7 @@ public class ExportMetadata extends GuiceRunnable
       );
     }
 
-    String serialized = jsonMapper.writeValueAsString(segment);
+    String serialized = JSON_MAPPER.writeValueAsString(segment);
     if (useHexBlobs) {
       return DatatypeConverter.printHexBinary(StringUtils.toUtf8(serialized));
     } else {
diff --git a/services/src/main/java/org/apache/druid/cli/PullDependencies.java b/services/src/main/java/org/apache/druid/cli/PullDependencies.java
index ebee879288f..2d98aecf751 100644
--- a/services/src/main/java/org/apache/druid/cli/PullDependencies.java
+++ b/services/src/main/java/org/apache/druid/cli/PullDependencies.java
@@ -73,7 +73,7 @@ public class PullDependencies implements Runnable
   private static final Logger log = new Logger(PullDependencies.class);
 
   @SuppressWarnings("MismatchedQueryAndUpdateOfCollection")
-  private static final Set exclusions = new HashSet<>(
+  private static final Set EXCLUSIONS = new HashSet<>(
 
       /*
       // It is possible that extensions will pull down a lot of jars that are either
@@ -389,7 +389,7 @@ public class PullDependencies implements Runnable
                   private boolean accept(final Artifact artifact)
                   {
-                    return exclusions.contains(artifact.getGroupId());
+                    return EXCLUSIONS.contains(artifact.getGroupId());
                   }
                 }
             )
@@ -400,7 +400,7 @@ public class PullDependencies implements Runnable
       final List artifacts = aether.resolveArtifacts(dependencyRequest);
 
       for (Artifact artifact : artifacts) {
-        if (!exclusions.contains(artifact.getGroupId())) {
+        if (!EXCLUSIONS.contains(artifact.getGroupId())) {
          log.info("Adding file [%s] at [%s]", artifact.getFile().getName(), toLocation.getAbsolutePath());
          FileUtils.copyFileToDirectory(artifact.getFile(), toLocation);
        } else {
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/expression/builtin/ArrayContainsOperatorConversion.java b/sql/src/main/java/org/apache/druid/sql/calcite/expression/builtin/ArrayContainsOperatorConversion.java
index b98f221d3ef..dad30ddebbd 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/expression/builtin/ArrayContainsOperatorConversion.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/expression/builtin/ArrayContainsOperatorConversion.java
@@ -27,7 +27,7 @@ import org.apache.druid.sql.calcite.expression.OperatorConversions;
 
 public class ArrayContainsOperatorConversion extends BaseExpressionDimFilterOperatorConversion
 {
-  private static final String exprFunction = "array_contains";
+  private static final String EXPR_FUNCTION = "array_contains";
 
   private static final SqlFunction SQL_FUNCTION = OperatorConversions
       .operatorBuilder("ARRAY_CONTAINS")
@@ -49,6 +49,6 @@ public class ArrayContainsOperatorConversion extends BaseExpressionDimFilterOper
 
   public ArrayContainsOperatorConversion()
   {
-    super(SQL_FUNCTION, exprFunction);
+    super(SQL_FUNCTION, EXPR_FUNCTION);
   }
 }
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/expression/builtin/ArrayOverlapOperatorConversion.java b/sql/src/main/java/org/apache/druid/sql/calcite/expression/builtin/ArrayOverlapOperatorConversion.java
index b6b46e6a8b7..7fa3eba013f 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/expression/builtin/ArrayOverlapOperatorConversion.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/expression/builtin/ArrayOverlapOperatorConversion.java
@@ -27,7 +27,7 @@ import org.apache.druid.sql.calcite.expression.OperatorConversions;
 
 public class ArrayOverlapOperatorConversion extends BaseExpressionDimFilterOperatorConversion
 {
-  private static final String exprFunction = "array_overlap";
+  private static final String EXPR_FUNCTION = "array_overlap";
 
   private static final SqlFunction SQL_FUNCTION = OperatorConversions
       .operatorBuilder("ARRAY_OVERLAP")
@@ -49,6 +49,6 @@ public class ArrayOverlapOperatorConversion extends BaseExpressionDimFilterOpera
 
   public ArrayOverlapOperatorConversion()
   {
-    super(SQL_FUNCTION, exprFunction);
+    super(SQL_FUNCTION, EXPR_FUNCTION);
   }
}
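Note on the pattern applied throughout this patch: every renamed field is static final, and the Java naming convention reserves UPPER_SNAKE_CASE for such constants while camelCase stays with ordinary fields and locals. This kind of rename is typically enforced by a static-analysis rule such as Checkstyle's ConstantName check; whether that is how this project enforces it is an assumption here, since the build configuration is not part of this diff. A minimal, self-contained sketch of the before/after shape, using illustrative names that are not taken from the Druid codebase:

public class ConstantNamingExample
{
  // Before a rename like those above: a static final constant named
  // like an ordinary field, which constant-name checks would flag.
  // private static final int maxRows = 250000;

  // After: static final constants use UPPER_SNAKE_CASE.
  private static final int MAX_ROWS = 250000;

  // Non-static, non-final state keeps camelCase; the convention
  // applies only to static final members.
  private int rowCount;

  public boolean isFull()
  {
    return rowCount >= MAX_ROWS;
  }

  public static void main(String[] args)
  {
    ConstantNamingExample example = new ConstantNamingExample();
    System.out.println("full: " + example.isFull() + " (limit " + MAX_ROWS + ")");
  }
}

Because the renamed fields are private (or package-private test fixtures), the change is behavior-preserving and purely mechanical, which is why the diff touches many files without altering any logic.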