mirror of https://github.com/apache/druid.git
Add Checkstyle for constant name static final (#8060)
* checkstyle for constant field name
* merging with upstream
* review-1
* unknown changes
* review-2
* merging with master
* review-2 1 changes
* review changes-2 2
* bug fix
This commit is contained in:
parent d117bfb149
commit 33f0753a70
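Below is a minimal before/after sketch (hypothetical class name; the constant is taken from the FlattenJSONBenchmark change in this diff) of the rename pattern the commit applies to static final constants:

// Hypothetical illustration of the rename applied throughout this commit.
public class ConstantNamingSketch
{
  // Before (flagged by the new ConstantName check): private static final int numEvents = 100000;
  // After (passes the check):
  private static final int NUM_EVENTS = 100000;
}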
@@ -48,7 +48,7 @@ import java.util.concurrent.TimeUnit;
@Fork(value = 1)
public class FlattenJSONBenchmark
{
-private static final int numEvents = 100000;
+private static final int NUM_EVENTS = 100000;

List<String> flatInputs;
List<String> nestedInputs;

@@ -67,15 +67,15 @@ public class FlattenJSONBenchmark
{
FlattenJSONBenchmarkUtil gen = new FlattenJSONBenchmarkUtil();
flatInputs = new ArrayList<String>();
-for (int i = 0; i < numEvents; i++) {
+for (int i = 0; i < NUM_EVENTS; i++) {
flatInputs.add(gen.generateFlatEvent());
}
nestedInputs = new ArrayList<String>();
-for (int i = 0; i < numEvents; i++) {
+for (int i = 0; i < NUM_EVENTS; i++) {
nestedInputs.add(gen.generateNestedEvent());
}
jqInputs = new ArrayList<String>();
-for (int i = 0; i < numEvents; i++) {
+for (int i = 0; i < NUM_EVENTS; i++) {
jqInputs.add(gen.generateNestedEvent()); // reuse the same event as "nested"
}

@@ -95,7 +95,7 @@ public class FlattenJSONBenchmark
for (String s : parsed.keySet()) {
blackhole.consume(parsed.get(s));
}
-flatCounter = (flatCounter + 1) % numEvents;
+flatCounter = (flatCounter + 1) % NUM_EVENTS;
return parsed;
}

@@ -108,7 +108,7 @@ public class FlattenJSONBenchmark
for (String s : parsed.keySet()) {
blackhole.consume(parsed.get(s));
}
-nestedCounter = (nestedCounter + 1) % numEvents;
+nestedCounter = (nestedCounter + 1) % NUM_EVENTS;
return parsed;
}

@@ -121,7 +121,7 @@ public class FlattenJSONBenchmark
for (String s : parsed.keySet()) {
blackhole.consume(parsed.get(s));
}
-jqCounter = (jqCounter + 1) % numEvents;
+jqCounter = (jqCounter + 1) % NUM_EVENTS;
return parsed;
}

@@ -134,7 +134,7 @@ public class FlattenJSONBenchmark
for (String s : parsed.keySet()) {
blackhole.consume(parsed.get(s));
}
-nestedCounter = (nestedCounter + 1) % numEvents;
+nestedCounter = (nestedCounter + 1) % NUM_EVENTS;
return parsed;
}

@@ -147,7 +147,7 @@ public class FlattenJSONBenchmark
for (String s : parsed.keySet()) {
blackhole.consume(parsed.get(s));
}
-nestedCounter = (nestedCounter + 1) % numEvents;
+nestedCounter = (nestedCounter + 1) % NUM_EVENTS;
return parsed;
}

@@ -46,7 +46,7 @@ public class FloatCompressionBenchmarkFileGenerator
{
private static final Logger log = new Logger(FloatCompressionBenchmarkFileGenerator.class);
public static final int ROW_NUM = 5000000;
-public static final List<CompressionStrategy> compressions =
+public static final List<CompressionStrategy> COMPRESSIONS =
ImmutableList.of(
CompressionStrategy.LZ4,
CompressionStrategy.NONE

@@ -138,7 +138,7 @@ public class FloatCompressionBenchmarkFileGenerator
// create compressed files using all combinations of CompressionStrategy and FloatEncoding provided
for (Map.Entry<String, BenchmarkColumnValueGenerator> entry : generators.entrySet()) {
-for (CompressionStrategy compression : compressions) {
+for (CompressionStrategy compression : COMPRESSIONS) {
String name = entry.getKey() + "-" + compression;
log.info("%s: ", name);
File compFile = new File(dir, name);

@@ -62,7 +62,7 @@ public class GenericIndexedBenchmark
{
public static final int ITERATIONS = 10000;

-static final ObjectStrategy<byte[]> byteArrayStrategy = new ObjectStrategy<byte[]>()
+static final ObjectStrategy<byte[]> BYTE_ARRAY_STRATEGY = new ObjectStrategy<byte[]>()
{
@Override
public Class<byte[]> getClazz()

@@ -108,7 +108,7 @@ public class GenericIndexedBenchmark
GenericIndexedWriter<byte[]> genericIndexedWriter = new GenericIndexedWriter<>(
new OffHeapMemorySegmentWriteOutMedium(),
"genericIndexedBenchmark",
-byteArrayStrategy
+BYTE_ARRAY_STRATEGY
);
genericIndexedWriter.open();

@@ -132,7 +132,7 @@ public class GenericIndexedBenchmark
FileChannel fileChannel = FileChannel.open(file.toPath());
MappedByteBuffer byteBuffer = fileChannel.map(FileChannel.MapMode.READ_ONLY, 0, file.length());
-genericIndexed = GenericIndexed.read(byteBuffer, byteArrayStrategy, SmooshedFileMapper.load(smooshDir));
+genericIndexed = GenericIndexed.read(byteBuffer, BYTE_ARRAY_STRATEGY, SmooshedFileMapper.load(smooshDir));
}

@Setup(Level.Trial)

@@ -52,8 +52,8 @@ public class IncrementalIndexRowTypeBenchmark
private IncrementalIndex incFloatIndex;
private IncrementalIndex incStrIndex;
private static AggregatorFactory[] aggs;
-static final int dimensionCount = 8;
-static final int maxRows = 250000;
+static final int DIMENSION_COUNT = 8;
+static final int MAX_ROWS = 250000;

private ArrayList<InputRow> longRows = new ArrayList<InputRow>();
private ArrayList<InputRow> floatRows = new ArrayList<InputRow>();

@@ -61,9 +61,9 @@ public class IncrementalIndexRowTypeBenchmark
static {
-final ArrayList<AggregatorFactory> ingestAggregatorFactories = new ArrayList<>(dimensionCount + 1);
+final ArrayList<AggregatorFactory> ingestAggregatorFactories = new ArrayList<>(DIMENSION_COUNT + 1);
ingestAggregatorFactories.add(new CountAggregatorFactory("rows"));
-for (int i = 0; i < dimensionCount; ++i) {
+for (int i = 0; i < DIMENSION_COUNT; ++i) {
ingestAggregatorFactories.add(
new LongSumAggregatorFactory(
StringUtils.format("sumResult%s", i),

@@ -125,23 +125,23 @@ public class IncrementalIndexRowTypeBenchmark
.setSimpleTestingIndexSchema(aggs)
.setDeserializeComplexMetrics(false)
.setReportParseExceptions(false)
-.setMaxRowCount(maxRows)
+.setMaxRowCount(MAX_ROWS)
.buildOnheap();
}

@Setup
public void setup()
{
-for (int i = 0; i < maxRows; i++) {
-longRows.add(getLongRow(0, dimensionCount));
+for (int i = 0; i < MAX_ROWS; i++) {
+longRows.add(getLongRow(0, DIMENSION_COUNT));
}

-for (int i = 0; i < maxRows; i++) {
-floatRows.add(getFloatRow(0, dimensionCount));
+for (int i = 0; i < MAX_ROWS; i++) {
+floatRows.add(getFloatRow(0, DIMENSION_COUNT));
}

-for (int i = 0; i < maxRows; i++) {
-stringRows.add(getStringRow(0, dimensionCount));
+for (int i = 0; i < MAX_ROWS; i++) {
+stringRows.add(getStringRow(0, DIMENSION_COUNT));
}
}

@@ -156,10 +156,10 @@ public class IncrementalIndexRowTypeBenchmark
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
-@OperationsPerInvocation(maxRows)
+@OperationsPerInvocation(MAX_ROWS)
public void normalLongs(Blackhole blackhole) throws Exception
{
-for (int i = 0; i < maxRows; i++) {
+for (int i = 0; i < MAX_ROWS; i++) {
InputRow row = longRows.get(i);
int rv = incIndex.add(row).getRowCount();
blackhole.consume(rv);

@@ -169,10 +169,10 @@ public class IncrementalIndexRowTypeBenchmark
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
-@OperationsPerInvocation(maxRows)
+@OperationsPerInvocation(MAX_ROWS)
public void normalFloats(Blackhole blackhole) throws Exception
{
-for (int i = 0; i < maxRows; i++) {
+for (int i = 0; i < MAX_ROWS; i++) {
InputRow row = floatRows.get(i);
int rv = incFloatIndex.add(row).getRowCount();
blackhole.consume(rv);

@@ -182,10 +182,10 @@ public class IncrementalIndexRowTypeBenchmark
@Benchmark
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
-@OperationsPerInvocation(maxRows)
+@OperationsPerInvocation(MAX_ROWS)
public void normalStrings(Blackhole blackhole) throws Exception
{
-for (int i = 0; i < maxRows; i++) {
+for (int i = 0; i < MAX_ROWS; i++) {
InputRow row = stringRows.get(i);
int rv = incStrIndex.add(row).getRowCount();
blackhole.consume(rv);

@@ -46,11 +46,11 @@ public class LongCompressionBenchmarkFileGenerator
{
private static final Logger log = new Logger(LongCompressionBenchmarkFileGenerator.class);
public static final int ROW_NUM = 5000000;
-public static final List<CompressionStrategy> compressions =
+public static final List<CompressionStrategy> COMPRESSIONS =
ImmutableList.of(
CompressionStrategy.LZ4,
CompressionStrategy.NONE);
-public static final List<CompressionFactory.LongEncodingStrategy> encodings =
+public static final List<CompressionFactory.LongEncodingStrategy> ENCODINGS =
ImmutableList.of(CompressionFactory.LongEncodingStrategy.AUTO, CompressionFactory.LongEncodingStrategy.LONGS);

private static String dirPath = "longCompress/";

@@ -130,8 +130,8 @@ public class LongCompressionBenchmarkFileGenerator
// create compressed files using all combinations of CompressionStrategy and LongEncoding provided
for (Map.Entry<String, BenchmarkColumnValueGenerator> entry : generators.entrySet()) {
-for (CompressionStrategy compression : compressions) {
-for (CompressionFactory.LongEncodingStrategy encoding : encodings) {
+for (CompressionStrategy compression : COMPRESSIONS) {
+for (CompressionFactory.LongEncodingStrategy encoding : ENCODINGS) {
String name = entry.getKey() + "-" + compression + "-" + encoding;
log.info("%s: ", name);
File compFile = new File(dir, name);

@@ -36,7 +36,7 @@ import java.util.concurrent.atomic.AtomicLong;
public class StupidPoolConcurrencyBenchmark
{
-private static final Object simpleObject = new Object();
+private static final Object SIMPLE_OBJECT = new Object();

@State(Scope.Benchmark)
public static class BenchmarkPool

@@ -50,7 +50,7 @@ public class StupidPoolConcurrencyBenchmark
public Object get()
{
numPools.incrementAndGet();
-return simpleObject;
+return SIMPLE_OBJECT;
}
}
);

@@ -116,12 +116,12 @@ public class TimeCompareBenchmark
@Param({"100"})
private int threshold;

-protected static final Map<String, String> scriptDoubleSum = new HashMap<>();
+protected static final Map<String, String> SCRIPT_DOUBLE_SUM = new HashMap<>();

static {
-scriptDoubleSum.put("fnAggregate", "function aggregate(current, a) { return current + a }");
-scriptDoubleSum.put("fnReset", "function reset() { return 0 }");
-scriptDoubleSum.put("fnCombine", "function combine(a,b) { return a + b }");
+SCRIPT_DOUBLE_SUM.put("fnAggregate", "function aggregate(current, a) { return current + a }");
+SCRIPT_DOUBLE_SUM.put("fnReset", "function reset() { return 0 }");
+SCRIPT_DOUBLE_SUM.put("fnCombine", "function combine(a,b) { return a + b }");
}

private static final Logger log = new Logger(TimeCompareBenchmark.class);

@@ -41,7 +41,7 @@ import java.util.concurrent.TimeUnit;
@State(Scope.Benchmark)
public class CostBalancerStrategyBenchmark
{
-private static final DateTime t0 = DateTimes.of("2016-01-01T01:00:00Z");
+private static final DateTime T0 = DateTimes.of("2016-01-01T01:00:00Z");

private List<DataSegment> segments;
private DataSegment segment;

@@ -55,12 +55,12 @@ public class CostBalancerStrategyBenchmark
@Setup
public void setupDummyCluster()
{
-segment = createSegment(t0);
+segment = createSegment(T0);

Random r = ThreadLocalRandom.current();
segments = new ArrayList<>(n);
for (int i = 0; i < n; ++i) {
-final DateTime t = t0.minusHours(r.nextInt(365 * 24) - 365 * 12);
+final DateTime t = T0.minusHours(r.nextInt(365 * 24) - 365 * 12);
segments.add(createSegment(t));
}
}

@@ -48,6 +48,13 @@
<suppress checks="Indentation" files="[\\/]target[\\/]generated-test-sources[\\/]" />
<suppress checks="Indentation" files="ProtoTestEventWrapper.java" />
<suppress checks="Regexp" id="argumentLineBreaking" files="ProtoTestEventWrapper.java" />
+<suppress checks="ConstantName" files="ProtoTestEventWrapper.java" />
+<suppress checks="ConstantName" files="MySubRecord.java" />
+<suppress checks="ConstantName" files="SomeAvroDatum.java" />
+<suppress checks="ConstantName" files="MyFixed.java" />
+<suppress checks="ConstantName" files="MyEnum.java" />
+<suppress checks="ConstantName" files="Author" />
+<suppress checks="ConstantName" files="Book" />

<suppress checks="OneStatementPerLine" files="[\\/]target[\\/]generated-test-sources[\\/]" />

@@ -306,7 +306,7 @@ codestyle/checkstyle.xml. "/>
<property name="illegalPattern" value="true"/>
<property name="message" value="Duplicate line"/>
</module>

<!-- Added as per the issue #6936 - Prohibit method names starting with capital letters -->
<module name="MethodName">
<property name = "format" value = "^[a-z_]*[a-z0-9][a-zA-Z0-9_]*$"/>

@@ -319,5 +319,8 @@ codestyle/checkstyle.xml. "/>
<module name="LocalFinalVariableName">
<property name="format" value="^[a-z_]*[a-z0-9][a-zA-Z0-9_]*$"/>
</module>
+<module name="ConstantName">
+<property name="format" value="^log(ger)?$|^[A-Z][A-Z0-9]*(_[A-Z0-9]+)*$"/>
+</module>
</module>
</module>

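A minimal sketch (hypothetical class, not part of this patch) of which constant names the ConstantName format "^log(ger)?$|^[A-Z][A-Z0-9]*(_[A-Z0-9]+)*$" added above accepts and rejects:

// Hypothetical example only; illustrates the ConstantName format configured above.
public class ConstantNameExample
{
  // Accepted: UPPER_SNAKE_CASE static final constants.
  private static final int MAX_ROWS = 250000;
  private static final String FILE_EXTENSION = "smoosh";

  // Accepted: the format explicitly allows the names "log" and "logger".
  private static final java.util.logging.Logger log = java.util.logging.Logger.getLogger("example");

  // Rejected: lowerCamelCase constants such as
  // private static final ObjectMapper jsonMapper = new ObjectMapper();
}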
@@ -33,11 +33,11 @@ public class ReferenceCountingResourceHolder<T> implements ResourceHolder<T>
{
private static final Logger log = new Logger(ReferenceCountingResourceHolder.class);

-private static final AtomicLong leakedResources = new AtomicLong();
+private static final AtomicLong LEAKED_RESOURCES = new AtomicLong();

public static long leakedResources()
{
-return leakedResources.get();
+return LEAKED_RESOURCES.get();
}

private final T object;

@@ -164,7 +164,7 @@ public class ReferenceCountingResourceHolder<T> implements ResourceHolder<T>
}
if (refCount.compareAndSet(count, 0)) {
try {
-leakedResources.incrementAndGet();
+LEAKED_RESOURCES.incrementAndGet();
closer.close();
return;
}

@@ -53,7 +53,7 @@ public class TimestampSpec
private final Function<Object, DateTime> timestampConverter;

// remember last value parsed
-private static final ThreadLocal<ParseCtx> parseCtx = ThreadLocal.withInitial(ParseCtx::new);
+private static final ThreadLocal<ParseCtx> PARSE_CTX = ThreadLocal.withInitial(ParseCtx::new);

@JsonCreator
public TimestampSpec(

@@ -98,7 +98,7 @@ public class TimestampSpec
{
DateTime extracted = missingValue;
if (input != null) {
-ParseCtx ctx = parseCtx.get();
+ParseCtx ctx = PARSE_CTX.get();
// Check if the input is equal to the last input, so we don't need to parse it again
if (input.equals(ctx.lastTimeObject)) {
extracted = ctx.lastDateTime;

@@ -107,7 +107,7 @@ public class TimestampSpec
ParseCtx newCtx = new ParseCtx();
newCtx.lastTimeObject = input;
newCtx.lastDateTime = extracted;
-parseCtx.set(newCtx);
+PARSE_CTX.set(newCtx);
}
}
return extracted;

@@ -31,7 +31,7 @@ import java.util.List;
*/
public class CommaListJoinSerializer extends StdScalarSerializer<List<String>>
{
-private static final Joiner joiner = Joiner.on(",");
+private static final Joiner JOINER = Joiner.on(",");

protected CommaListJoinSerializer()
{

@@ -41,6 +41,6 @@ public class CommaListJoinSerializer extends StdScalarSerializer<List<String>>
@Override
public void serialize(List<String> value, JsonGenerator jgen, SerializerProvider provider) throws IOException
{
-jgen.writeString(joiner.join(value));
+jgen.writeString(JOINER.join(value));
}
}

@@ -43,11 +43,11 @@ public abstract class Granularity implements Cacheable
/**
* Default patterns for parsing paths.
*/
-private static final Pattern defaultPathPattern =
+private static final Pattern DEFAULT_PATH_PATTERN =
Pattern.compile(
"^.*[Yy]=(\\d{4})/(?:[Mm]=(\\d{2})/(?:[Dd]=(\\d{2})/(?:[Hh]=(\\d{2})/(?:[Mm]=(\\d{2})/(?:[Ss]=(\\d{2})/)?)?)?)?)?.*$"
);
-private static final Pattern hivePathPattern =
+private static final Pattern HIVE_PATH_PATTERN =
Pattern.compile("^.*dt=(\\d{4})(?:-(\\d{2})(?:-(\\d{2})(?:-(\\d{2})(?:-(\\d{2})(?:-(\\d{2})?)?)?)?)?)?/.*$");

@JsonCreator

@@ -150,13 +150,13 @@ public abstract class Granularity implements Cacheable
// Used by the toDate implementations.
final Integer[] getDateValues(String filePath, Formatter formatter)
{
-Pattern pattern = defaultPathPattern;
+Pattern pattern = DEFAULT_PATH_PATTERN;
switch (formatter) {
case DEFAULT:
case LOWER_DEFAULT:
break;
case HIVE:
-pattern = hivePathPattern;
+pattern = HIVE_PATH_PATTERN;
break;
default:
throw new IAE("Format %s not supported", formatter);

@@ -70,7 +70,7 @@ import java.util.TreeMap;
public class FileSmoosher implements Closeable
{
private static final String FILE_EXTENSION = "smoosh";
-private static final Joiner joiner = Joiner.on(",");
+private static final Joiner JOINER = Joiner.on(",");
private static final Logger LOG = new Logger(FileSmoosher.class);

private final File baseDir;

@@ -376,7 +376,7 @@ public class FileSmoosher implements Closeable
for (Map.Entry<String, Metadata> entry : internalFiles.entrySet()) {
final Metadata metadata = entry.getValue();
out.write(
-joiner.join(
+JOINER.join(
entry.getKey(),
metadata.getFileNum(),
metadata.getStartOffset(),

@@ -47,7 +47,7 @@ import java.util.Set;
@Deprecated
public class JSONToLowerParser implements Parser<String, Object>
{
-private static final Function<JsonNode, Object> valueFunction = new Function<JsonNode, Object>()
+private static final Function<JsonNode, Object> VALUE_FUNCTION = new Function<JsonNode, Object>()
{
@Override
public Object apply(JsonNode node)

@@ -128,14 +128,14 @@ public class JSONToLowerParser implements Parser<String, Object>
if (node.isArray()) {
final List<Object> nodeValue = Lists.newArrayListWithExpectedSize(node.size());
for (final JsonNode subnode : node) {
-final Object subnodeValue = valueFunction.apply(subnode);
+final Object subnodeValue = VALUE_FUNCTION.apply(subnode);
if (subnodeValue != null) {
nodeValue.add(subnodeValue);
}
}
map.put(StringUtils.toLowerCase(key), nodeValue); // difference from JSONParser parse()
} else {
-final Object nodeValue = valueFunction.apply(node);
+final Object nodeValue = VALUE_FUNCTION.apply(node);
if (nodeValue != null) {
map.put(StringUtils.toLowerCase(key), nodeValue); // difference from JSONParser parse()
}

@@ -90,7 +90,7 @@ public class HttpPostEmitter implements Flushable, Closeable, Emitter
private static final byte[] LARGE_EVENTS_STOP = new byte[]{};

private static final Logger log = new Logger(HttpPostEmitter.class);
-private static final AtomicInteger instanceCounter = new AtomicInteger();
+private static final AtomicInteger INSTANCE_COUNTER = new AtomicInteger();

final BatchingStrategy batchingStrategy;
final HttpEmitterConfig config;

@@ -484,7 +484,7 @@ public class HttpPostEmitter implements Flushable, Closeable, Emitter
EmittingThread(HttpEmitterConfig config)
{
-super("HttpPostEmitter-" + instanceCounter.incrementAndGet());
+super("HttpPostEmitter-" + INSTANCE_COUNTER.incrementAndGet());
setDaemon(true);
timeoutLessThanMinimumException = new TimeoutException(
"Timeout less than minimum [" + config.getMinHttpTimeoutMillis() + "] ms."

@@ -44,7 +44,7 @@ import java.util.Map;
*/
public class Request
{
-private static final ChannelBufferFactory factory = HeapChannelBufferFactory.getInstance();
+private static final ChannelBufferFactory FACTORY = HeapChannelBufferFactory.getInstance();

private final HttpMethod method;
private final URL url;

@@ -147,7 +147,7 @@ public class Request
public Request setContent(String contentType, byte[] bytes, int offset, int length)
{
-return setContent(contentType, factory.getBuffer(bytes, offset, length));
+return setContent(contentType, FACTORY.getBuffer(bytes, offset, length));
}

public Request setContent(String contentType, ChannelBuffer content)

@@ -73,12 +73,12 @@ public class SigarUtil
*/
private static class CurrentProcessIdHolder
{
-private static final long currentProcessId = new Sigar().getPid();
+private static final long CURRENT_PROCESS_ID = new Sigar().getPid();
}

public static long getCurrentProcessId()
{
-return CurrentProcessIdHolder.currentProcessId;
+return CurrentProcessIdHolder.CURRENT_PROCESS_ID;
}

}

@@ -38,7 +38,7 @@ import java.util.List;
public class HashBasedNumberedShardSpec extends NumberedShardSpec
{
-private static final HashFunction hashFunction = Hashing.murmur3_32();
+private static final HashFunction HASH_FUNCTION = Hashing.murmur3_32();
private static final List<String> DEFAULT_PARTITION_DIMENSIONS = ImmutableList.of();

private final ObjectMapper jsonMapper;

@@ -100,7 +100,7 @@ public class HashBasedNumberedShardSpec extends NumberedShardSpec
@VisibleForTesting
public static int hash(ObjectMapper jsonMapper, List<Object> objects) throws JsonProcessingException
{
-return hashFunction.hashBytes(jsonMapper.writeValueAsBytes(objects)).asInt();
+return HASH_FUNCTION.hashBytes(jsonMapper.writeValueAsBytes(objects)).asInt();
}

@Override

@@ -27,14 +27,14 @@ import java.io.IOException;
public class SerializablePairTest
{
-private static final ObjectMapper jsonMapper = new ObjectMapper();
+private static final ObjectMapper JSON_MAPPER = new ObjectMapper();

@Test
public void testBytesSerde() throws IOException
{
SerializablePair pair = new SerializablePair<>(5L, 9L);
-byte[] bytes = jsonMapper.writeValueAsBytes(pair);
-SerializablePair<Number, Number> deserializedPair = jsonMapper.readValue(bytes, SerializablePair.class);
+byte[] bytes = JSON_MAPPER.writeValueAsBytes(pair);
+SerializablePair<Number, Number> deserializedPair = JSON_MAPPER.readValue(bytes, SerializablePair.class);
Assert.assertEquals(pair.lhs, deserializedPair.lhs.longValue());
Assert.assertEquals(pair.rhs, deserializedPair.rhs.longValue());
}

@@ -43,8 +43,8 @@ public class SerializablePairTest
public void testStringSerde() throws IOException
{
SerializablePair pair = new SerializablePair<>(5L, 9L);
-String str = jsonMapper.writeValueAsString(pair);
-SerializablePair<Number, Number> deserializedPair = jsonMapper.readValue(str, SerializablePair.class);
+String str = JSON_MAPPER.writeValueAsString(pair);
+SerializablePair<Number, Number> deserializedPair = JSON_MAPPER.readValue(str, SerializablePair.class);
Assert.assertEquals(pair.lhs, deserializedPair.lhs.longValue());
Assert.assertEquals(pair.rhs, deserializedPair.rhs.longValue());
}

@@ -64,7 +64,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
{
private static long FILE_SIZE = -1;

-private static final StringInputRowParser parser = new StringInputRowParser(
+private static final StringInputRowParser PARSER = new StringInputRowParser(
new CSVParseSpec(
new TimestampSpec(
"timestamp",

@@ -163,7 +163,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
final List<Row> rows = new ArrayList<>();
final File firehoseTmpDir = createFirehoseTmpDir("testWithoutCacheAndFetch");
-try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
while (firehose.hasMore()) {
rows.add(firehose.nextRow());
}

@@ -182,7 +182,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
final List<Row> rows = new ArrayList<>();
final File firehoseTmpDir = createFirehoseTmpDir("testWithoutCacheAndFetch");
-try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
while (firehose.hasMore()) {
rows.add(firehose.nextRow());
}

@@ -201,7 +201,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
final List<Row> rows = new ArrayList<>();
final File firehoseTmpDir = createFirehoseTmpDir("testWithoutCache");
-try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
while (firehose.hasMore()) {
rows.add(firehose.nextRow());
}

@@ -220,7 +220,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
final List<Row> rows = new ArrayList<>();
final File firehoseTmpDir = createFirehoseTmpDir("testWithZeroFetchCapacity");
-try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
while (firehose.hasMore()) {
rows.add(firehose.nextRow());
}

@@ -238,7 +238,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
final List<Row> rows = new ArrayList<>();
final File firehoseTmpDir = createFirehoseTmpDir("testWithCacheAndFetch");
-try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
while (firehose.hasMore()) {
rows.add(firehose.nextRow());
}

@@ -256,7 +256,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
final List<Row> rows = new ArrayList<>();
final File firehoseTmpDir = createFirehoseTmpDir("testWithLargeCacheAndSmallFetch");
-try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
while (firehose.hasMore()) {
rows.add(firehose.nextRow());
}

@@ -274,7 +274,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
final List<Row> rows = new ArrayList<>();
final File firehoseTmpDir = createFirehoseTmpDir("testWithSmallCacheAndLargeFetch");
-try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
while (firehose.hasMore()) {
rows.add(firehose.nextRow());
}

@@ -292,7 +292,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
final List<Row> rows = new ArrayList<>();
final File firehoseTmpDir = createFirehoseTmpDir("testRetry");
-try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
while (firehose.hasMore()) {
rows.add(firehose.nextRow());
}

@@ -312,7 +312,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
final TestPrefetchableTextFilesFirehoseFactory factory =
TestPrefetchableTextFilesFirehoseFactory.withOpenExceptions(TEST_DIR, 5);

-try (Firehose firehose = factory.connect(parser, createFirehoseTmpDir("testMaxRetry"))) {
+try (Firehose firehose = factory.connect(PARSER, createFirehoseTmpDir("testMaxRetry"))) {
while (firehose.hasMore()) {
firehose.nextRow();
}

@@ -328,7 +328,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
final TestPrefetchableTextFilesFirehoseFactory factory =
TestPrefetchableTextFilesFirehoseFactory.withSleepMillis(TEST_DIR, 1000);

-try (Firehose firehose = factory.connect(parser, createFirehoseTmpDir("testTimeout"))) {
+try (Firehose firehose = factory.connect(PARSER, createFirehoseTmpDir("testTimeout"))) {
while (firehose.hasMore()) {
firehose.nextRow();
}

@@ -344,7 +344,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
for (int i = 0; i < 5; i++) {
final List<Row> rows = new ArrayList<>();
-try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
if (i > 0) {
Assert.assertEquals(FILE_SIZE * 2, factory.getCacheManager().getTotalCachedBytes());
}

@@ -367,7 +367,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
for (int i = 0; i < 5; i++) {
final List<Row> rows = new ArrayList<>();
-try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
if (i > 0) {
Assert.assertEquals(FILE_SIZE * 2, factory.getCacheManager().getTotalCachedBytes());
}

@@ -62,9 +62,9 @@ import java.util.zip.ZipOutputStream;
public class CompressionUtilsTest
{
-private static final String content;
-private static final byte[] expected;
-private static final byte[] gzBytes;
+private static final String CONTENT;
+private static final byte[] EXPECTED;
+private static final byte[] GZ_BYTES;

static {
final StringBuilder builder = new StringBuilder();

@@ -79,19 +79,19 @@ public class CompressionUtilsTest
catch (IOException e) {
throw new RuntimeException(e);
}
-content = builder.toString();
-expected = StringUtils.toUtf8(content);
+CONTENT = builder.toString();
+EXPECTED = StringUtils.toUtf8(CONTENT);

-final ByteArrayOutputStream gzByteStream = new ByteArrayOutputStream(expected.length);
+final ByteArrayOutputStream gzByteStream = new ByteArrayOutputStream(EXPECTED.length);
try (GZIPOutputStream outputStream = new GZIPOutputStream(gzByteStream)) {
-try (ByteArrayInputStream in = new ByteArrayInputStream(expected)) {
+try (ByteArrayInputStream in = new ByteArrayInputStream(EXPECTED)) {
ByteStreams.copy(in, outputStream);
}
}
catch (IOException e) {
throw new RuntimeException(e);
}
-gzBytes = gzByteStream.toByteArray();
+GZ_BYTES = gzByteStream.toByteArray();
}

@Rule

@@ -101,9 +101,9 @@ public class CompressionUtilsTest
public static void assertGoodDataStream(InputStream stream) throws IOException
{
-try (final ByteArrayOutputStream bos = new ByteArrayOutputStream(expected.length)) {
+try (final ByteArrayOutputStream bos = new ByteArrayOutputStream(EXPECTED.length)) {
ByteStreams.copy(stream, bos);
-Assert.assertArrayEquals(expected, bos.toByteArray());
+Assert.assertArrayEquals(EXPECTED, bos.toByteArray());
}
}

@@ -113,7 +113,7 @@ public class CompressionUtilsTest
testDir = temporaryFolder.newFolder("testDir");
testFile = new File(testDir, "test.dat");
try (OutputStream outputStream = new FileOutputStream(testFile)) {
-outputStream.write(StringUtils.toUtf8(content));
+outputStream.write(StringUtils.toUtf8(CONTENT));
}
Assert.assertTrue(testFile.getParentFile().equals(testDir));
}

@@ -395,35 +395,35 @@ public class CompressionUtilsTest
{
try (OutputStream outputStream = new FileOutputStream(testFile)) {
Assert.assertEquals(
-gzBytes.length,
+GZ_BYTES.length,
ByteStreams.copy(
-new ZeroRemainingInputStream(new ByteArrayInputStream(gzBytes)),
+new ZeroRemainingInputStream(new ByteArrayInputStream(GZ_BYTES)),
outputStream
)
);
Assert.assertEquals(
-gzBytes.length,
+GZ_BYTES.length,
ByteStreams.copy(
-new ZeroRemainingInputStream(new ByteArrayInputStream(gzBytes)),
+new ZeroRemainingInputStream(new ByteArrayInputStream(GZ_BYTES)),
outputStream
)
);
Assert.assertEquals(
-gzBytes.length,
+GZ_BYTES.length,
ByteStreams.copy(
-new ZeroRemainingInputStream(new ByteArrayInputStream(gzBytes)),
+new ZeroRemainingInputStream(new ByteArrayInputStream(GZ_BYTES)),
outputStream
)
);
}
-Assert.assertEquals(gzBytes.length * 3, testFile.length());
+Assert.assertEquals(GZ_BYTES.length * 3, testFile.length());
try (InputStream inputStream = new ZeroRemainingInputStream(new FileInputStream(testFile))) {
for (int i = 0; i < 3; ++i) {
-final byte[] bytes = new byte[gzBytes.length];
+final byte[] bytes = new byte[GZ_BYTES.length];
Assert.assertEquals(bytes.length, inputStream.read(bytes));
Assert.assertArrayEquals(
StringUtils.format("Failed on range %d", i),
-gzBytes,
+GZ_BYTES,
bytes
);
}

@@ -435,10 +435,10 @@ public class CompressionUtilsTest
// http://bugs.java.com/bugdatabase/view_bug.do?bug_id=7036144
public void testGunzipBug() throws IOException
{
-final ByteArrayOutputStream tripleGzByteStream = new ByteArrayOutputStream(gzBytes.length * 3);
-tripleGzByteStream.write(gzBytes);
-tripleGzByteStream.write(gzBytes);
-tripleGzByteStream.write(gzBytes);
+final ByteArrayOutputStream tripleGzByteStream = new ByteArrayOutputStream(GZ_BYTES.length * 3);
+tripleGzByteStream.write(GZ_BYTES);
+tripleGzByteStream.write(GZ_BYTES);
+tripleGzByteStream.write(GZ_BYTES);
try (final InputStream inputStream = new GZIPInputStream(
new ZeroRemainingInputStream(
new ByteArrayInputStream(

@@ -446,17 +446,17 @@ public class CompressionUtilsTest
)
)
)) {
-try (final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(expected.length * 3)) {
+try (final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(EXPECTED.length * 3)) {
Assert.assertEquals(
"Read terminated too soon (bug 7036144)",
-expected.length * 3,
+EXPECTED.length * 3,
ByteStreams.copy(inputStream, outputStream)
);
final byte[] found = outputStream.toByteArray();
-Assert.assertEquals(expected.length * 3, found.length);
-Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 0, expected.length * 1));
-Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 1, expected.length * 2));
-Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 2, expected.length * 3));
+Assert.assertEquals(EXPECTED.length * 3, found.length);
+Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 0, EXPECTED.length * 1));
+Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 1, EXPECTED.length * 2));
+Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 2, EXPECTED.length * 3));
}
}
}

@@ -468,10 +468,10 @@ public class CompressionUtilsTest
testFile.delete();
Assert.assertFalse(testFile.exists());

-final ByteArrayOutputStream tripleGzByteStream = new ByteArrayOutputStream(gzBytes.length * 3);
-tripleGzByteStream.write(gzBytes);
-tripleGzByteStream.write(gzBytes);
-tripleGzByteStream.write(gzBytes);
+final ByteArrayOutputStream tripleGzByteStream = new ByteArrayOutputStream(GZ_BYTES.length * 3);
+tripleGzByteStream.write(GZ_BYTES);
+tripleGzByteStream.write(GZ_BYTES);
+tripleGzByteStream.write(GZ_BYTES);

final ByteSource inputStreamFactory = new ByteSource()
{

@@ -482,20 +482,20 @@ public class CompressionUtilsTest
}
};

-Assert.assertEquals((long) (expected.length * 3), CompressionUtils.gunzip(inputStreamFactory, testFile).size());
+Assert.assertEquals((long) (EXPECTED.length * 3), CompressionUtils.gunzip(inputStreamFactory, testFile).size());

try (final InputStream inputStream = new FileInputStream(testFile)) {
-try (final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(expected.length * 3)) {
+try (final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(EXPECTED.length * 3)) {
Assert.assertEquals(
"Read terminated too soon (7036144)",
-expected.length * 3,
+EXPECTED.length * 3,
ByteStreams.copy(inputStream, outputStream)
);
final byte[] found = outputStream.toByteArray();
-Assert.assertEquals(expected.length * 3, found.length);
-Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 0, expected.length * 1));
-Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 1, expected.length * 2));
-Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 2, expected.length * 3));
+Assert.assertEquals(EXPECTED.length * 3, found.length);
+Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 0, EXPECTED.length * 1));
+Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 1, EXPECTED.length * 2));
+Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 2, EXPECTED.length * 3));
}
}
}

@@ -505,14 +505,14 @@ public class CompressionUtilsTest
public void testGunzipBugStreamWorkarround() throws IOException
{
-final ByteArrayOutputStream tripleGzByteStream = new ByteArrayOutputStream(gzBytes.length * 3);
-tripleGzByteStream.write(gzBytes);
-tripleGzByteStream.write(gzBytes);
-tripleGzByteStream.write(gzBytes);
+final ByteArrayOutputStream tripleGzByteStream = new ByteArrayOutputStream(GZ_BYTES.length * 3);
+tripleGzByteStream.write(GZ_BYTES);
+tripleGzByteStream.write(GZ_BYTES);
+tripleGzByteStream.write(GZ_BYTES);

-try (ByteArrayOutputStream bos = new ByteArrayOutputStream(expected.length * 3)) {
+try (ByteArrayOutputStream bos = new ByteArrayOutputStream(EXPECTED.length * 3)) {
Assert.assertEquals(
-expected.length * 3,
+EXPECTED.length * 3,
CompressionUtils.gunzip(
new ZeroRemainingInputStream(
new ByteArrayInputStream(tripleGzByteStream.toByteArray())

@@ -520,10 +520,10 @@ public class CompressionUtilsTest
)
);
final byte[] found = bos.toByteArray();
-Assert.assertEquals(expected.length * 3, found.length);
-Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 0, expected.length * 1));
-Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 1, expected.length * 2));
-Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 2, expected.length * 3));
+Assert.assertEquals(EXPECTED.length * 3, found.length);
+Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 0, EXPECTED.length * 1));
+Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 1, EXPECTED.length * 2));
+Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 2, EXPECTED.length * 3));
}
}

@@ -704,7 +704,7 @@ public class CompressionUtilsTest
@Override
public int read(byte b[]) throws IOException
{
-final int len = Math.min(b.length, gzBytes.length - pos.get() % gzBytes.length);
+final int len = Math.min(b.length, GZ_BYTES.length - pos.get() % GZ_BYTES.length);
pos.addAndGet(len);
return read(b, 0, len);
}

@@ -719,7 +719,7 @@ public class CompressionUtilsTest
@Override
public int read(byte b[], int off, int len) throws IOException
{
-final int l = Math.min(len, gzBytes.length - pos.get() % gzBytes.length);
+final int l = Math.min(len, GZ_BYTES.length - pos.get() % GZ_BYTES.length);
pos.addAndGet(l);
return super.read(b, off, l);
}

@@ -28,7 +28,7 @@ import java.util.concurrent.atomic.AtomicInteger;
public class RetryUtilsTest
{
-private static final Predicate<Throwable> isTransient = new Predicate<Throwable>()
+private static final Predicate<Throwable> IS_TRANSIENT = new Predicate<Throwable>()
{
@Override
public boolean apply(Throwable e)

@@ -46,7 +46,7 @@ public class RetryUtilsTest
count.incrementAndGet();
return "hey";
},
-isTransient,
+IS_TRANSIENT,
2
);
Assert.assertEquals("result", "hey", result);

@@ -64,7 +64,7 @@ public class RetryUtilsTest
count.incrementAndGet();
throw new IOException("what");
},
-isTransient,
+IS_TRANSIENT,
2
);
}

@@ -87,7 +87,7 @@ public class RetryUtilsTest
throw new IOException("what");
}
},
-isTransient,
+IS_TRANSIENT,
3
);
Assert.assertEquals("result", "hey", result);

@@ -108,7 +108,7 @@ public class RetryUtilsTest
throw new IOException("uhh");
}
},
-isTransient,
+IS_TRANSIENT,
3
);
}

@@ -42,7 +42,7 @@ import java.util.concurrent.atomic.AtomicLong;
*/
public class LifecycleTest
{
-private static final Lifecycle.Handler dummyHandler = new Lifecycle.Handler()
+private static final Lifecycle.Handler DUMMY_HANDLER = new Lifecycle.Handler()
{
@Override
public void start()

@@ -319,7 +319,7 @@ public class LifecycleTest
reachedStop.await();

try {
-lifecycle.addHandler(dummyHandler);
+lifecycle.addHandler(DUMMY_HANDLER);
Assert.fail("Expected exception");
}
catch (IllegalStateException e) {

@@ -327,7 +327,7 @@ public class LifecycleTest
}

try {
-lifecycle.addMaybeStartHandler(dummyHandler);
+lifecycle.addMaybeStartHandler(DUMMY_HANDLER);
Assert.fail("Expected exception");
}
catch (IllegalStateException e) {

@@ -49,7 +49,7 @@ public class FlatTextFormatParserTest
);
}

-private static final FlatTextFormatParserFactory parserFactory = new FlatTextFormatParserFactory();
+private static final FlatTextFormatParserFactory PARSER_FACTORY = new FlatTextFormatParserFactory();

@Rule
public ExpectedException expectedException = ExpectedException.none();

@@ -65,7 +65,7 @@ public class FlatTextFormatParserTest
public void testValidHeader()
{
final String header = concat(format, "time", "value1", "value2");
-final Parser<String, Object> parser = parserFactory.get(format, header);
+final Parser<String, Object> parser = PARSER_FACTORY.get(format, header);
Assert.assertEquals(ImmutableList.of("time", "value1", "value2"), parser.getFieldNames());
}

@@ -77,14 +77,14 @@ public class FlatTextFormatParserTest
expectedException.expect(ParseException.class);
expectedException.expectMessage(StringUtils.format("Unable to parse header [%s]", header));

-parserFactory.get(format, header);
+PARSER_FACTORY.get(format, header);
}

@Test
public void testWithHeader()
{
final String header = concat(format, "time", "value1", "value2");
-final Parser<String, Object> parser = parserFactory.get(format, header);
+final Parser<String, Object> parser = PARSER_FACTORY.get(format, header);
final String body = concat(format, "hello", "world", "foo");
final Map<String, Object> jsonMap = parser.parseToMap(body);
Assert.assertEquals(

@@ -97,7 +97,7 @@ public class FlatTextFormatParserTest
@Test
public void testWithoutHeader()
{
-final Parser<String, Object> parser = parserFactory.get(format);
+final Parser<String, Object> parser = PARSER_FACTORY.get(format);
final String body = concat(format, "hello", "world", "foo");
final Map<String, Object> jsonMap = parser.parseToMap(body);
Assert.assertEquals(

@@ -111,7 +111,7 @@ public class FlatTextFormatParserTest
public void testWithSkipHeaderRows()
{
final int skipHeaderRows = 2;
-final Parser<String, Object> parser = parserFactory.get(format, false, skipHeaderRows);
+final Parser<String, Object> parser = PARSER_FACTORY.get(format, false, skipHeaderRows);
parser.startFileFromBeginning();
final String[] body = new String[]{
concat(format, "header", "line", "1"),

@@ -133,7 +133,7 @@ public class FlatTextFormatParserTest
@Test
public void testWithHeaderRow()
{
-final Parser<String, Object> parser = parserFactory.get(format, true, 0);
+final Parser<String, Object> parser = PARSER_FACTORY.get(format, true, 0);
parser.startFileFromBeginning();
final String[] body = new String[]{
concat(format, "time", "value1", "value2"),

@@ -151,7 +151,7 @@ public class FlatTextFormatParserTest
@Test
public void testWithHeaderRowOfEmptyColumns()
{
-final Parser<String, Object> parser = parserFactory.get(format, true, 0);
+final Parser<String, Object> parser = PARSER_FACTORY.get(format, true, 0);
parser.startFileFromBeginning();
final String[] body = new String[]{
concat(format, "time", "", "value2", ""),

@@ -169,7 +169,7 @@ public class FlatTextFormatParserTest
@Test
public void testWithDifferentHeaderRows()
{
-final Parser<String, Object> parser = parserFactory.get(format, true, 0);
+final Parser<String, Object> parser = PARSER_FACTORY.get(format, true, 0);
parser.startFileFromBeginning();
final String[] body = new String[]{
concat(format, "time", "value1", "value2"),

@@ -206,7 +206,7 @@ public class FlatTextFormatParserTest
);

final int skipHeaderRows = 2;
-final Parser<String, Object> parser = parserFactory.get(format, false, skipHeaderRows);
+final Parser<String, Object> parser = PARSER_FACTORY.get(format, false, skipHeaderRows);
final String[] body = new String[]{
concat(format, "header", "line", "1"),
concat(format, "header", "line", "2"),

@@ -32,13 +32,13 @@ import java.util.Map;
public class JSONPathParserTest
{
-private static final String json =
+private static final String JSON =
"{\"one\": \"foo\", \"two\" : [\"bar\", \"baz\"], \"three\" : \"qux\", \"four\" : null}";
-private static final String numbersJson =
+private static final String NUMBERS_JSON =
"{\"five\" : 5.0, \"six\" : 6, \"many\" : 1234567878900, \"toomany\" : 1234567890000000000000}";
-private static final String whackyCharacterJson =
+private static final String WHACKY_CHARACTER_JSON =
"{\"one\": \"foo\\uD900\"}";
-private static final String nestedJson =
+private static final String NESTED_JSON =
"{\"simpleVal\":\"text\", \"ignore_me\":[1, {\"x\":2}], \"blah\":[4,5,6], \"newmet\":5, " +
"\"foo\":{\"bar1\":\"aaa\", \"bar2\":\"bbb\"}, " +
"\"baz\":[1,2,3], \"timestamp\":\"2999\", \"foo.bar1\":\"Hello world!\", " +

@@ -47,7 +47,7 @@ public class JSONPathParserTest
"\"testMapConvert\":{\"big\": 1234567890000000000000, \"big2\":{\"big2\":1234567890000000000000}}, " +
"\"testEmptyList\": [], " +
"\"hey\":[{\"barx\":\"asdf\"}], \"met\":{\"a\":[7,8,9]}}";
-private static final String notJson = "***@#%R#*(TG@(*H(#@(#@((H#(@TH@(#TH(@SDHGKJDSKJFBSBJK";
+private static final String NOT_JSON = "***@#%R#*(TG@(*H(#@(#@((H#(@TH@(#TH(@SDHGKJDSKJFBSBJK";

@Rule
public ExpectedException thrown = ExpectedException.none();

@@ -57,7 +57,7 @@ public class JSONPathParserTest
{
List<JSONPathFieldSpec> fields = new ArrayList<>();
final Parser<String, Object> jsonParser = new JSONPathParser(new JSONPathSpec(true, fields), null);
-final Map<String, Object> jsonMap = jsonParser.parseToMap(json);
+final Map<String, Object> jsonMap = jsonParser.parseToMap(JSON);
Assert.assertEquals(
"jsonMap",
ImmutableMap.of("one", "foo", "two", ImmutableList.of("bar", "baz"), "three", "qux"),

@@ -70,7 +70,7 @@ public class JSONPathParserTest
{
List<JSONPathFieldSpec> fields = new ArrayList<>();
final Parser<String, Object> jsonParser = new JSONPathParser(new JSONPathSpec(true, fields), null);
-final Map<String, Object> jsonMap = jsonParser.parseToMap(numbersJson);
+final Map<String, Object> jsonMap = jsonParser.parseToMap(NUMBERS_JSON);
Assert.assertEquals(
"jsonMap",
ImmutableMap.of("five", 5.0, "six", 6L, "many", 1234567878900L, "toomany", 1.23456789E21),

@@ -83,7 +83,7 @@ public class JSONPathParserTest
{
List<JSONPathFieldSpec> fields = new ArrayList<>();
final Parser<String, Object> jsonParser = new JSONPathParser(new JSONPathSpec(true, fields), null);
-final Map<String, Object> jsonMap = jsonParser.parseToMap(whackyCharacterJson);
+final Map<String, Object> jsonMap = jsonParser.parseToMap(WHACKY_CHARACTER_JSON);
Assert.assertEquals(
"jsonMap",
ImmutableMap.of("one", "foo?"),

@@ -113,7 +113,7 @@ public class JSONPathParserTest
final Parser<String, Object> jsonParser = new JSONPathParser(new JSONPathSpec(true, fields), null);
-final Map<String, Object> jsonMap = jsonParser.parseToMap(nestedJson);
+final Map<String, Object> jsonMap = jsonParser.parseToMap(NESTED_JSON);

// Root fields
Assert.assertEquals(ImmutableList.of(1L, 2L, 3L), jsonMap.get("baz"));

@@ -174,7 +174,7 @@ public class JSONPathParserTest
fields.add(new JSONPathFieldSpec(JSONPathFieldType.JQ, "jq-met-array", ".met.a"));

final Parser<String, Object> jsonParser = new JSONPathParser(new JSONPathSpec(false, fields), null);
-final Map<String, Object> jsonMap = jsonParser.parseToMap(nestedJson);
+final Map<String, Object> jsonMap = jsonParser.parseToMap(NESTED_JSON);

// Root fields
Assert.assertEquals("text", jsonMap.get("simpleVal"));

@@ -211,7 +211,7 @@ public class JSONPathParserTest
thrown.expectMessage("Cannot have duplicate field definition: met-array");

final Parser<String, Object> jsonParser = new JSONPathParser(new JSONPathSpec(false, fields), null);
-jsonParser.parseToMap(nestedJson);
+jsonParser.parseToMap(NESTED_JSON);
}

@Test

@@ -225,7 +225,7 @@ public class JSONPathParserTest
thrown.expectMessage("Cannot have duplicate field definition: met-array");

final Parser<String, Object> jsonParser = new JSONPathParser(new JSONPathSpec(false, fields), null);
-jsonParser.parseToMap(nestedJson);
+jsonParser.parseToMap(NESTED_JSON);
}

@Test

@@ -234,9 +234,9 @@ public class JSONPathParserTest
List<JSONPathFieldSpec> fields = new ArrayList<>();

thrown.expect(ParseException.class);
-thrown.expectMessage("Unable to parse row [" + notJson + "]");
+thrown.expectMessage("Unable to parse row [" + NOT_JSON + "]");

final Parser<String, Object> jsonParser = new JSONPathParser(new JSONPathSpec(true, fields), null);
-jsonParser.parseToMap(notJson);
+jsonParser.parseToMap(NOT_JSON);
}
}

@@ -57,7 +57,7 @@ import java.util.stream.Stream;
*/
public class EmitterTest
{
-private static final ObjectMapper jsonMapper = new ObjectMapper();
+private static final ObjectMapper JSON_MAPPER = new ObjectMapper();
public static String TARGET_URL = "http://metrics.foo.bar/";
public static final Response OK_RESPONSE = Stream
.of(responseBuilder(HttpVersion.HTTP_1_1, HttpResponseStatus.CREATED))

@@ -120,7 +120,7 @@ public class EmitterTest
HttpPostEmitter emitter = new HttpPostEmitter(
config,
httpClient,
-jsonMapper
+JSON_MAPPER
);
emitter.start();
return emitter;

@@ -135,7 +135,7 @@ public class EmitterTest
HttpPostEmitter emitter = new HttpPostEmitter(
config,
httpClient,
-jsonMapper
+JSON_MAPPER
);
emitter.start();
return emitter;

@@ -150,7 +150,7 @@ public class EmitterTest
props.setProperty("org.apache.druid.java.util.emitter.flushCount", String.valueOf(size));

Lifecycle lifecycle = new Lifecycle();
-Emitter emitter = Emitters.create(props, httpClient, jsonMapper, lifecycle);
+Emitter emitter = Emitters.create(props, httpClient, JSON_MAPPER, lifecycle);
Assert.assertTrue(StringUtils.format(
"HttpPostEmitter emitter should be created, but found %s",
emitter.getClass().getName()

@@ -169,7 +169,7 @@ public class EmitterTest
HttpPostEmitter emitter = new HttpPostEmitter(
config,
httpClient,
-jsonMapper
+JSON_MAPPER
);
emitter.start();
return emitter;

@@ -187,7 +187,7 @@ public class EmitterTest
HttpPostEmitter emitter = new HttpPostEmitter(
config,
httpClient,
-jsonMapper
+JSON_MAPPER
);
emitter.start();
return emitter;

@@ -203,7 +203,7 @@ public class EmitterTest
HttpPostEmitter emitter = new HttpPostEmitter(
config,
httpClient,
-jsonMapper
+JSON_MAPPER
);
emitter.start();
return emitter;

@@ -232,8 +232,8 @@ public class EmitterTest
Assert.assertEquals(
StringUtils.format(
"[%s,%s]\n",
-jsonMapper.writeValueAsString(events.get(0)),
-jsonMapper.writeValueAsString(events.get(1))
+JSON_MAPPER.writeValueAsString(events.get(0)),
+JSON_MAPPER.writeValueAsString(events.get(1))
),
StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString()
);

@@ -274,8 +274,8 @@ public class EmitterTest
Assert.assertEquals(
StringUtils.format(
"[%s,%s]\n",
-jsonMapper.writeValueAsString(events.get(0)),
-jsonMapper.writeValueAsString(events.get(1))
+JSON_MAPPER.writeValueAsString(events.get(0)),
+JSON_MAPPER.writeValueAsString(events.get(1))
),
StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString()
);

@@ -459,8 +459,8 @@ public class EmitterTest
Assert.assertEquals(
StringUtils.format(
"%s\n%s\n",
-jsonMapper.writeValueAsString(events.get(0)),
-jsonMapper.writeValueAsString(events.get(1))
+JSON_MAPPER.writeValueAsString(events.get(0)),
+JSON_MAPPER.writeValueAsString(events.get(1))
),
StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString()
);

@@ -513,8 +513,8 @@ public class EmitterTest
Assert.assertEquals(
StringUtils.format(
"[%s,%s]\n",
-jsonMapper.writeValueAsString(events.get(counter.getAndIncrement())),
-jsonMapper.writeValueAsString(events.get(counter.getAndIncrement()))
+JSON_MAPPER.writeValueAsString(events.get(counter.getAndIncrement())),
+JSON_MAPPER.writeValueAsString(events.get(counter.getAndIncrement()))
),
StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString()
);

@@ -576,8 +576,8 @@ public class EmitterTest
Assert.assertEquals(
StringUtils.format(
"[%s,%s]\n",
-jsonMapper.writeValueAsString(events.get(0)),
-jsonMapper.writeValueAsString(events.get(1))
+JSON_MAPPER.writeValueAsString(events.get(0)),
+JSON_MAPPER.writeValueAsString(events.get(1))
),
baos.toString(StandardCharsets.UTF_8.name())
);

@@ -35,7 +35,7 @@ import java.util.concurrent.atomic.AtomicLong;
public class HttpEmitterTest
{
private final MockHttpClient httpClient = new MockHttpClient();
private static final ObjectMapper objectMapper = new ObjectMapper()
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper()
{
@Override
public byte[] writeValueAsBytes(Object value)
@@ -71,7 +71,7 @@ public class HttpEmitterTest
.setBatchingStrategy(BatchingStrategy.ONLY_EVENTS)
.setHttpTimeoutAllowanceFactor(timeoutAllowanceFactor)
.build();
final HttpPostEmitter emitter = new HttpPostEmitter(config, httpClient, objectMapper);
final HttpPostEmitter emitter = new HttpPostEmitter(config, httpClient, OBJECT_MAPPER);

long startMs = System.currentTimeMillis();
emitter.start();

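
The hunk above also shows what the rule does not touch: the non-static httpClient field keeps its camelCase name, and only the static final mapper becomes a constant. A small sketch of that distinction, with a hypothetical class that is not part of this diff:

// Hypothetical illustration: only static final fields count as constants.
public class FieldKindsExample
{
  // Constant: static final, so the check requires UPPER_SNAKE_CASE.
  private static final String API_VERSION = "v1";

  // Instance field: not a constant, so camelCase is left unchanged.
  private final StringBuilder requestLog = new StringBuilder();
}
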
@@ -42,7 +42,7 @@ import java.util.concurrent.ThreadLocalRandom;
public class HttpPostEmitterStressTest
{
private static final int N = 10_000;
private static final ObjectMapper objectMapper = new ObjectMapper()
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper()
{
@Override
public byte[] writeValueAsBytes(Object value)
@@ -64,7 +64,7 @@ public class HttpPostEmitterStressTest
// For this test, we don't need any batches to be dropped, i. e. "gaps" in data
.setBatchQueueSizeLimit(1000)
.build();
final HttpPostEmitter emitter = new HttpPostEmitter(config, httpClient, objectMapper);
final HttpPostEmitter emitter = new HttpPostEmitter(config, httpClient, OBJECT_MAPPER);
int nThreads = Runtime.getRuntime().availableProcessors() * 2;
final List<IntList> eventsPerThread = new ArrayList<>(nThreads);
final List<List<Batch>> eventBatchesPerThread = new ArrayList<>(nThreads);

@@ -35,7 +35,7 @@ import java.util.concurrent.atomic.AtomicReference;
public class HttpPostEmitterTest
{

private static final ObjectMapper objectMapper = new ObjectMapper()
private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper()
{
@Override
public byte[] writeValueAsBytes(Object value)
@@ -72,7 +72,7 @@ public class HttpPostEmitterTest
.setMaxBatchSize(1024 * 1024)
.setBatchQueueSizeLimit(1000)
.build();
final HttpPostEmitter emitter = new HttpPostEmitter(config, httpClient, objectMapper);
final HttpPostEmitter emitter = new HttpPostEmitter(config, httpClient, OBJECT_MAPPER);
emitter.start();

// emit first event

@ -42,7 +42,7 @@ import java.util.Properties;
|
|||
|
||||
public class ParametrizedUriEmitterTest
|
||||
{
|
||||
private static final ObjectMapper jsonMapper = new ObjectMapper();
|
||||
private static final ObjectMapper JSON_MAPPER = new ObjectMapper();
|
||||
|
||||
private MockHttpClient httpClient;
|
||||
private Lifecycle lifecycle;
|
||||
|
@ -98,8 +98,8 @@ public class ParametrizedUriEmitterTest
|
|||
Assert.assertEquals(
|
||||
StringUtils.format(
|
||||
"[%s,%s]\n",
|
||||
jsonMapper.writeValueAsString(events.get(0)),
|
||||
jsonMapper.writeValueAsString(events.get(1))
|
||||
JSON_MAPPER.writeValueAsString(events.get(0)),
|
||||
JSON_MAPPER.writeValueAsString(events.get(1))
|
||||
),
|
||||
StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString()
|
||||
);
|
||||
|
@ -148,8 +148,8 @@ public class ParametrizedUriEmitterTest
|
|||
emitter.flush();
|
||||
Assert.assertTrue(httpClient.succeeded());
|
||||
Map<String, String> expected = ImmutableMap.of(
|
||||
"http://example.com/test1", StringUtils.format("[%s]\n", jsonMapper.writeValueAsString(events.get(0))),
|
||||
"http://example.com/test2", StringUtils.format("[%s]\n", jsonMapper.writeValueAsString(events.get(1)))
|
||||
"http://example.com/test1", StringUtils.format("[%s]\n", JSON_MAPPER.writeValueAsString(events.get(0))),
|
||||
"http://example.com/test2", StringUtils.format("[%s]\n", JSON_MAPPER.writeValueAsString(events.get(1)))
|
||||
);
|
||||
Assert.assertEquals(expected, results);
|
||||
}
|
||||
|
@ -173,8 +173,8 @@ public class ParametrizedUriEmitterTest
|
|||
Assert.assertEquals(
|
||||
StringUtils.format(
|
||||
"[%s,%s]\n",
|
||||
jsonMapper.writeValueAsString(events.get(0)),
|
||||
jsonMapper.writeValueAsString(events.get(1))
|
||||
JSON_MAPPER.writeValueAsString(events.get(0)),
|
||||
JSON_MAPPER.writeValueAsString(events.get(1))
|
||||
),
|
||||
StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString()
|
||||
);
|
||||
|
|
|
@@ -25,46 +25,46 @@ import org.junit.Test;

public class DefaultPasswordProviderTest
{
private static final String pwd = "nothing";
private static final ObjectMapper jsonMapper = new ObjectMapper();
private static final String PWD = "nothing";
private static final ObjectMapper JSON_MAPPER = new ObjectMapper();

@Test
public void testExplicitConstruction()
{
DefaultPasswordProvider pp = new DefaultPasswordProvider(pwd);
Assert.assertEquals(pwd, pp.getPassword());
DefaultPasswordProvider pp = new DefaultPasswordProvider(PWD);
Assert.assertEquals(PWD, pp.getPassword());
}

@Test
public void testFromStringConstruction()
{
DefaultPasswordProvider pp = DefaultPasswordProvider.fromString(pwd);
Assert.assertEquals(pwd, pp.getPassword());
DefaultPasswordProvider pp = DefaultPasswordProvider.fromString(PWD);
Assert.assertEquals(PWD, pp.getPassword());
}

@Test
public void testDeserializationFromJsonString() throws Exception
{
PasswordProvider pp = jsonMapper.readValue("\"" + pwd + "\"",
PasswordProvider pp = JSON_MAPPER.readValue("\"" + PWD + "\"",
PasswordProvider.class);
Assert.assertEquals(pwd, pp.getPassword());
Assert.assertEquals(PWD, pp.getPassword());
}

@Test
public void testDeserializationFromJson() throws Exception
{
PasswordProvider pp = jsonMapper.readValue(
"{\"type\": \"default\", \"password\": \"" + pwd + "\"}",
PasswordProvider pp = JSON_MAPPER.readValue(
"{\"type\": \"default\", \"password\": \"" + PWD + "\"}",
PasswordProvider.class);
Assert.assertEquals(pwd, pp.getPassword());
Assert.assertEquals(PWD, pp.getPassword());
}

@Test
public void testSerializationWithMixIn() throws Exception
{
DefaultPasswordProvider pp = new DefaultPasswordProvider(pwd);
jsonMapper.addMixIn(PasswordProvider.class, PasswordProviderRedactionMixIn.class);
String valueAsString = jsonMapper.writeValueAsString(pp);
DefaultPasswordProvider pp = new DefaultPasswordProvider(PWD);
JSON_MAPPER.addMixIn(PasswordProvider.class, PasswordProviderRedactionMixIn.class);
String valueAsString = JSON_MAPPER.writeValueAsString(pp);
Assert.assertEquals("{\"type\":\"default\"}", valueAsString);
}
}

@@ -27,16 +27,16 @@ import java.io.IOException;

public class EnvironmentVariablePasswordProviderTest
{
private static final ObjectMapper jsonMapper = new ObjectMapper();
private static final ObjectMapper JSON_MAPPER = new ObjectMapper();

@Test
public void testSerde() throws IOException
{
String providerString = "{\"type\": \"environment\", \"variable\" : \"test\"}";
PasswordProvider provider = jsonMapper.readValue(providerString, PasswordProvider.class);
PasswordProvider provider = JSON_MAPPER.readValue(providerString, PasswordProvider.class);
Assert.assertTrue(provider instanceof EnvironmentVariablePasswordProvider);
Assert.assertEquals("test", ((EnvironmentVariablePasswordProvider) provider).getVariable());
PasswordProvider serde = jsonMapper.readValue(jsonMapper.writeValueAsString(provider), PasswordProvider.class);
PasswordProvider serde = JSON_MAPPER.readValue(JSON_MAPPER.writeValueAsString(provider), PasswordProvider.class);
Assert.assertEquals(provider, serde);
}
}

@ -39,7 +39,7 @@ public class MetadataStorageConnectorConfigTest
|
|||
)
|
||||
throws IOException
|
||||
{
|
||||
return jsonMapper.readValue(
|
||||
return JSON_MAPPER.readValue(
|
||||
"{" +
|
||||
"\"createTables\": \"" + createTables + "\"," +
|
||||
"\"host\": \"" + host + "\"," +
|
||||
|
@ -79,7 +79,7 @@ public class MetadataStorageConnectorConfigTest
|
|||
Assert.assertTrue(metadataStorageConnectorConfig.hashCode() == metadataStorageConnectorConfig2.hashCode());
|
||||
}
|
||||
|
||||
private static final ObjectMapper jsonMapper = new ObjectMapper();
|
||||
private static final ObjectMapper JSON_MAPPER = new ObjectMapper();
|
||||
|
||||
@Test
|
||||
public void testMetadataStorageConnectionConfigSimplePassword() throws Exception
|
||||
|
@ -119,7 +119,7 @@ public class MetadataStorageConnectorConfigTest
|
|||
String pwd
|
||||
) throws Exception
|
||||
{
|
||||
MetadataStorageConnectorConfig config = jsonMapper.readValue(
|
||||
MetadataStorageConnectorConfig config = JSON_MAPPER.readValue(
|
||||
"{" +
|
||||
"\"createTables\": \"" + createTables + "\"," +
|
||||
"\"host\": \"" + host + "\"," +
|
||||
|
@ -162,7 +162,7 @@ public class MetadataStorageConnectorConfigTest
|
|||
String pwd
|
||||
) throws Exception
|
||||
{
|
||||
MetadataStorageConnectorConfig config = jsonMapper.readValue(
|
||||
MetadataStorageConnectorConfig config = JSON_MAPPER.readValue(
|
||||
"{" +
|
||||
"\"createTables\": \"" + createTables + "\"," +
|
||||
"\"host\": \"" + host + "\"," +
|
||||
|
|
|
@ -51,7 +51,7 @@ import java.util.TreeSet;
|
|||
*/
|
||||
public class DataSegmentTest
|
||||
{
|
||||
private static final ObjectMapper mapper = new TestObjectMapper();
|
||||
private static final ObjectMapper MAPPER = new TestObjectMapper();
|
||||
private static final int TEST_VERSION = 0x9;
|
||||
|
||||
private static ShardSpec getShardSpec(final int partitionNum)
|
||||
|
@ -107,7 +107,7 @@ public class DataSegmentTest
|
|||
{
|
||||
InjectableValues.Std injectableValues = new InjectableValues.Std();
|
||||
injectableValues.addValue(DataSegment.PruneLoadSpecHolder.class, DataSegment.PruneLoadSpecHolder.DEFAULT);
|
||||
mapper.setInjectableValues(injectableValues);
|
||||
MAPPER.setInjectableValues(injectableValues);
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -129,8 +129,8 @@ public class DataSegmentTest
|
|||
1
|
||||
);
|
||||
|
||||
final Map<String, Object> objectMap = mapper.readValue(
|
||||
mapper.writeValueAsString(segment),
|
||||
final Map<String, Object> objectMap = MAPPER.readValue(
|
||||
MAPPER.writeValueAsString(segment),
|
||||
JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
|
||||
);
|
||||
|
||||
|
@ -145,7 +145,7 @@ public class DataSegmentTest
|
|||
Assert.assertEquals(TEST_VERSION, objectMap.get("binaryVersion"));
|
||||
Assert.assertEquals(1, objectMap.get("size"));
|
||||
|
||||
DataSegment deserializedSegment = mapper.readValue(mapper.writeValueAsString(segment), DataSegment.class);
|
||||
DataSegment deserializedSegment = MAPPER.readValue(MAPPER.writeValueAsString(segment), DataSegment.class);
|
||||
|
||||
Assert.assertEquals(segment.getDataSource(), deserializedSegment.getDataSource());
|
||||
Assert.assertEquals(segment.getInterval(), deserializedSegment.getInterval());
|
||||
|
@ -157,13 +157,13 @@ public class DataSegmentTest
|
|||
Assert.assertEquals(segment.getSize(), deserializedSegment.getSize());
|
||||
Assert.assertEquals(segment.getId(), deserializedSegment.getId());
|
||||
|
||||
deserializedSegment = mapper.readValue(mapper.writeValueAsString(segment), DataSegment.class);
|
||||
deserializedSegment = MAPPER.readValue(MAPPER.writeValueAsString(segment), DataSegment.class);
|
||||
Assert.assertEquals(0, segment.compareTo(deserializedSegment));
|
||||
|
||||
deserializedSegment = mapper.readValue(mapper.writeValueAsString(segment), DataSegment.class);
|
||||
deserializedSegment = MAPPER.readValue(MAPPER.writeValueAsString(segment), DataSegment.class);
|
||||
Assert.assertEquals(0, deserializedSegment.compareTo(segment));
|
||||
|
||||
deserializedSegment = mapper.readValue(mapper.writeValueAsString(segment), DataSegment.class);
|
||||
deserializedSegment = MAPPER.readValue(MAPPER.writeValueAsString(segment), DataSegment.class);
|
||||
Assert.assertEquals(segment.hashCode(), deserializedSegment.hashCode());
|
||||
}
|
||||
|
||||
|
@ -224,7 +224,7 @@ public class DataSegmentTest
|
|||
.version(DateTimes.of("2012-01-01T11:22:33.444Z").toString())
|
||||
.build();
|
||||
|
||||
final DataSegment segment2 = mapper.readValue(mapper.writeValueAsString(segment), DataSegment.class);
|
||||
final DataSegment segment2 = MAPPER.readValue(MAPPER.writeValueAsString(segment), DataSegment.class);
|
||||
Assert.assertEquals("empty dimensions", ImmutableList.of(), segment2.getDimensions());
|
||||
Assert.assertEquals("empty metrics", ImmutableList.of(), segment2.getMetrics());
|
||||
}
|
||||
|
|
|
@ -43,7 +43,7 @@ import java.util.Map;
|
|||
|
||||
public class SegmentWithOvershadowedStatusTest
|
||||
{
|
||||
private static final ObjectMapper mapper = new TestObjectMapper();
|
||||
private static final ObjectMapper MAPPER = new TestObjectMapper();
|
||||
private static final int TEST_VERSION = 0x9;
|
||||
|
||||
@Before
|
||||
|
@ -51,7 +51,7 @@ public class SegmentWithOvershadowedStatusTest
|
|||
{
|
||||
InjectableValues.Std injectableValues = new InjectableValues.Std();
|
||||
injectableValues.addValue(DataSegment.PruneLoadSpecHolder.class, DataSegment.PruneLoadSpecHolder.DEFAULT);
|
||||
mapper.setInjectableValues(injectableValues);
|
||||
MAPPER.setInjectableValues(injectableValues);
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -74,8 +74,8 @@ public class SegmentWithOvershadowedStatusTest
|
|||
|
||||
final SegmentWithOvershadowedStatus segment = new SegmentWithOvershadowedStatus(dataSegment, false);
|
||||
|
||||
final Map<String, Object> objectMap = mapper.readValue(
|
||||
mapper.writeValueAsString(segment),
|
||||
final Map<String, Object> objectMap = MAPPER.readValue(
|
||||
MAPPER.writeValueAsString(segment),
|
||||
JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
|
||||
);
|
||||
|
||||
|
@ -91,9 +91,9 @@ public class SegmentWithOvershadowedStatusTest
|
|||
Assert.assertEquals(1, objectMap.get("size"));
|
||||
Assert.assertEquals(false, objectMap.get("overshadowed"));
|
||||
|
||||
final String json = mapper.writeValueAsString(segment);
|
||||
final String json = MAPPER.writeValueAsString(segment);
|
||||
|
||||
final TestSegmentWithOvershadowedStatus deserializedSegment = mapper.readValue(
|
||||
final TestSegmentWithOvershadowedStatus deserializedSegment = MAPPER.readValue(
|
||||
json,
|
||||
TestSegmentWithOvershadowedStatus.class
|
||||
);
|
||||
|
|
|
@ -39,8 +39,8 @@ public class AzureDataSegmentPullerTest extends EasyMockSupport
|
|||
{
|
||||
|
||||
private static final String SEGMENT_FILE_NAME = "segment";
|
||||
private static final String containerName = "container";
|
||||
private static final String blobPath = "/path/to/storage/index.zip";
|
||||
private static final String CONTAINER_NAME = "container";
|
||||
private static final String BLOB_PATH = "/path/to/storage/index.zip";
|
||||
private AzureStorage azureStorage;
|
||||
|
||||
@Before
|
||||
|
@ -58,13 +58,13 @@ public class AzureDataSegmentPullerTest extends EasyMockSupport
|
|||
try {
|
||||
final InputStream zipStream = new FileInputStream(pulledFile);
|
||||
|
||||
EasyMock.expect(azureStorage.getBlobInputStream(containerName, blobPath)).andReturn(zipStream);
|
||||
EasyMock.expect(azureStorage.getBlobInputStream(CONTAINER_NAME, BLOB_PATH)).andReturn(zipStream);
|
||||
|
||||
replayAll();
|
||||
|
||||
AzureDataSegmentPuller puller = new AzureDataSegmentPuller(azureStorage);
|
||||
|
||||
FileUtils.FileCopyResult result = puller.getSegmentFiles(containerName, blobPath, toDir);
|
||||
FileUtils.FileCopyResult result = puller.getSegmentFiles(CONTAINER_NAME, BLOB_PATH, toDir);
|
||||
|
||||
File expected = new File(toDir, SEGMENT_FILE_NAME);
|
||||
Assert.assertEquals(value.length(), result.size());
|
||||
|
@ -86,7 +86,7 @@ public class AzureDataSegmentPullerTest extends EasyMockSupport
|
|||
|
||||
final File outDir = Files.createTempDirectory("druid").toFile();
|
||||
try {
|
||||
EasyMock.expect(azureStorage.getBlobInputStream(containerName, blobPath)).andThrow(
|
||||
EasyMock.expect(azureStorage.getBlobInputStream(CONTAINER_NAME, BLOB_PATH)).andThrow(
|
||||
new URISyntaxException(
|
||||
"error",
|
||||
"error",
|
||||
|
@ -98,7 +98,7 @@ public class AzureDataSegmentPullerTest extends EasyMockSupport
|
|||
|
||||
AzureDataSegmentPuller puller = new AzureDataSegmentPuller(azureStorage);
|
||||
|
||||
puller.getSegmentFiles(containerName, blobPath, outDir);
|
||||
puller.getSegmentFiles(CONTAINER_NAME, BLOB_PATH, outDir);
|
||||
|
||||
Assert.assertFalse(outDir.exists());
|
||||
|
||||
|
|
|
@ -50,13 +50,13 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport
|
|||
@Rule
|
||||
public final TemporaryFolder tempFolder = new TemporaryFolder();
|
||||
|
||||
private static final String containerName = "container";
|
||||
private static final String blobPath = "test/2015-04-12T00:00:00.000Z_2015-04-13T00:00:00.000Z/1/0/index.zip";
|
||||
private static final DataSegment dataSegment = new DataSegment(
|
||||
private static final String CONTAINER_NAME = "container";
|
||||
private static final String BLOB_PATH = "test/2015-04-12T00:00:00.000Z_2015-04-13T00:00:00.000Z/1/0/index.zip";
|
||||
private static final DataSegment DATA_SEGMENT = new DataSegment(
|
||||
"test",
|
||||
Intervals.of("2015-04-12/2015-04-13"),
|
||||
"1",
|
||||
ImmutableMap.of("containerName", containerName, "blobPath", blobPath),
|
||||
ImmutableMap.of("containerName", CONTAINER_NAME, "blobPath", BLOB_PATH),
|
||||
null,
|
||||
null,
|
||||
NoneShardSpec.instance(),
|
||||
|
@ -129,8 +129,8 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport
|
|||
{
|
||||
|
||||
AzureDataSegmentPusher pusher = new AzureDataSegmentPusher(azureStorage, azureAccountConfig);
|
||||
final String storageDir = pusher.getStorageDir(dataSegment, false);
|
||||
final String azurePath = pusher.getAzurePath(dataSegment, false);
|
||||
final String storageDir = pusher.getStorageDir(DATA_SEGMENT, false);
|
||||
final String azurePath = pusher.getAzurePath(DATA_SEGMENT, false);
|
||||
|
||||
Assert.assertEquals(
|
||||
StringUtils.format("%s/%s", storageDir, AzureStorageDruidModule.INDEX_ZIP_FILE_NAME),
|
||||
|
@ -144,15 +144,15 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport
|
|||
AzureDataSegmentPusher pusher = new AzureDataSegmentPusher(azureStorage, azureAccountConfig);
|
||||
final int binaryVersion = 9;
|
||||
final File compressedSegmentData = new File("index.zip");
|
||||
final String azurePath = pusher.getAzurePath(dataSegment, false);
|
||||
final String azurePath = pusher.getAzurePath(DATA_SEGMENT, false);
|
||||
|
||||
azureStorage.uploadBlob(compressedSegmentData, containerName, azurePath);
|
||||
azureStorage.uploadBlob(compressedSegmentData, CONTAINER_NAME, azurePath);
|
||||
EasyMock.expectLastCall();
|
||||
|
||||
replayAll();
|
||||
|
||||
DataSegment pushedDataSegment = pusher.uploadDataSegment(
|
||||
dataSegment,
|
||||
DATA_SEGMENT,
|
||||
binaryVersion,
|
||||
0, // empty file
|
||||
compressedSegmentData,
|
||||
|
@ -180,7 +180,7 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport
|
|||
public void storageDirContainsNoColonsTest()
|
||||
{
|
||||
AzureDataSegmentPusher pusher = new AzureDataSegmentPusher(azureStorage, azureAccountConfig);
|
||||
DataSegment withColons = dataSegment.withVersion("2018-01-05T14:54:09.295Z");
|
||||
DataSegment withColons = DATA_SEGMENT.withVersion("2018-01-05T14:54:09.295Z");
|
||||
String segmentPath = pusher.getStorageDir(withColons, false);
|
||||
Assert.assertFalse("Path should not contain any columns", segmentPath.contains(":"));
|
||||
}
|
||||
|
|
|
@ -39,10 +39,10 @@ import java.nio.charset.StandardCharsets;
|
|||
public class AzureTaskLogsTest extends EasyMockSupport
|
||||
{
|
||||
|
||||
private static final String container = "test";
|
||||
private static final String prefix = "test/log";
|
||||
private static final String taskid = "taskid";
|
||||
private static final AzureTaskLogsConfig azureTaskLogsConfig = new AzureTaskLogsConfig(container, prefix, 3);
|
||||
private static final String CONTAINER = "test";
|
||||
private static final String PREFIX = "test/log";
|
||||
private static final String TASK_ID = "taskid";
|
||||
private static final AzureTaskLogsConfig AZURE_TASK_LOGS_CONFIG = new AzureTaskLogsConfig(CONTAINER, PREFIX, 3);
|
||||
|
||||
private AzureStorage azureStorage;
|
||||
private AzureTaskLogs azureTaskLogs;
|
||||
|
@ -51,7 +51,7 @@ public class AzureTaskLogsTest extends EasyMockSupport
|
|||
public void before()
|
||||
{
|
||||
azureStorage = createMock(AzureStorage.class);
|
||||
azureTaskLogs = new AzureTaskLogs(azureTaskLogsConfig, azureStorage);
|
||||
azureTaskLogs = new AzureTaskLogs(AZURE_TASK_LOGS_CONFIG, azureStorage);
|
||||
}
|
||||
|
||||
|
||||
|
@ -63,12 +63,12 @@ public class AzureTaskLogsTest extends EasyMockSupport
|
|||
try {
|
||||
final File logFile = new File(tmpDir, "log");
|
||||
|
||||
azureStorage.uploadBlob(logFile, container, prefix + "/" + taskid + "/log");
|
||||
azureStorage.uploadBlob(logFile, CONTAINER, PREFIX + "/" + TASK_ID + "/log");
|
||||
EasyMock.expectLastCall();
|
||||
|
||||
replayAll();
|
||||
|
||||
azureTaskLogs.pushTaskLog(taskid, logFile);
|
||||
azureTaskLogs.pushTaskLog(TASK_ID, logFile);
|
||||
|
||||
verifyAll();
|
||||
}
|
||||
|
@ -82,16 +82,16 @@ public class AzureTaskLogsTest extends EasyMockSupport
|
|||
{
|
||||
final String testLog = "hello this is a log";
|
||||
|
||||
final String blobPath = prefix + "/" + taskid + "/log";
|
||||
EasyMock.expect(azureStorage.getBlobExists(container, blobPath)).andReturn(true);
|
||||
EasyMock.expect(azureStorage.getBlobLength(container, blobPath)).andReturn((long) testLog.length());
|
||||
EasyMock.expect(azureStorage.getBlobInputStream(container, blobPath)).andReturn(
|
||||
final String blobPath = PREFIX + "/" + TASK_ID + "/log";
|
||||
EasyMock.expect(azureStorage.getBlobExists(CONTAINER, blobPath)).andReturn(true);
|
||||
EasyMock.expect(azureStorage.getBlobLength(CONTAINER, blobPath)).andReturn((long) testLog.length());
|
||||
EasyMock.expect(azureStorage.getBlobInputStream(CONTAINER, blobPath)).andReturn(
|
||||
new ByteArrayInputStream(testLog.getBytes(StandardCharsets.UTF_8)));
|
||||
|
||||
|
||||
replayAll();
|
||||
|
||||
final Optional<ByteSource> byteSource = azureTaskLogs.streamTaskLog(taskid, 0);
|
||||
final Optional<ByteSource> byteSource = azureTaskLogs.streamTaskLog(TASK_ID, 0);
|
||||
|
||||
final StringWriter writer = new StringWriter();
|
||||
IOUtils.copy(byteSource.get().openStream(), writer, "UTF-8");
|
||||
|
@ -105,16 +105,16 @@ public class AzureTaskLogsTest extends EasyMockSupport
|
|||
{
|
||||
final String testLog = "hello this is a log";
|
||||
|
||||
final String blobPath = prefix + "/" + taskid + "/log";
|
||||
EasyMock.expect(azureStorage.getBlobExists(container, blobPath)).andReturn(true);
|
||||
EasyMock.expect(azureStorage.getBlobLength(container, blobPath)).andReturn((long) testLog.length());
|
||||
EasyMock.expect(azureStorage.getBlobInputStream(container, blobPath)).andReturn(
|
||||
final String blobPath = PREFIX + "/" + TASK_ID + "/log";
|
||||
EasyMock.expect(azureStorage.getBlobExists(CONTAINER, blobPath)).andReturn(true);
|
||||
EasyMock.expect(azureStorage.getBlobLength(CONTAINER, blobPath)).andReturn((long) testLog.length());
|
||||
EasyMock.expect(azureStorage.getBlobInputStream(CONTAINER, blobPath)).andReturn(
|
||||
new ByteArrayInputStream(testLog.getBytes(StandardCharsets.UTF_8)));
|
||||
|
||||
|
||||
replayAll();
|
||||
|
||||
final Optional<ByteSource> byteSource = azureTaskLogs.streamTaskLog(taskid, 5);
|
||||
final Optional<ByteSource> byteSource = azureTaskLogs.streamTaskLog(TASK_ID, 5);
|
||||
|
||||
final StringWriter writer = new StringWriter();
|
||||
IOUtils.copy(byteSource.get().openStream(), writer, "UTF-8");
|
||||
|
@ -128,16 +128,16 @@ public class AzureTaskLogsTest extends EasyMockSupport
|
|||
{
|
||||
final String testLog = "hello this is a log";
|
||||
|
||||
final String blobPath = prefix + "/" + taskid + "/log";
|
||||
EasyMock.expect(azureStorage.getBlobExists(container, blobPath)).andReturn(true);
|
||||
EasyMock.expect(azureStorage.getBlobLength(container, blobPath)).andReturn((long) testLog.length());
|
||||
EasyMock.expect(azureStorage.getBlobInputStream(container, blobPath)).andReturn(
|
||||
final String blobPath = PREFIX + "/" + TASK_ID + "/log";
|
||||
EasyMock.expect(azureStorage.getBlobExists(CONTAINER, blobPath)).andReturn(true);
|
||||
EasyMock.expect(azureStorage.getBlobLength(CONTAINER, blobPath)).andReturn((long) testLog.length());
|
||||
EasyMock.expect(azureStorage.getBlobInputStream(CONTAINER, blobPath)).andReturn(
|
||||
new ByteArrayInputStream(StringUtils.toUtf8(testLog)));
|
||||
|
||||
|
||||
replayAll();
|
||||
|
||||
final Optional<ByteSource> byteSource = azureTaskLogs.streamTaskLog(taskid, -3);
|
||||
final Optional<ByteSource> byteSource = azureTaskLogs.streamTaskLog(TASK_ID, -3);
|
||||
|
||||
final StringWriter writer = new StringWriter();
|
||||
IOUtils.copy(byteSource.get().openStream(), writer, "UTF-8");
|
||||
|
|
|
@@ -25,7 +25,7 @@ import org.apache.druid.collections.bitmap.MutableBitmap;

public class ConciseBitMapFactory implements BitMapFactory
{
private static final BitmapFactory bitmapFactory = new ConciseBitmapFactory();
private static final BitmapFactory BITMAP_FACTORY = new ConciseBitmapFactory();

public ConciseBitMapFactory()
{
@@ -34,7 +34,7 @@ public class ConciseBitMapFactory implements BitMapFactory
@Override
public MutableBitmap makeEmptyMutableBitmap()
{
return bitmapFactory.makeEmptyMutableBitmap();
return BITMAP_FACTORY.makeEmptyMutableBitmap();
}

@Override

@@ -25,7 +25,7 @@ import org.apache.druid.collections.bitmap.MutableBitmap;

public class JavaBitMapFactory implements BitMapFactory
{
private static final BitmapFactory bitmapFactory = new BitSetBitmapFactory();
private static final BitmapFactory BITMAP_FACTORY = new BitSetBitmapFactory();

public JavaBitMapFactory()
{
@@ -34,7 +34,7 @@ public class JavaBitMapFactory implements BitMapFactory
@Override
public MutableBitmap makeEmptyMutableBitmap()
{
return bitmapFactory.makeEmptyMutableBitmap();
return BITMAP_FACTORY.makeEmptyMutableBitmap();
}

@Override

@@ -25,7 +25,7 @@ import org.apache.druid.collections.bitmap.RoaringBitmapFactory;

public class RoaringBitMapFactory implements BitMapFactory
{
private static final BitmapFactory bitmapFactory = new RoaringBitmapFactory();
private static final BitmapFactory BITMAP_FACTORY = new RoaringBitmapFactory();

public RoaringBitMapFactory()
{
@@ -34,7 +34,7 @@ public class RoaringBitMapFactory implements BitMapFactory
@Override
public MutableBitmap makeEmptyMutableBitmap()
{
return bitmapFactory.makeEmptyMutableBitmap();
return BITMAP_FACTORY.makeEmptyMutableBitmap();
}

@Override

@ -113,20 +113,20 @@ public class DistinctCountGroupByQueryTest
|
|||
);
|
||||
|
||||
GroupByQuery query = new GroupByQuery.Builder()
|
||||
.setDataSource(QueryRunnerTestHelper.dataSource)
|
||||
.setGranularity(QueryRunnerTestHelper.allGran)
|
||||
.setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
|
||||
.setGranularity(QueryRunnerTestHelper.ALL_GRAN)
|
||||
.setDimensions(new DefaultDimensionSpec(
|
||||
client_type,
|
||||
client_type
|
||||
))
|
||||
.setInterval(QueryRunnerTestHelper.fullOnIntervalSpec)
|
||||
.setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
|
||||
.setLimitSpec(
|
||||
new DefaultLimitSpec(
|
||||
Collections.singletonList(new OrderByColumnSpec(client_type, OrderByColumnSpec.Direction.DESCENDING)),
|
||||
10
|
||||
)
|
||||
)
|
||||
.setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new DistinctCountAggregatorFactory("UV", visitor_id, null))
|
||||
.setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new DistinctCountAggregatorFactory("UV", visitor_id, null))
|
||||
.build();
|
||||
final Segment incrementalIndexSegment = new IncrementalIndexSegment(index, null);
|
||||
|
||||
|
|
|
@@ -86,12 +86,12 @@ public class DistinctCountTimeseriesQueryTest
);

TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
.dataSource(QueryRunnerTestHelper.dataSource)
.granularity(QueryRunnerTestHelper.allGran)
.intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
.dataSource(QueryRunnerTestHelper.DATA_SOURCE)
.granularity(QueryRunnerTestHelper.ALL_GRAN)
.intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
.aggregators(
Lists.newArrayList(
QueryRunnerTestHelper.rowsCount,
QueryRunnerTestHelper.ROWS_COUNT,
new DistinctCountAggregatorFactory("UV", visitor_id, null)
)
)

@ -115,14 +115,14 @@ public class DistinctCountTopNQueryTest
|
|||
)
|
||||
);
|
||||
|
||||
TopNQuery query = new TopNQueryBuilder().dataSource(QueryRunnerTestHelper.dataSource)
|
||||
.granularity(QueryRunnerTestHelper.allGran)
|
||||
.intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
|
||||
TopNQuery query = new TopNQueryBuilder().dataSource(QueryRunnerTestHelper.DATA_SOURCE)
|
||||
.granularity(QueryRunnerTestHelper.ALL_GRAN)
|
||||
.intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
|
||||
.dimension(client_type)
|
||||
.metric("UV")
|
||||
.threshold(10)
|
||||
.aggregators(
|
||||
QueryRunnerTestHelper.rowsCount,
|
||||
QueryRunnerTestHelper.ROWS_COUNT,
|
||||
new DistinctCountAggregatorFactory("UV", visitor_id, null)
|
||||
)
|
||||
.build();
|
||||
|
|
|
@ -69,7 +69,7 @@ import java.util.stream.Collectors;
|
|||
public class DerivativeDataSourceManager
|
||||
{
|
||||
private static final EmittingLogger log = new EmittingLogger(DerivativeDataSourceManager.class);
|
||||
private static final AtomicReference<ConcurrentHashMap<String, SortedSet<DerivativeDataSource>>> derivativesRef =
|
||||
private static final AtomicReference<ConcurrentHashMap<String, SortedSet<DerivativeDataSource>>> DERIVATIVES_REF =
|
||||
new AtomicReference<>(new ConcurrentHashMap<>());
|
||||
private final MaterializedViewConfig config;
|
||||
private final Supplier<MetadataStorageTablesConfig> dbTables;
|
||||
|
@ -137,7 +137,7 @@ public class DerivativeDataSourceManager
|
|||
started = false;
|
||||
future.cancel(true);
|
||||
future = null;
|
||||
derivativesRef.set(new ConcurrentHashMap<>());
|
||||
DERIVATIVES_REF.set(new ConcurrentHashMap<>());
|
||||
exec.shutdownNow();
|
||||
exec = null;
|
||||
}
|
||||
|
@ -145,12 +145,12 @@ public class DerivativeDataSourceManager
|
|||
|
||||
public static ImmutableSet<DerivativeDataSource> getDerivatives(String datasource)
|
||||
{
|
||||
return ImmutableSet.copyOf(derivativesRef.get().getOrDefault(datasource, new TreeSet<>()));
|
||||
return ImmutableSet.copyOf(DERIVATIVES_REF.get().getOrDefault(datasource, new TreeSet<>()));
|
||||
}
|
||||
|
||||
public static ImmutableMap<String, Set<DerivativeDataSource>> getAllDerivatives()
|
||||
{
|
||||
return ImmutableMap.copyOf(derivativesRef.get());
|
||||
return ImmutableMap.copyOf(DERIVATIVES_REF.get());
|
||||
}
|
||||
|
||||
private void updateDerivatives()
|
||||
|
@ -205,8 +205,8 @@ public class DerivativeDataSourceManager
|
|||
}
|
||||
ConcurrentHashMap<String, SortedSet<DerivativeDataSource>> current;
|
||||
do {
|
||||
current = derivativesRef.get();
|
||||
} while (!derivativesRef.compareAndSet(current, newDerivatives));
|
||||
current = DERIVATIVES_REF.get();
|
||||
} while (!DERIVATIVES_REF.compareAndSet(current, newDerivatives));
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -203,7 +203,7 @@ public class DatasourceOptimizerTest extends CuratorTestBase
|
|||
// build user query
|
||||
TopNQuery userQuery = new TopNQueryBuilder()
|
||||
.dataSource("base")
|
||||
.granularity(QueryRunnerTestHelper.allGran)
|
||||
.granularity(QueryRunnerTestHelper.ALL_GRAN)
|
||||
.dimension("dim1")
|
||||
.metric("cost")
|
||||
.threshold(4)
|
||||
|
@ -214,7 +214,7 @@ public class DatasourceOptimizerTest extends CuratorTestBase
|
|||
List<Query> expectedQueryAfterOptimizing = Lists.newArrayList(
|
||||
new TopNQueryBuilder()
|
||||
.dataSource("derivative")
|
||||
.granularity(QueryRunnerTestHelper.allGran)
|
||||
.granularity(QueryRunnerTestHelper.ALL_GRAN)
|
||||
.dimension("dim1")
|
||||
.metric("cost")
|
||||
.threshold(4)
|
||||
|
@ -223,7 +223,7 @@ public class DatasourceOptimizerTest extends CuratorTestBase
|
|||
.build(),
|
||||
new TopNQueryBuilder()
|
||||
.dataSource("base")
|
||||
.granularity(QueryRunnerTestHelper.allGran)
|
||||
.granularity(QueryRunnerTestHelper.ALL_GRAN)
|
||||
.dimension("dim1")
|
||||
.metric("cost")
|
||||
.threshold(4)
|
||||
|
|
|
@@ -44,10 +44,10 @@ public class MaterializedViewQueryQueryToolChestTest
public void testMakePostComputeManipulatorFn()
{
TimeseriesQuery realQuery = Druids.newTimeseriesQueryBuilder()
.dataSource(QueryRunnerTestHelper.dataSource)
.granularity(QueryRunnerTestHelper.dayGran)
.intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
.aggregators(QueryRunnerTestHelper.rowsCount)
.dataSource(QueryRunnerTestHelper.DATA_SOURCE)
.granularity(QueryRunnerTestHelper.DAY_GRAN)
.intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
.aggregators(QueryRunnerTestHelper.ROWS_COUNT)
.descending(true)
.build();
MaterializedViewQuery materializedViewQuery = new MaterializedViewQuery(realQuery, null);
@@ -87,7 +87,7 @@ public class MaterializedViewQueryQueryToolChestTest

Assert.assertEquals(postResult.getTimestamp(), result.getTimestamp());
Assert.assertEquals(postResultMap.size(), 2);
Assert.assertEquals(postResultMap.get(QueryRunnerTestHelper.rowsCount.getName()), "metricvalue1");
Assert.assertEquals(postResultMap.get(QueryRunnerTestHelper.ROWS_COUNT.getName()), "metricvalue1");
Assert.assertEquals(postResultMap.get("dim1"), "dimvalue1");
}
}

@ -43,15 +43,15 @@ import java.io.IOException;
|
|||
|
||||
public class MaterializedViewQueryTest
|
||||
{
|
||||
private static final ObjectMapper jsonMapper = TestHelper.makeJsonMapper();
|
||||
private static final ObjectMapper JSON_MAPPER = TestHelper.makeJsonMapper();
|
||||
private DataSourceOptimizer optimizer;
|
||||
|
||||
@Before
|
||||
public void setUp()
|
||||
{
|
||||
jsonMapper.registerSubtypes(new NamedType(MaterializedViewQuery.class, MaterializedViewQuery.TYPE));
|
||||
JSON_MAPPER.registerSubtypes(new NamedType(MaterializedViewQuery.class, MaterializedViewQuery.TYPE));
|
||||
optimizer = EasyMock.createMock(DataSourceOptimizer.class);
|
||||
jsonMapper.setInjectableValues(
|
||||
JSON_MAPPER.setInjectableValues(
|
||||
new InjectableValues.Std()
|
||||
.addValue(ExprMacroTable.class.getName(), LookupEnabledTestExprMacroTable.INSTANCE)
|
||||
.addValue(DataSourceOptimizer.class, optimizer)
|
||||
|
@ -62,16 +62,16 @@ public class MaterializedViewQueryTest
|
|||
public void testQuerySerialization() throws IOException
|
||||
{
|
||||
TopNQuery topNQuery = new TopNQueryBuilder()
|
||||
.dataSource(QueryRunnerTestHelper.dataSource)
|
||||
.granularity(QueryRunnerTestHelper.allGran)
|
||||
.dimension(QueryRunnerTestHelper.marketDimension)
|
||||
.metric(QueryRunnerTestHelper.indexMetric)
|
||||
.dataSource(QueryRunnerTestHelper.DATA_SOURCE)
|
||||
.granularity(QueryRunnerTestHelper.ALL_GRAN)
|
||||
.dimension(QueryRunnerTestHelper.MARKET_DIMENSION)
|
||||
.metric(QueryRunnerTestHelper.INDEX_METRIC)
|
||||
.threshold(4)
|
||||
.intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
|
||||
.intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
|
||||
.aggregators(
|
||||
Lists.newArrayList(
|
||||
Iterables.concat(
|
||||
QueryRunnerTestHelper.commonDoubleAggregators,
|
||||
QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS,
|
||||
Lists.newArrayList(
|
||||
new DoubleMaxAggregatorFactory("maxIndex", "index"),
|
||||
new DoubleMinAggregatorFactory("minIndex", "index")
|
||||
|
@ -79,14 +79,14 @@ public class MaterializedViewQueryTest
|
|||
)
|
||||
)
|
||||
)
|
||||
.postAggregators(QueryRunnerTestHelper.addRowsIndexConstant)
|
||||
.postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
|
||||
.build();
|
||||
MaterializedViewQuery query = new MaterializedViewQuery(topNQuery, optimizer);
|
||||
String json = jsonMapper.writeValueAsString(query);
|
||||
Query serdeQuery = jsonMapper.readValue(json, Query.class);
|
||||
String json = JSON_MAPPER.writeValueAsString(query);
|
||||
Query serdeQuery = JSON_MAPPER.readValue(json, Query.class);
|
||||
Assert.assertEquals(query, serdeQuery);
|
||||
Assert.assertEquals(new TableDataSource(QueryRunnerTestHelper.dataSource), query.getDataSource());
|
||||
Assert.assertEquals(QueryRunnerTestHelper.allGran, query.getGranularity());
|
||||
Assert.assertEquals(QueryRunnerTestHelper.fullOnIntervalSpec.getIntervals(), query.getIntervals());
|
||||
Assert.assertEquals(new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE), query.getDataSource());
|
||||
Assert.assertEquals(QueryRunnerTestHelper.ALL_GRAN, query.getGranularity());
|
||||
Assert.assertEquals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC.getIntervals(), query.getIntervals());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -60,22 +60,22 @@ public class MovingAverageIterableTest
|
|||
private static final String AGE = "age";
|
||||
private static final String COUNTRY = "country";
|
||||
|
||||
private static final Map<String, Object> dims1 = new HashMap<>();
|
||||
private static final Map<String, Object> dims2 = new HashMap<>();
|
||||
private static final Map<String, Object> dims3 = new HashMap<>();
|
||||
private static final Map<String, Object> DIMS1 = new HashMap<>();
|
||||
private static final Map<String, Object> DIMS2 = new HashMap<>();
|
||||
private static final Map<String, Object> DIMS3 = new HashMap<>();
|
||||
|
||||
static {
|
||||
dims1.put(GENDER, "m");
|
||||
dims1.put(AGE, "10");
|
||||
dims1.put(COUNTRY, "US");
|
||||
DIMS1.put(GENDER, "m");
|
||||
DIMS1.put(AGE, "10");
|
||||
DIMS1.put(COUNTRY, "US");
|
||||
|
||||
dims2.put(GENDER, "f");
|
||||
dims2.put(AGE, "8");
|
||||
dims2.put(COUNTRY, "US");
|
||||
DIMS2.put(GENDER, "f");
|
||||
DIMS2.put(AGE, "8");
|
||||
DIMS2.put(COUNTRY, "US");
|
||||
|
||||
dims3.put(GENDER, "u");
|
||||
dims3.put(AGE, "5");
|
||||
dims3.put(COUNTRY, "UK");
|
||||
DIMS3.put(GENDER, "u");
|
||||
DIMS3.put(AGE, "5");
|
||||
DIMS3.put(COUNTRY, "UK");
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -90,16 +90,16 @@ public class MovingAverageIterableTest
|
|||
|
||||
Sequence<RowBucket> dayBuckets = Sequences.simple(Arrays.asList(
|
||||
new RowBucket(JAN_1, Arrays.asList(
|
||||
new MapBasedRow(JAN_1, dims1),
|
||||
new MapBasedRow(JAN_1, dims2)
|
||||
new MapBasedRow(JAN_1, DIMS1),
|
||||
new MapBasedRow(JAN_1, DIMS2)
|
||||
)),
|
||||
new RowBucket(JAN_2, Collections.singletonList(
|
||||
new MapBasedRow(JAN_2, dims1)
|
||||
new MapBasedRow(JAN_2, DIMS1)
|
||||
)),
|
||||
new RowBucket(JAN_3, Collections.emptyList()),
|
||||
new RowBucket(JAN_4, Arrays.asList(
|
||||
new MapBasedRow(JAN_4, dims2),
|
||||
new MapBasedRow(JAN_4, dims3)
|
||||
new MapBasedRow(JAN_4, DIMS2),
|
||||
new MapBasedRow(JAN_4, DIMS3)
|
||||
))
|
||||
));
|
||||
|
||||
|
|
|
@ -125,7 +125,7 @@ public class MapVirtualColumnGroupByTest
|
|||
public void testWithMapColumn()
|
||||
{
|
||||
final GroupByQuery query = new GroupByQuery(
|
||||
new TableDataSource(QueryRunnerTestHelper.dataSource),
|
||||
new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
|
||||
new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2011/2012"))),
|
||||
VirtualColumns.create(ImmutableList.of(new MapVirtualColumn("keys", "values", "params"))),
|
||||
null,
|
||||
|
@ -148,7 +148,7 @@ public class MapVirtualColumnGroupByTest
|
|||
public void testWithSubColumn()
|
||||
{
|
||||
final GroupByQuery query = new GroupByQuery(
|
||||
new TableDataSource(QueryRunnerTestHelper.dataSource),
|
||||
new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
|
||||
new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2011/2012"))),
|
||||
VirtualColumns.create(ImmutableList.of(new MapVirtualColumn("keys", "values", "params"))),
|
||||
null,
|
||||
|
|
|
@ -133,9 +133,9 @@ public class MapVirtualColumnSelectTest
|
|||
private Druids.SelectQueryBuilder testBuilder()
|
||||
{
|
||||
return Druids.newSelectQueryBuilder()
|
||||
.dataSource(QueryRunnerTestHelper.dataSource)
|
||||
.granularity(QueryRunnerTestHelper.allGran)
|
||||
.intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
|
||||
.dataSource(QueryRunnerTestHelper.DATA_SOURCE)
|
||||
.granularity(QueryRunnerTestHelper.ALL_GRAN)
|
||||
.intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
|
||||
.pagingSpec(new PagingSpec(null, 3));
|
||||
}
|
||||
|
||||
|
@ -197,7 +197,7 @@ public class MapVirtualColumnSelectTest
|
|||
Assert.assertEquals(expected.size(), events.size());
|
||||
for (int i = 0; i < events.size(); i++) {
|
||||
Map event = events.get(i).getEvent();
|
||||
event.remove(EventHolder.timestampKey);
|
||||
event.remove(EventHolder.TIMESTAMP_KEY);
|
||||
Assert.assertEquals(expected.get(i), event);
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -85,7 +85,7 @@ public class MapVirtualColumnTopNTest
public void testWithMapColumn()
{
final TopNQuery query = new TopNQuery(
new TableDataSource(QueryRunnerTestHelper.dataSource),
new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
VirtualColumns.create(
ImmutableList.of(
new MapVirtualColumn("keys", "values", "params")
@@ -111,7 +111,7 @@ public class MapVirtualColumnTopNTest
public void testWithSubColumn()
{
final TopNQuery query = new TopNQuery(
new TableDataSource(QueryRunnerTestHelper.dataSource),
new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
VirtualColumns.create(
ImmutableList.of(
new MapVirtualColumn("keys", "values", "params")

@@ -39,7 +39,7 @@ import java.nio.ByteBuffer;
public class DoublesSketchComplexMetricSerde extends ComplexMetricSerde
{

private static final DoublesSketchObjectStrategy strategy = new DoublesSketchObjectStrategy();
private static final DoublesSketchObjectStrategy STRATEGY = new DoublesSketchObjectStrategy();

@Override
public String getTypeName()
@@ -50,7 +50,7 @@ public class DoublesSketchComplexMetricSerde extends ComplexMetricSerde
@Override
public ObjectStrategy<DoublesSketch> getObjectStrategy()
{
return strategy;
return STRATEGY;
}

@Override
@@ -105,7 +105,7 @@ public class DoublesSketchComplexMetricSerde extends ComplexMetricSerde
@Override
public void deserializeColumn(final ByteBuffer buffer, final ColumnBuilder builder)
{
final GenericIndexed<DoublesSketch> column = GenericIndexed.read(buffer, strategy, builder.getFileMapper());
final GenericIndexed<DoublesSketch> column = GenericIndexed.read(buffer, STRATEGY, builder.getFileMapper());
builder.setComplexColumnSupplier(new ComplexColumnPartSupplier(getTypeName(), column));
}
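
The scope of the rename across these modules is governed by a single naming pattern for constants. The sketch below is illustrative only: the exact Checkstyle configuration added by this commit is not visible in this part of the diff, so the regular expression used here is an assumption based on Checkstyle's customary ConstantName format.

import java.util.regex.Pattern;

// Illustrative, self-contained check of the UPPER_SNAKE_CASE constant-name rule.
// The regex mirrors Checkstyle's usual ConstantName default; the project's real
// checkstyle.xml may use a different expression.
public class ConstantNamePatternDemo
{
  private static final Pattern CONSTANT_NAME = Pattern.compile("^[A-Z][A-Z0-9]*(_[A-Z0-9]+)*$");

  public static void main(String[] args)
  {
    System.out.println(CONSTANT_NAME.matcher("JSON_MAPPER").matches());    // true
    System.out.println(CONSTANT_NAME.matcher("jsonMapper").matches());     // false
    System.out.println(CONSTANT_NAME.matcher("MAX_NUM_VALUES").matches()); // true
  }
}
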
@ -57,14 +57,14 @@ import java.util.stream.IntStream;
|
|||
|
||||
public class BloomFilterAggregatorTest
|
||||
{
|
||||
private static final String nullish = NullHandling.replaceWithDefault() ? "" : null;
|
||||
private static final List<String[]> values1 = dimensionValues(
|
||||
private static final String NULLISH = NullHandling.replaceWithDefault() ? "" : null;
|
||||
private static final List<String[]> VALUES1 = dimensionValues(
|
||||
"a",
|
||||
"b",
|
||||
"c",
|
||||
"a",
|
||||
"a",
|
||||
nullish,
|
||||
NULLISH,
|
||||
"b",
|
||||
"b",
|
||||
"b",
|
||||
|
@ -72,7 +72,7 @@ public class BloomFilterAggregatorTest
|
|||
"a",
|
||||
"a"
|
||||
);
|
||||
private static final List<String[]> values2 = dimensionValues(
|
||||
private static final List<String[]> VALUES2 = dimensionValues(
|
||||
"a",
|
||||
"b",
|
||||
"c",
|
||||
|
@ -80,17 +80,17 @@ public class BloomFilterAggregatorTest
|
|||
"a",
|
||||
"e",
|
||||
"b",
|
||||
new String[]{nullish, "x"},
|
||||
new String[]{"x", nullish},
|
||||
new String[]{NULLISH, "x"},
|
||||
new String[]{"x", NULLISH},
|
||||
new String[]{"y", "x"},
|
||||
new String[]{"x", "y"},
|
||||
new String[]{"x", "y", "a"}
|
||||
);
|
||||
private static final Double[] doubleValues1 = new Double[]{0.1, 1.5, 18.3, 0.1};
|
||||
private static final Float[] floatValues1 = new Float[]{0.4f, 0.8f, 23.2f};
|
||||
private static final Long[] longValues1 = new Long[]{10241L, 12312355L, 0L, 81L};
|
||||
private static final Double[] DOUBLE_VALUES1 = new Double[]{0.1, 1.5, 18.3, 0.1};
|
||||
private static final Float[] FLOAT_VALUES1 = new Float[]{0.4f, 0.8f, 23.2f};
|
||||
private static final Long[] LONG_VALUES1 = new Long[]{10241L, 12312355L, 0L, 81L};
|
||||
|
||||
private static final int maxNumValues = 15;
|
||||
private static final int MAX_NUM_VALUES = 15;
|
||||
|
||||
private static BloomKFilter filter1;
|
||||
private static BloomKFilter filter2;
|
||||
|
@ -104,31 +104,31 @@ public class BloomFilterAggregatorTest
|
|||
|
||||
static {
|
||||
try {
|
||||
filter1 = new BloomKFilter(maxNumValues);
|
||||
filter2 = new BloomKFilter(maxNumValues);
|
||||
BloomKFilter combinedValuesFilter = new BloomKFilter(maxNumValues);
|
||||
filter1 = new BloomKFilter(MAX_NUM_VALUES);
|
||||
filter2 = new BloomKFilter(MAX_NUM_VALUES);
|
||||
BloomKFilter combinedValuesFilter = new BloomKFilter(MAX_NUM_VALUES);
|
||||
|
||||
createStringFilter(values1, filter1, combinedValuesFilter);
|
||||
createStringFilter(values2, filter2, combinedValuesFilter);
|
||||
createStringFilter(VALUES1, filter1, combinedValuesFilter);
|
||||
createStringFilter(VALUES2, filter2, combinedValuesFilter);
|
||||
|
||||
serializedFilter1 = filterToString(filter1);
|
||||
serializedFilter2 = filterToString(filter2);
|
||||
serializedCombinedFilter = filterToString(combinedValuesFilter);
|
||||
|
||||
BloomKFilter longFilter = new BloomKFilter(maxNumValues);
|
||||
for (long val : longValues1) {
|
||||
BloomKFilter longFilter = new BloomKFilter(MAX_NUM_VALUES);
|
||||
for (long val : LONG_VALUES1) {
|
||||
longFilter.addLong(val);
|
||||
}
|
||||
serializedLongFilter = filterToString(longFilter);
|
||||
|
||||
BloomKFilter floatFilter = new BloomKFilter(maxNumValues);
|
||||
for (float val : floatValues1) {
|
||||
BloomKFilter floatFilter = new BloomKFilter(MAX_NUM_VALUES);
|
||||
for (float val : FLOAT_VALUES1) {
|
||||
floatFilter.addFloat(val);
|
||||
}
|
||||
serializedFloatFilter = filterToString(floatFilter);
|
||||
|
||||
BloomKFilter doubleFilter = new BloomKFilter(maxNumValues);
|
||||
for (double val : doubleValues1) {
|
||||
BloomKFilter doubleFilter = new BloomKFilter(MAX_NUM_VALUES);
|
||||
for (double val : DOUBLE_VALUES1) {
|
||||
doubleFilter.addDouble(val);
|
||||
}
|
||||
serializedDoubleFilter = filterToString(doubleFilter);
|
||||
|
@ -232,7 +232,7 @@ public class BloomFilterAggregatorTest
|
|||
valueAggregatorFactory = new BloomFilterAggregatorFactory(
|
||||
"billy",
|
||||
dimSpec,
|
||||
maxNumValues
|
||||
MAX_NUM_VALUES
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -240,10 +240,10 @@ public class BloomFilterAggregatorTest
|
|||
@Test
|
||||
public void testAggregateValues() throws IOException
|
||||
{
|
||||
DimensionSelector dimSelector = new CardinalityAggregatorTest.TestDimensionSelector(values1, null);
|
||||
StringBloomFilterAggregator agg = new StringBloomFilterAggregator(dimSelector, maxNumValues, true);
|
||||
DimensionSelector dimSelector = new CardinalityAggregatorTest.TestDimensionSelector(VALUES1, null);
|
||||
StringBloomFilterAggregator agg = new StringBloomFilterAggregator(dimSelector, MAX_NUM_VALUES, true);
|
||||
|
||||
for (int i = 0; i < values1.size(); ++i) {
|
||||
for (int i = 0; i < VALUES1.size(); ++i) {
|
||||
aggregateDimension(Collections.singletonList(dimSelector), agg);
|
||||
}
|
||||
|
||||
|
@ -257,10 +257,10 @@ public class BloomFilterAggregatorTest
|
|||
@Test
|
||||
public void testAggregateLongValues() throws IOException
|
||||
{
|
||||
TestLongColumnSelector selector = new TestLongColumnSelector(Arrays.asList(longValues1));
|
||||
LongBloomFilterAggregator agg = new LongBloomFilterAggregator(selector, maxNumValues, true);
|
||||
TestLongColumnSelector selector = new TestLongColumnSelector(Arrays.asList(LONG_VALUES1));
|
||||
LongBloomFilterAggregator agg = new LongBloomFilterAggregator(selector, MAX_NUM_VALUES, true);
|
||||
|
||||
for (Long ignored : longValues1) {
|
||||
for (Long ignored : LONG_VALUES1) {
|
||||
aggregateColumn(Collections.singletonList(selector), agg);
|
||||
}
|
||||
|
||||
|
@ -274,10 +274,10 @@ public class BloomFilterAggregatorTest
|
|||
@Test
|
||||
public void testAggregateFloatValues() throws IOException
|
||||
{
|
||||
TestFloatColumnSelector selector = new TestFloatColumnSelector(Arrays.asList(floatValues1));
|
||||
FloatBloomFilterAggregator agg = new FloatBloomFilterAggregator(selector, maxNumValues, true);
|
||||
TestFloatColumnSelector selector = new TestFloatColumnSelector(Arrays.asList(FLOAT_VALUES1));
|
||||
FloatBloomFilterAggregator agg = new FloatBloomFilterAggregator(selector, MAX_NUM_VALUES, true);
|
||||
|
||||
for (Float ignored : floatValues1) {
|
||||
for (Float ignored : FLOAT_VALUES1) {
|
||||
aggregateColumn(Collections.singletonList(selector), agg);
|
||||
}
|
||||
|
||||
|
@ -291,10 +291,10 @@ public class BloomFilterAggregatorTest
|
|||
@Test
|
||||
public void testAggregateDoubleValues() throws IOException
|
||||
{
|
||||
TestDoubleColumnSelector selector = new TestDoubleColumnSelector(Arrays.asList(doubleValues1));
|
||||
DoubleBloomFilterAggregator agg = new DoubleBloomFilterAggregator(selector, maxNumValues, true);
|
||||
TestDoubleColumnSelector selector = new TestDoubleColumnSelector(Arrays.asList(DOUBLE_VALUES1));
|
||||
DoubleBloomFilterAggregator agg = new DoubleBloomFilterAggregator(selector, MAX_NUM_VALUES, true);
|
||||
|
||||
for (Double ignored : doubleValues1) {
|
||||
for (Double ignored : DOUBLE_VALUES1) {
|
||||
aggregateColumn(Collections.singletonList(selector), agg);
|
||||
}
|
||||
|
||||
|
@ -308,8 +308,8 @@ public class BloomFilterAggregatorTest
|
|||
@Test
|
||||
public void testBufferAggregateStringValues() throws IOException
|
||||
{
|
||||
DimensionSelector dimSelector = new CardinalityAggregatorTest.TestDimensionSelector(values2, null);
StringBloomFilterAggregator agg = new StringBloomFilterAggregator(dimSelector, maxNumValues, true);
DimensionSelector dimSelector = new CardinalityAggregatorTest.TestDimensionSelector(VALUES2, null);
StringBloomFilterAggregator agg = new StringBloomFilterAggregator(dimSelector, MAX_NUM_VALUES, true);
int maxSize = valueAggregatorFactory.getMaxIntermediateSizeWithNulls();
ByteBuffer buf = ByteBuffer.allocate(maxSize + 64);

@@ -318,7 +318,7 @@ public class BloomFilterAggregatorTest
agg.init(buf, pos);
for (int i = 0; i < values2.size(); ++i) {
for (int i = 0; i < VALUES2.size(); ++i) {
bufferAggregateDimension(Collections.singletonList(dimSelector), agg, buf, pos);
}
BloomKFilter bloomKFilter = BloomKFilter.deserialize(

@@ -331,8 +331,8 @@ public class BloomFilterAggregatorTest
@Test
public void testBufferAggregateLongValues() throws IOException
{
TestLongColumnSelector selector = new TestLongColumnSelector(Arrays.asList(longValues1));
LongBloomFilterAggregator agg = new LongBloomFilterAggregator(selector, maxNumValues, true);
TestLongColumnSelector selector = new TestLongColumnSelector(Arrays.asList(LONG_VALUES1));
LongBloomFilterAggregator agg = new LongBloomFilterAggregator(selector, MAX_NUM_VALUES, true);
int maxSize = valueAggregatorFactory.getMaxIntermediateSizeWithNulls();
ByteBuffer buf = ByteBuffer.allocate(maxSize + 64);

@@ -341,7 +341,7 @@ public class BloomFilterAggregatorTest
agg.init(buf, pos);
IntStream.range(0, longValues1.length)
IntStream.range(0, LONG_VALUES1.length)
.forEach(i -> bufferAggregateColumn(Collections.singletonList(selector), agg, buf, pos));
BloomKFilter bloomKFilter = BloomKFilter.deserialize(
(ByteBuffer) valueAggregatorFactory.finalizeComputation(agg.get(buf, pos))

@@ -353,8 +353,8 @@ public class BloomFilterAggregatorTest
@Test
public void testBufferAggregateFloatValues() throws IOException
{
TestFloatColumnSelector selector = new TestFloatColumnSelector(Arrays.asList(floatValues1));
FloatBloomFilterAggregator agg = new FloatBloomFilterAggregator(selector, maxNumValues, true);
TestFloatColumnSelector selector = new TestFloatColumnSelector(Arrays.asList(FLOAT_VALUES1));
FloatBloomFilterAggregator agg = new FloatBloomFilterAggregator(selector, MAX_NUM_VALUES, true);
int maxSize = valueAggregatorFactory.getMaxIntermediateSizeWithNulls();
ByteBuffer buf = ByteBuffer.allocate(maxSize + 64);

@@ -363,7 +363,7 @@ public class BloomFilterAggregatorTest
agg.init(buf, pos);
IntStream.range(0, floatValues1.length)
IntStream.range(0, FLOAT_VALUES1.length)
.forEach(i -> bufferAggregateColumn(Collections.singletonList(selector), agg, buf, pos));
BloomKFilter bloomKFilter = BloomKFilter.deserialize(
(ByteBuffer) valueAggregatorFactory.finalizeComputation(agg.get(buf, pos))

@@ -375,8 +375,8 @@ public class BloomFilterAggregatorTest
@Test
public void testBufferAggregateDoubleValues() throws IOException
{
TestDoubleColumnSelector selector = new TestDoubleColumnSelector(Arrays.asList(doubleValues1));
DoubleBloomFilterAggregator agg = new DoubleBloomFilterAggregator(selector, maxNumValues, true);
TestDoubleColumnSelector selector = new TestDoubleColumnSelector(Arrays.asList(DOUBLE_VALUES1));
DoubleBloomFilterAggregator agg = new DoubleBloomFilterAggregator(selector, MAX_NUM_VALUES, true);
int maxSize = valueAggregatorFactory.getMaxIntermediateSizeWithNulls();
ByteBuffer buf = ByteBuffer.allocate(maxSize + 64);

@@ -385,7 +385,7 @@ public class BloomFilterAggregatorTest
agg.init(buf, pos);
IntStream.range(0, doubleValues1.length)
IntStream.range(0, DOUBLE_VALUES1.length)
.forEach(i -> bufferAggregateColumn(Collections.singletonList(selector), agg, buf, pos));
BloomKFilter bloomKFilter = BloomKFilter.deserialize(
(ByteBuffer) valueAggregatorFactory.finalizeComputation(agg.get(buf, pos))

@@ -397,16 +397,16 @@ public class BloomFilterAggregatorTest
@Test
public void testCombineValues() throws IOException
{
DimensionSelector dimSelector1 = new CardinalityAggregatorTest.TestDimensionSelector(values1, null);
DimensionSelector dimSelector2 = new CardinalityAggregatorTest.TestDimensionSelector(values2, null);
DimensionSelector dimSelector1 = new CardinalityAggregatorTest.TestDimensionSelector(VALUES1, null);
DimensionSelector dimSelector2 = new CardinalityAggregatorTest.TestDimensionSelector(VALUES2, null);
StringBloomFilterAggregator agg1 = new StringBloomFilterAggregator(dimSelector1, maxNumValues, true);
StringBloomFilterAggregator agg2 = new StringBloomFilterAggregator(dimSelector2, maxNumValues, true);
StringBloomFilterAggregator agg1 = new StringBloomFilterAggregator(dimSelector1, MAX_NUM_VALUES, true);
StringBloomFilterAggregator agg2 = new StringBloomFilterAggregator(dimSelector2, MAX_NUM_VALUES, true);
for (int i = 0; i < values1.size(); ++i) {
for (int i = 0; i < VALUES1.size(); ++i) {
aggregateDimension(Collections.singletonList(dimSelector1), agg1);
}
for (int i = 0; i < values2.size(); ++i) {
for (int i = 0; i < VALUES2.size(); ++i) {
aggregateDimension(Collections.singletonList(dimSelector2), agg2);
}

@@ -435,7 +435,7 @@ public class BloomFilterAggregatorTest
);
BloomFilterMergeAggregator mergeAggregator =
new BloomFilterMergeAggregator(mergeDim, maxNumValues, true);
new BloomFilterMergeAggregator(mergeDim, MAX_NUM_VALUES, true);
for (int i = 0; i < 2; ++i) {
aggregateColumn(Collections.singletonList(mergeDim), mergeAggregator);

@@ -461,7 +461,7 @@ public class BloomFilterAggregatorTest
);
BloomFilterMergeAggregator mergeAggregator =
new BloomFilterMergeAggregator(mergeDim, maxNumValues, true);
new BloomFilterMergeAggregator(mergeDim, MAX_NUM_VALUES, true);
for (int i = 0; i < 2; ++i) {
aggregateColumn(Collections.singletonList(mergeDim), mergeAggregator);

@@ -486,7 +486,7 @@ public class BloomFilterAggregatorTest
)
);
BloomFilterMergeAggregator mergeAggregator = new BloomFilterMergeAggregator(mergeDim, maxNumValues, false);
BloomFilterMergeAggregator mergeAggregator = new BloomFilterMergeAggregator(mergeDim, MAX_NUM_VALUES, false);
int maxSize = valueAggregatorFactory.getCombiningFactory().getMaxIntermediateSizeWithNulls();
ByteBuffer buf = ByteBuffer.allocate(maxSize + 64);

@@ -513,7 +513,7 @@ public class BloomFilterAggregatorTest
BloomFilterAggregatorFactory factory = new BloomFilterAggregatorFactory(
"billy",
new DefaultDimensionSpec("b", "b"),
maxNumValues
MAX_NUM_VALUES
);
ObjectMapper objectMapper = new DefaultObjectMapper();
new BloomFilterExtensionModule().getJacksonModules().forEach(objectMapper::registerModule);

@@ -536,7 +536,7 @@ public class BloomFilterAggregatorTest
BloomFilterAggregatorFactory factory2 = new BloomFilterAggregatorFactory(
"billy",
new ExtractionDimensionSpec("b", "b", new RegexDimExtractionFn(".*", false, null)),
maxNumValues
MAX_NUM_VALUES
);
Assert.assertEquals(

@@ -547,7 +547,7 @@ public class BloomFilterAggregatorTest
BloomFilterAggregatorFactory factory3 = new BloomFilterAggregatorFactory(
"billy",
new RegexFilteredDimensionSpec(new DefaultDimensionSpec("a", "a"), ".*"),
maxNumValues
MAX_NUM_VALUES
);
Assert.assertEquals(
factory3,

@@ -53,13 +53,13 @@ import java.util.List;
@RunWith(Parameterized.class)
public class BloomFilterGroupByQueryTest
{
private static final BloomFilterExtensionModule module = new BloomFilterExtensionModule();
private static final BloomFilterExtensionModule MODULE = new BloomFilterExtensionModule();
static {
// throwaway, just using to properly initialize jackson modules
Guice.createInjector(
binder -> binder.bind(Key.get(ObjectMapper.class, Json.class)).toInstance(TestHelper.makeJsonMapper()),
module
MODULE
);
}

@@ -72,7 +72,7 @@ public class BloomFilterGroupByQueryTest
public BloomFilterGroupByQueryTest(final GroupByQueryConfig config)
{
helper = AggregationTestHelper.createGroupByQueryAggregationTestHelper(
Lists.newArrayList(module.getJacksonModules()),
Lists.newArrayList(MODULE.getJacksonModules()),
config,
tempFolder
);

@@ -96,7 +96,7 @@ public class BloomFilterSqlAggregatorTest
{
private static final int TEST_NUM_ENTRIES = 1000;
private static AuthenticationResult authenticationResult = CalciteTests.REGULAR_USER_AUTH_RESULT;
private static final Injector injector = Guice.createInjector(
private static final Injector INJECTOR = Guice.createInjector(
binder -> {
binder.bind(Key.get(ObjectMapper.class, Json.class)).toInstance(TestHelper.makeJsonMapper());
binder.bind(LookupExtractorFactoryContainerProvider.class).toInstance(

@@ -111,7 +111,7 @@ public class BloomFilterSqlAggregatorTest
);
private static ObjectMapper jsonMapper =
injector
INJECTOR
.getInstance(Key.get(ObjectMapper.class, Json.class))
.registerModules(Collections.singletonList(new BloomFilterSerializersModule()));

@@ -64,7 +64,7 @@ import java.util.Map;
public class BloomDimFilterSqlTest extends BaseCalciteQueryTest
{
private static final Injector injector = Guice.createInjector(
private static final Injector INJECTOR = Guice.createInjector(
binder -> {
binder.bind(Key.get(ObjectMapper.class, Json.class)).toInstance(TestHelper.makeJsonMapper());
binder.bind(LookupExtractorFactoryContainerProvider.class).toInstance(

@@ -80,7 +80,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest
);
private static ObjectMapper jsonMapper =
injector
INJECTOR
.getInstance(Key.get(ObjectMapper.class, Json.class))
.registerModules(Collections.singletonList(new BloomFilterSerializersModule()));

@@ -88,10 +88,10 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest
{
final List<ExprMacroTable.ExprMacro> exprMacros = new ArrayList<>();
for (Class<? extends ExprMacroTable.ExprMacro> clazz : ExpressionModule.EXPR_MACROS) {
exprMacros.add(injector.getInstance(clazz));
exprMacros.add(INJECTOR.getInstance(clazz));
}
exprMacros.add(injector.getInstance(BloomFilterExprMacro.class));
exprMacros.add(injector.getInstance(LookupExprMacro.class));
exprMacros.add(INJECTOR.getInstance(BloomFilterExprMacro.class));
exprMacros.add(INJECTOR.getInstance(LookupExprMacro.class));
return new ExprMacroTable(exprMacros);
}

@@ -278,7 +278,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest
{
final DruidOperatorTable operatorTable = new DruidOperatorTable(
ImmutableSet.of(),
ImmutableSet.of(injector.getInstance(BloomFilterOperatorConversion.class))
ImmutableSet.of(INJECTOR.getInstance(BloomFilterOperatorConversion.class))
);
return getResults(
plannerConfig,

@@ -52,7 +52,7 @@ import java.util.List;
@RunWith(Parameterized.class)
public class ApproximateHistogramGroupByQueryTest
{
private static final Closer resourceCloser = Closer.create();
private static final Closer RESOURCE_CLOSER = Closer.create();
private final QueryRunner<Row> runner;
private final GroupByQueryRunnerFactory factory;

@@ -124,7 +124,7 @@ public class ApproximateHistogramGroupByQueryTest
config
);
final GroupByQueryRunnerFactory factory = factoryAndCloser.lhs;
resourceCloser.register(factoryAndCloser.rhs);
RESOURCE_CLOSER.register(factoryAndCloser.rhs);
for (QueryRunner<ResultRow> runner : QueryRunnerTestHelper.makeQueryRunners(factory)) {
final String testName = StringUtils.format(
"config=%s, runner=%s",

@@ -152,7 +152,7 @@ public class ApproximateHistogramGroupByQueryTest
@After
public void teardown() throws IOException
{
resourceCloser.close();
RESOURCE_CLOSER.close();
}
@Test

@@ -169,18 +169,18 @@ public class ApproximateHistogramGroupByQueryTest
);
GroupByQuery query = new GroupByQuery.Builder()
.setDataSource(QueryRunnerTestHelper.dataSource)
.setGranularity(QueryRunnerTestHelper.allGran).setDimensions(new DefaultDimensionSpec(
QueryRunnerTestHelper.marketDimension,
.setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
.setGranularity(QueryRunnerTestHelper.ALL_GRAN).setDimensions(new DefaultDimensionSpec(
QueryRunnerTestHelper.MARKET_DIMENSION,
"marketalias"
))
.setInterval(QueryRunnerTestHelper.fullOnIntervalSpec)
.setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
.setLimitSpec(
new DefaultLimitSpec(
Collections.singletonList(new OrderByColumnSpec("marketalias", OrderByColumnSpec.Direction.DESCENDING)),
1
)
).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, aggFactory)
).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, aggFactory)
.setPostAggregatorSpecs(
Collections.singletonList(
new QuantilePostAggregator("quantile", "apphisto", 0.5f)

@@ -230,18 +230,18 @@ public class ApproximateHistogramGroupByQueryTest
);
GroupByQuery query = new GroupByQuery.Builder()
.setDataSource(QueryRunnerTestHelper.dataSource)
.setGranularity(QueryRunnerTestHelper.allGran).setDimensions(new DefaultDimensionSpec(
QueryRunnerTestHelper.marketDimension,
.setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
.setGranularity(QueryRunnerTestHelper.ALL_GRAN).setDimensions(new DefaultDimensionSpec(
QueryRunnerTestHelper.MARKET_DIMENSION,
"marketalias"
))
.setInterval(QueryRunnerTestHelper.fullOnIntervalSpec)
.setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
.setLimitSpec(
new DefaultLimitSpec(
Collections.singletonList(new OrderByColumnSpec("marketalias", OrderByColumnSpec.Direction.DESCENDING)),
1
)
).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, aggFactory)
).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, aggFactory)
.setPostAggregatorSpecs(
Collections.singletonList(
new QuantilePostAggregator("quantile", "quantile", 0.5f)

@ -54,12 +54,12 @@ import java.util.Map;
|
|||
@RunWith(Parameterized.class)
|
||||
public class ApproximateHistogramTopNQueryTest
|
||||
{
|
||||
private static final Closer resourceCloser = Closer.create();
|
||||
private static final Closer RESOURCE_CLOSER = Closer.create();
|
||||
|
||||
@AfterClass
|
||||
public static void teardown() throws IOException
|
||||
{
|
||||
resourceCloser.close();
|
||||
RESOURCE_CLOSER.close();
|
||||
}
|
||||
|
||||
@Parameterized.Parameters(name = "{0}")
|
||||
|
@ -70,8 +70,8 @@ public class ApproximateHistogramTopNQueryTest
|
|||
"TopNQueryRunnerFactory-bufferPool",
|
||||
() -> ByteBuffer.allocate(2000)
|
||||
);
|
||||
resourceCloser.register(defaultPool);
|
||||
resourceCloser.register(customPool);
|
||||
RESOURCE_CLOSER.register(defaultPool);
|
||||
RESOURCE_CLOSER.register(customPool);
|
||||
|
||||
return QueryRunnerTestHelper.transformToConstructionFeeder(
|
||||
Iterables.concat(
|
||||
|
@ -122,16 +122,16 @@ public class ApproximateHistogramTopNQueryTest
|
|||
);
|
||||
|
||||
TopNQuery query = new TopNQueryBuilder()
|
||||
.dataSource(QueryRunnerTestHelper.dataSource)
|
||||
.granularity(QueryRunnerTestHelper.allGran)
|
||||
.dimension(QueryRunnerTestHelper.marketDimension)
|
||||
.dataSource(QueryRunnerTestHelper.DATA_SOURCE)
|
||||
.granularity(QueryRunnerTestHelper.ALL_GRAN)
|
||||
.dimension(QueryRunnerTestHelper.MARKET_DIMENSION)
|
||||
.metric(QueryRunnerTestHelper.dependentPostAggMetric)
|
||||
.threshold(4)
|
||||
.intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
|
||||
.intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
|
||||
.aggregators(
|
||||
Lists.newArrayList(
|
||||
Iterables.concat(
|
||||
QueryRunnerTestHelper.commonDoubleAggregators,
|
||||
QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS,
|
||||
Lists.newArrayList(
|
||||
new DoubleMaxAggregatorFactory("maxIndex", "index"),
|
||||
new DoubleMinAggregatorFactory("minIndex", "index"),
|
||||
|
@ -141,8 +141,8 @@ public class ApproximateHistogramTopNQueryTest
|
|||
)
|
||||
)
|
||||
.postAggregators(
|
||||
QueryRunnerTestHelper.addRowsIndexConstant,
|
||||
QueryRunnerTestHelper.dependentPostAgg,
|
||||
QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT,
|
||||
QueryRunnerTestHelper.DEPENDENT_POST_AGG,
|
||||
new QuantilePostAggregator("quantile", "apphisto", 0.5f)
|
||||
)
|
||||
.build();
|
||||
|
@ -153,7 +153,7 @@ public class ApproximateHistogramTopNQueryTest
|
|||
new TopNResultValue(
|
||||
Arrays.<Map<String, Object>>asList(
|
||||
ImmutableMap.<String, Object>builder()
|
||||
.put(QueryRunnerTestHelper.marketDimension, "total_market")
|
||||
.put(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market")
|
||||
.put("rows", 186L)
|
||||
.put("index", 215679.82879638672D)
|
||||
.put("addRowsIndexConstant", 215866.82879638672D)
|
||||
|
@ -184,7 +184,7 @@ public class ApproximateHistogramTopNQueryTest
|
|||
)
|
||||
.build(),
|
||||
ImmutableMap.<String, Object>builder()
|
||||
.put(QueryRunnerTestHelper.marketDimension, "upfront")
|
||||
.put(QueryRunnerTestHelper.MARKET_DIMENSION, "upfront")
|
||||
.put("rows", 186L)
|
||||
.put("index", 192046.1060180664D)
|
||||
.put("addRowsIndexConstant", 192233.1060180664D)
|
||||
|
@ -215,7 +215,7 @@ public class ApproximateHistogramTopNQueryTest
|
|||
)
|
||||
.build(),
|
||||
ImmutableMap.<String, Object>builder()
|
||||
.put(QueryRunnerTestHelper.marketDimension, "spot")
|
||||
.put(QueryRunnerTestHelper.MARKET_DIMENSION, "spot")
|
||||
.put("rows", 837L)
|
||||
.put("index", 95606.57232284546D)
|
||||
.put("addRowsIndexConstant", 96444.57232284546D)
|
||||
|
|
|
@ -52,7 +52,7 @@ import java.util.List;
|
|||
@RunWith(Parameterized.class)
|
||||
public class FixedBucketsHistogramGroupByQueryTest
|
||||
{
|
||||
private static final Closer resourceCloser = Closer.create();
|
||||
private static final Closer RESOURCE_CLOSER = Closer.create();
|
||||
|
||||
private final QueryRunner<Row> runner;
|
||||
private final GroupByQueryRunnerFactory factory;
|
||||
|
@ -124,7 +124,7 @@ public class FixedBucketsHistogramGroupByQueryTest
|
|||
config
|
||||
);
|
||||
final GroupByQueryRunnerFactory factory = factoryAndCloser.lhs;
|
||||
resourceCloser.register(factoryAndCloser.rhs);
|
||||
RESOURCE_CLOSER.register(factoryAndCloser.rhs);
|
||||
for (QueryRunner<ResultRow> runner : QueryRunnerTestHelper.makeQueryRunners(factory)) {
|
||||
final String testName = StringUtils.format(
|
||||
"config=%s, runner=%s",
|
||||
|
@ -153,7 +153,7 @@ public class FixedBucketsHistogramGroupByQueryTest
|
|||
@After
|
||||
public void teardown() throws IOException
|
||||
{
|
||||
resourceCloser.close();
|
||||
RESOURCE_CLOSER.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -170,18 +170,18 @@ public class FixedBucketsHistogramGroupByQueryTest
|
|||
);
|
||||
|
||||
GroupByQuery query = new GroupByQuery.Builder()
|
||||
.setDataSource(QueryRunnerTestHelper.dataSource)
|
||||
.setGranularity(QueryRunnerTestHelper.allGran).setDimensions(new DefaultDimensionSpec(
|
||||
QueryRunnerTestHelper.marketDimension,
|
||||
.setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
|
||||
.setGranularity(QueryRunnerTestHelper.ALL_GRAN).setDimensions(new DefaultDimensionSpec(
|
||||
QueryRunnerTestHelper.MARKET_DIMENSION,
|
||||
"marketalias"
|
||||
))
|
||||
.setInterval(QueryRunnerTestHelper.fullOnInterval)
|
||||
.setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL)
|
||||
.setLimitSpec(
|
||||
new DefaultLimitSpec(
|
||||
Collections.singletonList(new OrderByColumnSpec("marketalias", OrderByColumnSpec.Direction.DESCENDING)),
|
||||
1
|
||||
)
|
||||
).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, aggFactory)
|
||||
).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, aggFactory)
|
||||
.setPostAggregatorSpecs(
|
||||
Collections.singletonList(
|
||||
new QuantilePostAggregator("quantile", "histo", 0.5f)
|
||||
|
@ -231,18 +231,18 @@ public class FixedBucketsHistogramGroupByQueryTest
|
|||
);
|
||||
|
||||
GroupByQuery query = new GroupByQuery.Builder()
|
||||
.setDataSource(QueryRunnerTestHelper.dataSource)
|
||||
.setGranularity(QueryRunnerTestHelper.allGran).setDimensions(new DefaultDimensionSpec(
|
||||
QueryRunnerTestHelper.marketDimension,
|
||||
.setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
|
||||
.setGranularity(QueryRunnerTestHelper.ALL_GRAN).setDimensions(new DefaultDimensionSpec(
|
||||
QueryRunnerTestHelper.MARKET_DIMENSION,
|
||||
"marketalias"
|
||||
))
|
||||
.setInterval(QueryRunnerTestHelper.fullOnInterval)
|
||||
.setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL)
|
||||
.setLimitSpec(
|
||||
new DefaultLimitSpec(
|
||||
Collections.singletonList(new OrderByColumnSpec("marketalias", OrderByColumnSpec.Direction.DESCENDING)),
|
||||
1
|
||||
)
|
||||
).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, aggFactory)
|
||||
).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, aggFactory)
|
||||
.setPostAggregatorSpecs(
|
||||
Collections.singletonList(
|
||||
new QuantilePostAggregator("quantile", "quantile", 0.5f)
|
||||
|
|
|
@ -54,12 +54,12 @@ import java.util.Map;
|
|||
@RunWith(Parameterized.class)
|
||||
public class FixedBucketsHistogramTopNQueryTest
|
||||
{
|
||||
private static final Closer resourceCloser = Closer.create();
|
||||
private static final Closer RESOURCE_CLOSER = Closer.create();
|
||||
|
||||
@AfterClass
|
||||
public static void teardown() throws IOException
|
||||
{
|
||||
resourceCloser.close();
|
||||
RESOURCE_CLOSER.close();
|
||||
}
|
||||
|
||||
@Parameterized.Parameters(name = "{0}")
|
||||
|
@ -70,8 +70,8 @@ public class FixedBucketsHistogramTopNQueryTest
|
|||
"TopNQueryRunnerFactory-bufferPool",
|
||||
() -> ByteBuffer.allocate(2000)
|
||||
);
|
||||
resourceCloser.register(defaultPool);
|
||||
resourceCloser.register(customPool);
|
||||
RESOURCE_CLOSER.register(defaultPool);
|
||||
RESOURCE_CLOSER.register(customPool);
|
||||
|
||||
return QueryRunnerTestHelper.transformToConstructionFeeder(
|
||||
Iterables.concat(
|
||||
|
@ -122,16 +122,16 @@ public class FixedBucketsHistogramTopNQueryTest
|
|||
);
|
||||
|
||||
TopNQuery query = new TopNQueryBuilder()
|
||||
.dataSource(QueryRunnerTestHelper.dataSource)
|
||||
.granularity(QueryRunnerTestHelper.allGran)
|
||||
.dimension(QueryRunnerTestHelper.marketDimension)
|
||||
.dataSource(QueryRunnerTestHelper.DATA_SOURCE)
|
||||
.granularity(QueryRunnerTestHelper.ALL_GRAN)
|
||||
.dimension(QueryRunnerTestHelper.MARKET_DIMENSION)
|
||||
.metric(QueryRunnerTestHelper.dependentPostAggMetric)
|
||||
.threshold(4)
|
||||
.intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
|
||||
.intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
|
||||
.aggregators(
|
||||
Lists.newArrayList(
|
||||
Iterables.concat(
|
||||
QueryRunnerTestHelper.commonDoubleAggregators,
|
||||
QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS,
|
||||
Lists.newArrayList(
|
||||
new DoubleMaxAggregatorFactory("maxIndex", "index"),
|
||||
new DoubleMinAggregatorFactory("minIndex", "index"),
|
||||
|
@ -141,8 +141,8 @@ public class FixedBucketsHistogramTopNQueryTest
|
|||
)
|
||||
)
|
||||
.postAggregators(
|
||||
QueryRunnerTestHelper.addRowsIndexConstant,
|
||||
QueryRunnerTestHelper.dependentPostAgg,
|
||||
QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT,
|
||||
QueryRunnerTestHelper.DEPENDENT_POST_AGG,
|
||||
new QuantilePostAggregator("quantile", "histo", 0.5f)
|
||||
)
|
||||
.build();
|
||||
|
@ -153,7 +153,7 @@ public class FixedBucketsHistogramTopNQueryTest
|
|||
new TopNResultValue(
|
||||
Arrays.<Map<String, Object>>asList(
|
||||
ImmutableMap.<String, Object>builder()
|
||||
.put(QueryRunnerTestHelper.marketDimension, "total_market")
|
||||
.put(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market")
|
||||
.put("rows", 186L)
|
||||
.put("index", 215679.82879638672D)
|
||||
.put("addRowsIndexConstant", 215866.82879638672D)
|
||||
|
@ -180,7 +180,7 @@ public class FixedBucketsHistogramTopNQueryTest
|
|||
)
|
||||
.build(),
|
||||
ImmutableMap.<String, Object>builder()
|
||||
.put(QueryRunnerTestHelper.marketDimension, "upfront")
|
||||
.put(QueryRunnerTestHelper.MARKET_DIMENSION, "upfront")
|
||||
.put("rows", 186L)
|
||||
.put("index", 192046.1060180664D)
|
||||
.put("addRowsIndexConstant", 192233.1060180664D)
|
||||
|
@ -207,7 +207,7 @@ public class FixedBucketsHistogramTopNQueryTest
|
|||
)
|
||||
.build(),
|
||||
ImmutableMap.<String, Object>builder()
|
||||
.put(QueryRunnerTestHelper.marketDimension, "spot")
|
||||
.put(QueryRunnerTestHelper.MARKET_DIMENSION, "spot")
|
||||
.put("rows", 837L)
|
||||
.put("index", 95606.57232284546D)
|
||||
.put("addRowsIndexConstant", 96444.57232284546D)
|
||||
|
|
|
@ -64,8 +64,8 @@ import java.util.concurrent.ThreadLocalRandom;
|
|||
public class TestKafkaExtractionCluster
|
||||
{
|
||||
private static final Logger log = new Logger(TestKafkaExtractionCluster.class);
|
||||
private static final String topicName = "testTopic";
|
||||
private static final Map<String, String> kafkaProperties = new HashMap<>();
|
||||
private static final String TOPIC_NAME = "testTopic";
|
||||
private static final Map<String, String> KAFKA_PROPERTIES = new HashMap<>();
|
||||
|
||||
@Rule
|
||||
public TemporaryFolder temporaryFolder = new TemporaryFolder();
|
||||
|
@ -81,7 +81,7 @@ public class TestKafkaExtractionCluster
|
|||
private static List<ProducerRecord<byte[], byte[]>> generateRecords()
|
||||
{
|
||||
return ImmutableList.of(
|
||||
new ProducerRecord<>(topicName, 0,
|
||||
new ProducerRecord<>(TOPIC_NAME, 0,
|
||||
StringUtils.toUtf8("abcdefg"),
|
||||
StringUtils.toUtf8("abcdefg")));
|
||||
}
|
||||
|
@ -131,7 +131,7 @@ public class TestKafkaExtractionCluster
|
|||
|
||||
final KafkaLookupExtractorFactory kafkaLookupExtractorFactory = new KafkaLookupExtractorFactory(
|
||||
null,
|
||||
topicName,
|
||||
TOPIC_NAME,
|
||||
consumerProperties
|
||||
);
|
||||
|
||||
|
@ -149,7 +149,7 @@ public class TestKafkaExtractionCluster
|
|||
@Nonnull
|
||||
private Map<String, String> getConsumerProperties()
|
||||
{
|
||||
final Map<String, String> props = new HashMap<>(kafkaProperties);
|
||||
final Map<String, String> props = new HashMap<>(KAFKA_PROPERTIES);
|
||||
int port = kafkaServer.socketServer().config().port();
|
||||
props.put("bootstrap.servers", StringUtils.format("127.0.0.1:%d", port));
|
||||
return props;
|
||||
|
@ -168,7 +168,7 @@ public class TestKafkaExtractionCluster
|
|||
private KafkaConfig getBrokerProperties() throws IOException
|
||||
{
|
||||
final Properties serverProperties = new Properties();
|
||||
serverProperties.putAll(kafkaProperties);
|
||||
serverProperties.putAll(KAFKA_PROPERTIES);
|
||||
serverProperties.put("broker.id", "0");
|
||||
serverProperties.put("zookeeper.connect", zkServer.getConnectString());
|
||||
serverProperties.put("port", String.valueOf(ThreadLocalRandom.current().nextInt(9999) + 10000));
|
||||
|
@ -193,13 +193,13 @@ public class TestKafkaExtractionCluster
|
|||
private Properties makeProducerProperties()
|
||||
{
|
||||
final Properties kafkaProducerProperties = new Properties();
|
||||
kafkaProducerProperties.putAll(kafkaProperties);
|
||||
kafkaProducerProperties.putAll(KAFKA_PROPERTIES);
|
||||
int port = kafkaServer.socketServer().config().port();
|
||||
kafkaProducerProperties.put("bootstrap.servers", StringUtils.format("127.0.0.1:%d", port));
|
||||
kafkaProducerProperties.put("key.serializer", ByteArraySerializer.class.getName());
|
||||
kafkaProducerProperties.put("value.serializer", ByteArraySerializer.class.getName());
|
||||
kafkaProducerProperties.put("acks", "all");
|
||||
kafkaProperties.put("request.required.acks", "1");
|
||||
KAFKA_PROPERTIES.put("request.required.acks", "1");
|
||||
return kafkaProducerProperties;
|
||||
}
|
||||
|
||||
|
@ -222,7 +222,7 @@ public class TestKafkaExtractionCluster
|
|||
long events = factory.getCompletedEventCount();
|
||||
|
||||
log.info("------------------------- Sending foo bar -------------------------------");
|
||||
producer.send(new ProducerRecord<>(topicName, StringUtils.toUtf8("foo"), StringUtils.toUtf8("bar")));
|
||||
producer.send(new ProducerRecord<>(TOPIC_NAME, StringUtils.toUtf8("foo"), StringUtils.toUtf8("bar")));
|
||||
|
||||
long start = System.currentTimeMillis();
|
||||
while (events == factory.getCompletedEventCount()) {
|
||||
|
@ -241,7 +241,7 @@ public class TestKafkaExtractionCluster
|
|||
events = factory.getCompletedEventCount();
|
||||
|
||||
log.info("------------------------- Sending baz bat -------------------------------");
|
||||
producer.send(new ProducerRecord<>(topicName, StringUtils.toUtf8("baz"), StringUtils.toUtf8("bat")));
|
||||
producer.send(new ProducerRecord<>(TOPIC_NAME, StringUtils.toUtf8("baz"), StringUtils.toUtf8("bat")));
|
||||
while (events == factory.getCompletedEventCount()) {
|
||||
Thread.sleep(10);
|
||||
if (System.currentTimeMillis() > start + 60_000) {
|
||||
|
|
|
@ -72,7 +72,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
|
|||
@Rule
|
||||
public ExpectedException expectedException = ExpectedException.none();
|
||||
|
||||
private static final ObjectMapper objectMapper = new DefaultObjectMapper();
|
||||
private static final ObjectMapper OBJECT_MAPPER = new DefaultObjectMapper();
|
||||
private static final String TEST_ID = "test-id";
|
||||
private static final List<String> TEST_IDS = Arrays.asList("test-id1", "test-id2", "test-id3", "test-id4");
|
||||
private static final String TEST_HOST = "test-host";
|
||||
|
@ -111,7 +111,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
|
|||
response = createMock(HttpResponse.class);
|
||||
headers = createMock(HttpHeaders.class);
|
||||
|
||||
client = new TestableKafkaIndexTaskClient(httpClient, objectMapper, taskInfoProvider);
|
||||
client = new TestableKafkaIndexTaskClient(httpClient, OBJECT_MAPPER, taskInfoProvider);
|
||||
EasyMock.expect(taskInfoProvider.getTaskLocation(TEST_ID))
|
||||
.andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT))
|
||||
.anyTimes();
|
||||
|
@ -285,7 +285,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
|
|||
@Test
|
||||
public void testGetCurrentOffsetsWithRetry() throws Exception
|
||||
{
|
||||
client = new TestableKafkaIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 3);
|
||||
client = new TestableKafkaIndexTaskClient(httpClient, OBJECT_MAPPER, taskInfoProvider, 3);
|
||||
|
||||
Capture<Request> captured = Capture.newInstance(CaptureType.ALL);
|
||||
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(6)
|
||||
|
@ -330,7 +330,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
|
|||
expectedException.expect(RuntimeException.class);
|
||||
expectedException.expectMessage("org.apache.druid.java.util.common.IOE: Received status [404]");
|
||||
|
||||
client = new TestableKafkaIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 2);
|
||||
client = new TestableKafkaIndexTaskClient(httpClient, OBJECT_MAPPER, taskInfoProvider, 2);
|
||||
|
||||
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).anyTimes();
|
||||
EasyMock.expect(responseHolder.getContent()).andReturn("").anyTimes();
|
||||
|
@ -385,7 +385,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
|
|||
@Test
|
||||
public void testGetStartTime() throws Exception
|
||||
{
|
||||
client = new TestableKafkaIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 2);
|
||||
client = new TestableKafkaIndexTaskClient(httpClient, OBJECT_MAPPER, taskInfoProvider, 2);
|
||||
DateTime now = DateTimes.nowUtc();
|
||||
|
||||
Capture<Request> captured = Capture.newInstance();
|
||||
|
|
|
@ -54,8 +54,8 @@ public class KafkaRecordSupplierTest
|
|||
private static long poll_timeout_millis = 1000;
|
||||
private static int pollRetry = 5;
|
||||
private static int topicPosFix = 0;
|
||||
private static final ObjectMapper objectMapper = TestHelper.makeJsonMapper();
|
||||
|
||||
private static final ObjectMapper OBJECT_MAPPER = TestHelper.makeJsonMapper();
|
||||
|
||||
private static TestingCluster zkServer;
|
||||
private static TestBroker kafkaServer;
|
||||
|
||||
|
@ -126,28 +126,28 @@ public class KafkaRecordSupplierTest
|
|||
);
|
||||
}).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
|
||||
public static class TestKafkaDeserializer implements Deserializer<byte[]>
|
||||
{
|
||||
@Override
|
||||
public void configure(Map<String, ?> map, boolean b)
|
||||
{
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void close()
|
||||
{
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public byte[] deserialize(String topic, byte[] data)
|
||||
{
|
||||
return data;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@BeforeClass
|
||||
public static void setupClass() throws Exception
|
||||
{
|
||||
|
@ -194,7 +194,7 @@ public class KafkaRecordSupplierTest
|
|||
);
|
||||
|
||||
KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
|
||||
kafkaServer.consumerProperties(), objectMapper);
|
||||
kafkaServer.consumerProperties(), OBJECT_MAPPER);
|
||||
|
||||
Assert.assertTrue(recordSupplier.getAssignment().isEmpty());
|
||||
|
||||
|
@ -205,77 +205,77 @@ public class KafkaRecordSupplierTest
|
|||
|
||||
recordSupplier.close();
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSupplierSetupCustomDeserializer() throws ExecutionException, InterruptedException
|
||||
{
|
||||
|
||||
|
||||
// Insert data
|
||||
insertData();
|
||||
|
||||
|
||||
Set<StreamPartition<Integer>> partitions = ImmutableSet.of(
|
||||
StreamPartition.of(topic, 0),
|
||||
StreamPartition.of(topic, 1)
|
||||
);
|
||||
|
||||
|
||||
Map<String, Object> properties = kafkaServer.consumerProperties();
|
||||
properties.put("key.deserializer", KafkaRecordSupplierTest.TestKafkaDeserializer.class.getName());
|
||||
properties.put("value.deserializer", KafkaRecordSupplierTest.TestKafkaDeserializer.class.getName());
|
||||
|
||||
|
||||
KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
|
||||
properties,
|
||||
objectMapper
|
||||
OBJECT_MAPPER
|
||||
);
|
||||
|
||||
|
||||
Assert.assertTrue(recordSupplier.getAssignment().isEmpty());
|
||||
|
||||
|
||||
recordSupplier.assign(partitions);
|
||||
|
||||
|
||||
Assert.assertEquals(partitions, recordSupplier.getAssignment());
|
||||
Assert.assertEquals(ImmutableSet.of(0, 1), recordSupplier.getPartitionIds(topic));
|
||||
|
||||
|
||||
recordSupplier.close();
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testPollCustomDeserializer() throws InterruptedException, ExecutionException
|
||||
{
|
||||
|
||||
|
||||
// Insert data
|
||||
insertData();
|
||||
|
||||
|
||||
Set<StreamPartition<Integer>> partitions = ImmutableSet.of(
|
||||
StreamPartition.of(topic, 0),
|
||||
StreamPartition.of(topic, 1)
|
||||
);
|
||||
|
||||
|
||||
Map<String, Object> properties = kafkaServer.consumerProperties();
|
||||
properties.put("key.deserializer", KafkaRecordSupplierTest.TestKafkaDeserializer.class.getName());
|
||||
properties.put("value.deserializer", KafkaRecordSupplierTest.TestKafkaDeserializer.class.getName());
|
||||
|
||||
|
||||
KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
|
||||
properties,
|
||||
objectMapper
|
||||
OBJECT_MAPPER
|
||||
);
|
||||
|
||||
|
||||
recordSupplier.assign(partitions);
|
||||
recordSupplier.seekToEarliest(partitions);
|
||||
|
||||
|
||||
List<OrderedPartitionableRecord<Integer, Long>> initialRecords = new ArrayList<>(createOrderedPartitionableRecords());
|
||||
|
||||
|
||||
List<OrderedPartitionableRecord<Integer, Long>> polledRecords = recordSupplier.poll(poll_timeout_millis);
|
||||
for (int i = 0; polledRecords.size() != initialRecords.size() && i < pollRetry; i++) {
|
||||
polledRecords.addAll(recordSupplier.poll(poll_timeout_millis));
|
||||
Thread.sleep(200);
|
||||
}
|
||||
|
||||
|
||||
Assert.assertEquals(partitions, recordSupplier.getAssignment());
|
||||
Assert.assertEquals(initialRecords.size(), polledRecords.size());
|
||||
Assert.assertTrue(initialRecords.containsAll(polledRecords));
|
||||
|
||||
|
||||
recordSupplier.close();
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testPoll() throws InterruptedException, ExecutionException
|
||||
{
|
||||
|
@ -289,7 +289,7 @@ public class KafkaRecordSupplierTest
|
|||
);
|
||||
|
||||
KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
|
||||
kafkaServer.consumerProperties(), objectMapper);
|
||||
kafkaServer.consumerProperties(), OBJECT_MAPPER);
|
||||
|
||||
recordSupplier.assign(partitions);
|
||||
recordSupplier.seekToEarliest(partitions);
|
||||
|
@ -330,7 +330,7 @@ public class KafkaRecordSupplierTest
|
|||
|
||||
|
||||
KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
|
||||
kafkaServer.consumerProperties(), objectMapper);
|
||||
kafkaServer.consumerProperties(), OBJECT_MAPPER);
|
||||
|
||||
recordSupplier.assign(partitions);
|
||||
recordSupplier.seekToEarliest(partitions);
|
||||
|
@ -401,7 +401,7 @@ public class KafkaRecordSupplierTest
|
|||
);
|
||||
|
||||
KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
|
||||
kafkaServer.consumerProperties(), objectMapper);
|
||||
kafkaServer.consumerProperties(), OBJECT_MAPPER);
|
||||
|
||||
recordSupplier.assign(partitions);
|
||||
recordSupplier.seekToEarliest(partitions);
|
||||
|
@ -444,7 +444,7 @@ public class KafkaRecordSupplierTest
|
|||
);
|
||||
|
||||
KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
|
||||
kafkaServer.consumerProperties(), objectMapper);
|
||||
kafkaServer.consumerProperties(), OBJECT_MAPPER);
|
||||
|
||||
recordSupplier.assign(partitions);
|
||||
recordSupplier.seekToEarliest(partitions);
|
||||
|
@ -477,7 +477,7 @@ public class KafkaRecordSupplierTest
|
|||
);
|
||||
|
||||
KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
|
||||
kafkaServer.consumerProperties(), objectMapper);
|
||||
kafkaServer.consumerProperties(), OBJECT_MAPPER);
|
||||
|
||||
recordSupplier.assign(partitions);
|
||||
|
||||
|
@ -503,7 +503,7 @@ public class KafkaRecordSupplierTest
|
|||
);
|
||||
|
||||
KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
|
||||
kafkaServer.consumerProperties(), objectMapper);
|
||||
kafkaServer.consumerProperties(), OBJECT_MAPPER);
|
||||
|
||||
recordSupplier.assign(partitions);
|
||||
recordSupplier.seekToEarliest(partitions);
|
||||
|
|
|
@@ -62,11 +62,11 @@ import java.util.Map;
public class KafkaSamplerSpecTest
{
private static final ObjectMapper objectMapper = TestHelper.makeJsonMapper();
private static final ObjectMapper OBJECT_MAPPER = TestHelper.makeJsonMapper();
private static final String TOPIC = "sampling";
private static final DataSchema DATA_SCHEMA = new DataSchema(
"test_ds",
objectMapper.convertValue(
OBJECT_MAPPER.convertValue(
new StringInputRowParser(
new JSONParseSpec(
new TimestampSpec("timestamp", "iso", null),

@@ -94,7 +94,7 @@ public class KafkaSamplerSpecTest
},
new UniformGranularitySpec(Granularities.DAY, Granularities.NONE, null),
null,
objectMapper
OBJECT_MAPPER
);
private static TestingCluster zkServer;

@@ -167,8 +167,8 @@ public class KafkaSamplerSpecTest
KafkaSamplerSpec samplerSpec = new KafkaSamplerSpec(
supervisorSpec,
new SamplerConfig(5, null, null, null),
new FirehoseSampler(objectMapper, new SamplerCache(MapCache.create(100000))),
objectMapper
new FirehoseSampler(OBJECT_MAPPER, new SamplerCache(MapCache.create(100000))),
OBJECT_MAPPER
);
SamplerResponse response = samplerSpec.sample();

@ -122,7 +122,7 @@ import java.util.concurrent.Executor;
|
|||
@RunWith(Parameterized.class)
|
||||
public class KafkaSupervisorTest extends EasyMockSupport
|
||||
{
|
||||
private static final ObjectMapper objectMapper = TestHelper.makeJsonMapper();
|
||||
private static final ObjectMapper OBJECT_MAPPER = TestHelper.makeJsonMapper();
|
||||
private static final String TOPIC_PREFIX = "testTopic";
|
||||
private static final String DATASOURCE = "testDS";
|
||||
private static final int NUM_PARTITIONS = 3;
|
||||
|
@ -237,7 +237,7 @@ public class KafkaSupervisorTest extends EasyMockSupport
|
|||
final Map<String, Object> contexts = supervisor.createIndexTasks(
|
||||
1,
|
||||
"seq",
|
||||
objectMapper,
|
||||
OBJECT_MAPPER,
|
||||
new TreeMap<>(),
|
||||
new KafkaIndexTaskIOConfig(
|
||||
0,
|
||||
|
@ -3393,7 +3393,7 @@ public class KafkaSupervisorTest extends EasyMockSupport
|
|||
taskMaster,
|
||||
indexerMetadataStorageCoordinator,
|
||||
taskClientFactory,
|
||||
objectMapper,
|
||||
OBJECT_MAPPER,
|
||||
new KafkaSupervisorSpec(
|
||||
dataSchema,
|
||||
tuningConfig,
|
||||
|
@ -3404,7 +3404,7 @@ public class KafkaSupervisorTest extends EasyMockSupport
|
|||
taskMaster,
|
||||
indexerMetadataStorageCoordinator,
|
||||
taskClientFactory,
|
||||
objectMapper,
|
||||
OBJECT_MAPPER,
|
||||
new NoopServiceEmitter(),
|
||||
new DruidMonitorSchedulerConfig(),
|
||||
rowIngestionMetersFactory,
|
||||
|
@ -3500,7 +3500,7 @@ public class KafkaSupervisorTest extends EasyMockSupport
|
|||
taskMaster,
|
||||
indexerMetadataStorageCoordinator,
|
||||
taskClientFactory,
|
||||
objectMapper,
|
||||
OBJECT_MAPPER,
|
||||
new KafkaSupervisorSpec(
|
||||
dataSchema,
|
||||
tuningConfig,
|
||||
|
@ -3511,7 +3511,7 @@ public class KafkaSupervisorTest extends EasyMockSupport
|
|||
taskMaster,
|
||||
indexerMetadataStorageCoordinator,
|
||||
taskClientFactory,
|
||||
objectMapper,
|
||||
OBJECT_MAPPER,
|
||||
new NoopServiceEmitter(),
|
||||
new DruidMonitorSchedulerConfig(),
|
||||
rowIngestionMetersFactory,
|
||||
|
@ -3584,7 +3584,7 @@ public class KafkaSupervisorTest extends EasyMockSupport
|
|||
taskMaster,
|
||||
indexerMetadataStorageCoordinator,
|
||||
taskClientFactory,
|
||||
objectMapper,
|
||||
OBJECT_MAPPER,
|
||||
new KafkaSupervisorSpec(
|
||||
dataSchema,
|
||||
tuningConfig,
|
||||
|
@ -3595,7 +3595,7 @@ public class KafkaSupervisorTest extends EasyMockSupport
|
|||
taskMaster,
|
||||
indexerMetadataStorageCoordinator,
|
||||
taskClientFactory,
|
||||
objectMapper,
|
||||
OBJECT_MAPPER,
|
||||
new NoopServiceEmitter(),
|
||||
new DruidMonitorSchedulerConfig(),
|
||||
rowIngestionMetersFactory,
|
||||
|
@ -3613,7 +3613,7 @@ public class KafkaSupervisorTest extends EasyMockSupport
|
|||
|
||||
return new DataSchema(
|
||||
dataSource,
|
||||
objectMapper.convertValue(
|
||||
OBJECT_MAPPER.convertValue(
|
||||
new StringInputRowParser(
|
||||
new JSONParseSpec(
|
||||
new TimestampSpec("timestamp", "iso", null),
|
||||
|
@ -3636,7 +3636,7 @@ public class KafkaSupervisorTest extends EasyMockSupport
|
|||
ImmutableList.of()
|
||||
),
|
||||
null,
|
||||
objectMapper
|
||||
OBJECT_MAPPER
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -3717,7 +3717,7 @@ public class KafkaSupervisorTest extends EasyMockSupport
|
|||
null,
|
||||
null,
|
||||
rowIngestionMetersFactory,
|
||||
objectMapper,
|
||||
OBJECT_MAPPER,
|
||||
new DummyForInjectionAppenderatorsManager()
|
||||
);
|
||||
}
|
||||
|
|
|
@ -73,7 +73,7 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
|
|||
@Rule
|
||||
public ExpectedException expectedException = ExpectedException.none();
|
||||
|
||||
private static final ObjectMapper objectMapper = new DefaultObjectMapper();
|
||||
private static final ObjectMapper OBJECT_MAPPER = new DefaultObjectMapper();
|
||||
private static final String TEST_ID = "test-id";
|
||||
private static final List<String> TEST_IDS = Arrays.asList("test-id1", "test-id2", "test-id3", "test-id4");
|
||||
private static final String TEST_HOST = "test-host";
|
||||
|
@ -112,7 +112,7 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
|
|||
response = createMock(HttpResponse.class);
|
||||
headers = createMock(HttpHeaders.class);
|
||||
|
||||
client = new TestableKinesisIndexTaskClient(httpClient, objectMapper, taskInfoProvider);
|
||||
client = new TestableKinesisIndexTaskClient(httpClient, OBJECT_MAPPER, taskInfoProvider);
|
||||
EasyMock.expect(taskInfoProvider.getTaskLocation(TEST_ID))
|
||||
.andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT))
|
||||
.anyTimes();
|
||||
|
@ -286,7 +286,7 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
|
|||
@Test
|
||||
public void testGetCurrentOffsetsWithRetry() throws Exception
|
||||
{
|
||||
client = new TestableKinesisIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 3);
|
||||
client = new TestableKinesisIndexTaskClient(httpClient, OBJECT_MAPPER, taskInfoProvider, 3);
|
||||
|
||||
Capture<Request> captured = Capture.newInstance(CaptureType.ALL);
|
||||
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(6)
|
||||
|
@ -331,7 +331,7 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
|
|||
expectedException.expect(RuntimeException.class);
|
||||
expectedException.expectMessage("org.apache.druid.java.util.common.IOE: Received status [404]");
|
||||
|
||||
client = new TestableKinesisIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 2);
|
||||
client = new TestableKinesisIndexTaskClient(httpClient, OBJECT_MAPPER, taskInfoProvider, 2);
|
||||
|
||||
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).anyTimes();
|
||||
EasyMock.expect(responseHolder.getContent()).andReturn("").anyTimes();
|
||||
|
@ -386,7 +386,7 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
|
|||
@Test
|
||||
public void testGetStartTime() throws Exception
|
||||
{
|
||||
client = new TestableKinesisIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 2);
|
||||
client = new TestableKinesisIndexTaskClient(httpClient, OBJECT_MAPPER, taskInfoProvider, 2);
|
||||
DateTime now = DateTimes.nowUtc();
|
||||
|
||||
Capture<Request> captured = Capture.newInstance();
|
||||
|
|
|
@ -53,7 +53,7 @@ import java.util.stream.Collectors;
|
|||
|
||||
public class KinesisRecordSupplierTest extends EasyMockSupport
|
||||
{
|
||||
private static final String stream = "stream";
|
||||
private static final String STREAM = "stream";
|
||||
private static final long POLL_TIMEOUT_MILLIS = 2000;
|
||||
private static final String SHARD_ID1 = "1";
|
||||
private static final String SHARD_ID0 = "0";
|
||||
|
@ -78,7 +78,7 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
|
|||
private static final List<Object> ALL_RECORDS = ImmutableList.builder()
|
||||
.addAll(SHARD0_RECORDS.stream()
|
||||
.map(x -> new OrderedPartitionableRecord<>(
|
||||
stream,
|
||||
STREAM,
|
||||
SHARD_ID0,
|
||||
x.getSequenceNumber(),
|
||||
Collections
|
||||
|
@ -91,7 +91,7 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
|
|||
.toList()))
|
||||
.addAll(SHARD1_RECORDS.stream()
|
||||
.map(x -> new OrderedPartitionableRecord<>(
|
||||
stream,
|
||||
STREAM,
|
||||
SHARD_ID1,
|
||||
x.getSequenceNumber(),
|
||||
Collections
|
||||
|
@ -182,8 +182,8 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
|
|||
replayAll();
|
||||
|
||||
Set<StreamPartition<String>> partitions = ImmutableSet.of(
|
||||
StreamPartition.of(stream, SHARD_ID0),
|
||||
StreamPartition.of(stream, SHARD_ID1)
|
||||
StreamPartition.of(STREAM, SHARD_ID0),
|
||||
StreamPartition.of(STREAM, SHARD_ID1)
|
||||
);
|
||||
|
||||
recordSupplier = new KinesisRecordSupplier(
|
||||
|
@ -204,13 +204,13 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
|
|||
recordSupplier.assign(partitions);
|
||||
|
||||
Assert.assertEquals(partitions, recordSupplier.getAssignment());
|
||||
Assert.assertEquals(ImmutableSet.of(SHARD_ID1, SHARD_ID0), recordSupplier.getPartitionIds(stream));
|
||||
Assert.assertEquals(ImmutableSet.of(SHARD_ID1, SHARD_ID0), recordSupplier.getPartitionIds(STREAM));
|
||||
Assert.assertEquals(Collections.emptyList(), recordSupplier.poll(100));
|
||||
|
||||
verifyAll();
|
||||
|
||||
final DescribeStreamRequest expectedRequest = new DescribeStreamRequest();
|
||||
expectedRequest.setStreamName(stream);
|
||||
expectedRequest.setStreamName(STREAM);
|
||||
expectedRequest.setExclusiveStartShardId("0");
|
||||
Assert.assertEquals(expectedRequest, capturedRequest.getValue());
|
||||
}
|
||||
|
@ -266,8 +266,8 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
|
|||
replayAll();
|
||||
|
||||
Set<StreamPartition<String>> partitions = ImmutableSet.of(
|
||||
StreamPartition.of(stream, SHARD_ID0),
|
||||
StreamPartition.of(stream, SHARD_ID1)
|
||||
StreamPartition.of(STREAM, SHARD_ID0),
|
||||
StreamPartition.of(STREAM, SHARD_ID1)
|
||||
);
|
||||
|
||||
|
||||
|
@ -338,8 +338,8 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
|
|||
|
||||
replayAll();
|
||||
|
||||
StreamPartition<String> shard0Partition = StreamPartition.of(stream, SHARD_ID0);
|
||||
StreamPartition<String> shard1Partition = StreamPartition.of(stream, SHARD_ID1);
|
||||
StreamPartition<String> shard0Partition = StreamPartition.of(STREAM, SHARD_ID0);
|
||||
StreamPartition<String> shard1Partition = StreamPartition.of(STREAM, SHARD_ID1);
|
||||
Set<StreamPartition<String>> partitions = ImmutableSet.of(
|
||||
shard0Partition,
|
||||
shard1Partition
|
||||
|
@ -405,8 +405,8 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
|
|||
|
||||
replayAll();
|
||||
|
||||
StreamPartition<String> shard0 = StreamPartition.of(stream, SHARD_ID0);
|
||||
StreamPartition<String> shard1 = StreamPartition.of(stream, SHARD_ID1);
|
||||
StreamPartition<String> shard0 = StreamPartition.of(STREAM, SHARD_ID0);
|
||||
StreamPartition<String> shard1 = StreamPartition.of(STREAM, SHARD_ID1);
|
||||
Set<StreamPartition<String>> partitions = ImmutableSet.of(
|
||||
shard0,
|
||||
shard1
|
||||
|
@ -440,8 +440,8 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
|
|||
@Test(expected = ISE.class)
|
||||
public void testSeekUnassigned() throws InterruptedException
|
||||
{
|
||||
StreamPartition<String> shard0 = StreamPartition.of(stream, SHARD_ID0);
|
||||
StreamPartition<String> shard1 = StreamPartition.of(stream, SHARD_ID1);
|
||||
StreamPartition<String> shard0 = StreamPartition.of(STREAM, SHARD_ID0);
|
||||
StreamPartition<String> shard1 = StreamPartition.of(STREAM, SHARD_ID1);
|
||||
Set<StreamPartition<String>> partitions = ImmutableSet.of(
|
||||
shard1
|
||||
);
|
||||
|
@ -503,7 +503,7 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
|
|||
replayAll();
|
||||
|
||||
Set<StreamPartition<String>> partitions = ImmutableSet.of(
|
||||
StreamPartition.of(stream, SHARD_ID1)
|
||||
StreamPartition.of(STREAM, SHARD_ID1)
|
||||
);
|
||||
|
||||
recordSupplier = new KinesisRecordSupplier(
|
||||
|
@ -520,7 +520,7 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
|
|||
);
|
||||
|
||||
recordSupplier.assign(partitions);
|
||||
recordSupplier.seek(StreamPartition.of(stream, SHARD_ID1), "5");
|
||||
recordSupplier.seek(StreamPartition.of(STREAM, SHARD_ID1), "5");
|
||||
recordSupplier.start();
|
||||
|
||||
for (int i = 0; i < 10 && recordSupplier.bufferSize() < 6; i++) {
|
||||
|
@ -534,7 +534,7 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
|
|||
firstRecord
|
||||
);
|
||||
|
||||
recordSupplier.seek(StreamPartition.of(stream, SHARD_ID1), "7");
|
||||
recordSupplier.seek(StreamPartition.of(STREAM, SHARD_ID1), "7");
|
||||
recordSupplier.start();
|
||||
|
||||
while (recordSupplier.bufferSize() < 4) {
|
||||
|
@ -585,8 +585,8 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
|
|||
replayAll();
|
||||
|
||||
Set<StreamPartition<String>> partitions = ImmutableSet.of(
|
||||
StreamPartition.of(stream, SHARD_ID0),
|
||||
StreamPartition.of(stream, SHARD_ID1)
|
||||
StreamPartition.of(STREAM, SHARD_ID0),
|
||||
StreamPartition.of(STREAM, SHARD_ID1)
|
||||
);
|
||||
|
||||
|
||||
|
|
|
@@ -66,12 +66,12 @@ import java.util.Map;
public class KinesisSamplerSpecTest extends EasyMockSupport
{
private static final ObjectMapper objectMapper = TestHelper.makeJsonMapper();
private static final ObjectMapper OBJECT_MAPPER = TestHelper.makeJsonMapper();
private static final String STREAM = "sampling";
private static final String SHARD_ID = "1";
private static final DataSchema DATA_SCHEMA = new DataSchema(
"test_ds",
objectMapper.convertValue(
OBJECT_MAPPER.convertValue(
new StringInputRowParser(
new JSONParseSpec(
new TimestampSpec("timestamp", "iso", null),

@@ -99,7 +99,7 @@ public class KinesisSamplerSpecTest extends EasyMockSupport
},
new UniformGranularitySpec(Granularities.DAY, Granularities.NONE, null),
null,
objectMapper
OBJECT_MAPPER
);
private final KinesisRecordSupplier recordSupplier = mock(KinesisRecordSupplier.class);

@@ -183,7 +183,7 @@ public class KinesisSamplerSpecTest extends EasyMockSupport
KinesisSamplerSpec samplerSpec = new TestableKinesisSamplerSpec(
supervisorSpec,
new SamplerConfig(5, null, null, null),
new FirehoseSampler(objectMapper, new SamplerCache(MapCache.create(100000))),
new FirehoseSampler(OBJECT_MAPPER, new SamplerCache(MapCache.create(100000))),
null
);

File diff suppressed because it is too large

@@ -51,7 +51,7 @@ import java.util.Map;
*/
public class NamespacedExtractorModuleTest
{
private static final ObjectMapper mapper = UriExtractionNamespaceTest.registerTypes(new DefaultObjectMapper());
private static final ObjectMapper MAPPER = UriExtractionNamespaceTest.registerTypes(new DefaultObjectMapper());
private CacheScheduler scheduler;
private Lifecycle lifecycle;

@@ -93,7 +93,7 @@ public class NamespacedExtractorModuleTest
{
final File tmpFile = temporaryFolder.newFile();
try (Writer out = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
out.write(mapper.writeValueAsString(ImmutableMap.of("foo", "bar")));
out.write(MAPPER.writeValueAsString(ImmutableMap.of("foo", "bar")));
}
final UriCacheGenerator factory = new UriCacheGenerator(
ImmutableMap.of("file", new LocalFileTimestampVersionFinder())

@@ -119,7 +119,7 @@ public class NamespacedExtractorModuleTest
{
final File tmpFile = temporaryFolder.newFile();
try (Writer out = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
out.write(mapper.writeValueAsString(ImmutableMap.of("foo", "bar")));
out.write(MAPPER.writeValueAsString(ImmutableMap.of("foo", "bar")));
}
final UriExtractionNamespace namespace = new UriExtractionNamespace(
tmpFile.toURI(),

@@ -140,7 +140,7 @@ public class NamespacedExtractorModuleTest
{
final File tmpFile = temporaryFolder.newFile();
try (Writer out = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
out.write(mapper.writeValueAsString(ImmutableMap.of("foo", "bar")));
out.write(MAPPER.writeValueAsString(ImmutableMap.of("foo", "bar")));
}
final UriExtractionNamespace namespace = new UriExtractionNamespace(
tmpFile.toURI(),

@@ -161,7 +161,7 @@ public class NamespacedExtractorModuleTest
{
final File tmpFile = temporaryFolder.newFile();
try (Writer out = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
out.write(mapper.writeValueAsString(ImmutableMap.of("foo", "bar")));
out.write(MAPPER.writeValueAsString(ImmutableMap.of("foo", "bar")));
}
final UriExtractionNamespace namespace = new UriExtractionNamespace(
tmpFile.toURI(),

@ -70,12 +70,12 @@ public class JdbcExtractionNamespaceTest
|
|||
@Rule
|
||||
public final TestDerbyConnector.DerbyConnectorRule derbyConnectorRule = new TestDerbyConnector.DerbyConnectorRule();
|
||||
private static final Logger log = new Logger(JdbcExtractionNamespaceTest.class);
|
||||
private static final String tableName = "abstractDbRenameTest";
|
||||
private static final String keyName = "keyName";
|
||||
private static final String valName = "valName";
|
||||
private static final String tsColumn_ = "tsColumn";
|
||||
private static final String filterColumn = "filterColumn";
|
||||
private static final Map<String, String[]> renames = ImmutableMap.of(
|
||||
private static final String TABLE_NAME = "abstractDbRenameTest";
|
||||
private static final String KEY_NAME = "keyName";
|
||||
private static final String VAL_NAME = "valName";
|
||||
private static final String TS_COLUMN = "tsColumn";
|
||||
private static final String FILTER_COLUMN = "filterColumn";
|
||||
private static final Map<String, String[]> RENAMES = ImmutableMap.of(
|
||||
"foo", new String[]{"bar", "1"},
|
||||
"bad", new String[]{"bar", "1"},
|
||||
"how about that", new String[]{"foo", "0"},
|
||||
|
@ -129,22 +129,22 @@ public class JdbcExtractionNamespaceTest
|
|||
handle.createStatement(
|
||||
StringUtils.format(
|
||||
"CREATE TABLE %s (%s TIMESTAMP, %s VARCHAR(64), %s VARCHAR(64), %s VARCHAR(64))",
|
||||
tableName,
|
||||
tsColumn_,
|
||||
filterColumn,
|
||||
keyName,
|
||||
valName
|
||||
TABLE_NAME,
|
||||
TS_COLUMN,
|
||||
FILTER_COLUMN,
|
||||
KEY_NAME,
|
||||
VAL_NAME
|
||||
)
|
||||
).setQueryTimeout(1).execute()
|
||||
);
|
||||
handle.createStatement(StringUtils.format("TRUNCATE TABLE %s", tableName)).setQueryTimeout(1).execute();
|
||||
handle.createStatement(StringUtils.format("TRUNCATE TABLE %s", TABLE_NAME)).setQueryTimeout(1).execute();
|
||||
handle.commit();
|
||||
closer.register(new Closeable()
|
||||
{
|
||||
@Override
|
||||
public void close() throws IOException
|
||||
{
|
||||
handle.createStatement("DROP TABLE " + tableName).setQueryTimeout(1).execute();
|
||||
handle.createStatement("DROP TABLE " + TABLE_NAME).setQueryTimeout(1).execute();
|
||||
final ListenableFuture future = setupTeardownService.submit(new Runnable()
|
||||
{
|
||||
@Override
|
||||
|
@ -179,7 +179,7 @@ public class JdbcExtractionNamespaceTest
|
|||
Assert.assertEquals(0, scheduler.getActiveEntries());
|
||||
}
|
||||
});
|
||||
for (Map.Entry<String, String[]> entry : renames.entrySet()) {
|
||||
for (Map.Entry<String, String[]> entry : RENAMES.entrySet()) {
|
||||
try {
|
||||
String key = entry.getKey();
|
||||
String value = entry.getValue()[0];
|
||||
|
@@ -338,19 +338,19 @@ public class JdbcExtractionNamespaceTest
     final String statementVal = val != null ? "'%s'" : "%s";
     if (tsColumn == null) {
       handle.createStatement(
-          StringUtils.format("DELETE FROM %s WHERE %s='%s'", tableName, keyName, key)
+          StringUtils.format("DELETE FROM %s WHERE %s='%s'", TABLE_NAME, KEY_NAME, key)
       ).setQueryTimeout(1).execute();
       query = StringUtils.format(
           "INSERT INTO %s (%s, %s, %s) VALUES ('%s', '%s', " + statementVal + ")",
-          tableName,
-          filterColumn, keyName, valName,
+          TABLE_NAME,
+          FILTER_COLUMN, KEY_NAME, VAL_NAME,
          filter, key, val
       );
     } else {
       query = StringUtils.format(
           "INSERT INTO %s (%s, %s, %s, %s) VALUES ('%s', '%s', '%s', " + statementVal + ")",
-          tableName,
-          tsColumn, filterColumn, keyName, valName,
+          TABLE_NAME,
+          tsColumn, FILTER_COLUMN, KEY_NAME, VAL_NAME,
           updateTs, filter, key, val
       );
     }
@@ -367,9 +367,9 @@ public class JdbcExtractionNamespaceTest
   {
     final JdbcExtractionNamespace extractionNamespace = new JdbcExtractionNamespace(
         derbyConnectorRule.getMetadataConnectorConfig(),
-        tableName,
-        keyName,
-        valName,
+        TABLE_NAME,
+        KEY_NAME,
+        VAL_NAME,
         tsColumn,
         null,
         new Period(0)
@@ -378,7 +378,7 @@ public class JdbcExtractionNamespaceTest
       CacheSchedulerTest.waitFor(entry);
       final Map<String, String> map = entry.getCache();

-      for (Map.Entry<String, String[]> e : renames.entrySet()) {
+      for (Map.Entry<String, String[]> e : RENAMES.entrySet()) {
         String key = e.getKey();
         String[] val = e.getValue();
         String field = val[0];
@@ -398,18 +398,18 @@ public class JdbcExtractionNamespaceTest
   {
     final JdbcExtractionNamespace extractionNamespace = new JdbcExtractionNamespace(
         derbyConnectorRule.getMetadataConnectorConfig(),
-        tableName,
-        keyName,
-        valName,
+        TABLE_NAME,
+        KEY_NAME,
+        VAL_NAME,
         tsColumn,
-        filterColumn + "='1'",
+        FILTER_COLUMN + "='1'",
         new Period(0)
     );
     try (CacheScheduler.Entry entry = scheduler.schedule(extractionNamespace)) {
       CacheSchedulerTest.waitFor(entry);
       final Map<String, String> map = entry.getCache();

-      for (Map.Entry<String, String[]> e : renames.entrySet()) {
+      for (Map.Entry<String, String[]> e : RENAMES.entrySet()) {
         String key = e.getKey();
         String[] val = e.getValue();
         String field = val[0];
@@ -470,9 +470,9 @@ public class JdbcExtractionNamespaceTest
   {
     final JdbcExtractionNamespace extractionNamespace = new JdbcExtractionNamespace(
         derbyConnectorRule.getMetadataConnectorConfig(),
-        tableName,
-        keyName,
-        valName,
+        TABLE_NAME,
+        KEY_NAME,
+        VAL_NAME,
         tsColumn,
         "some filter",
         new Period(10)
@@ -491,9 +491,9 @@ public class JdbcExtractionNamespaceTest
   {
     final JdbcExtractionNamespace extractionNamespace = new JdbcExtractionNamespace(
         derbyConnectorRule.getMetadataConnectorConfig(),
-        tableName,
-        keyName,
-        valName,
+        TABLE_NAME,
+        KEY_NAME,
+        VAL_NAME,
         tsColumn,
         null,
         new Period(10)

@@ -47,14 +47,14 @@ import java.util.Map;
 @RunWith(Parameterized.class)
 public class PollingLookupTest
 {
-  private static final Map<String, String> firstLookupMap = ImmutableMap.of(
+  private static final Map<String, String> FIRST_LOOKUP_MAP = ImmutableMap.of(
       "foo", "bar",
       "bad", "bar",
       "how about that", "foo",
       "empty string", ""
   );

-  private static final Map<String, String> secondLookupMap = ImmutableMap.of(
+  private static final Map<String, String> SECOND_LOOKUP_MAP = ImmutableMap.of(
       "new-foo", "new-bar",
       "new-bad", "new-bar"
   );
@@ -71,9 +71,9 @@ public class PollingLookupTest
     {
       if (callNumber == 0) {
         callNumber++;
-        return firstLookupMap.entrySet();
+        return FIRST_LOOKUP_MAP.entrySet();
       }
-      return secondLookupMap.entrySet();
+      return SECOND_LOOKUP_MAP.entrySet();
     }

     @Nullable
@@ -145,15 +145,15 @@ public class PollingLookupTest
   @Test
   public void testApply()
   {
-    assertMapLookup(firstLookupMap, pollingLookup);
+    assertMapLookup(FIRST_LOOKUP_MAP, pollingLookup);
   }

   @Test(timeout = POLL_PERIOD * 3)
   public void testApplyAfterDataChange() throws InterruptedException
   {
-    assertMapLookup(firstLookupMap, pollingLookup);
+    assertMapLookup(FIRST_LOOKUP_MAP, pollingLookup);
     Thread.sleep(POLL_PERIOD * 2);
-    assertMapLookup(secondLookupMap, pollingLookup);
+    assertMapLookup(SECOND_LOOKUP_MAP, pollingLookup);
   }

   @Test
@@ -184,8 +184,8 @@ public class PollingLookupTest
   @Test
   public void testBulkApply()
   {
-    Map<String, String> map = pollingLookup.applyAll(firstLookupMap.keySet());
-    Assert.assertEquals(firstLookupMap, Maps.transformValues(map, new Function<String, String>()
+    Map<String, String> map = pollingLookup.applyAll(FIRST_LOOKUP_MAP.keySet());
+    Assert.assertEquals(FIRST_LOOKUP_MAP, Maps.transformValues(map, new Function<String, String>()
     {
       @Override
       public String apply(String input)

@ -50,7 +50,7 @@ public class JdbcDataFetcherTest
|
|||
|
||||
|
||||
|
||||
private static final Map<String, String> lookupMap = ImmutableMap.of(
|
||||
private static final Map<String, String> LOOKUP_MAP = ImmutableMap.of(
|
||||
"foo", "bar",
|
||||
"bad", "bar",
|
||||
"how about that", "foo",
|
||||
|
@ -77,7 +77,7 @@ public class JdbcDataFetcherTest
|
|||
);
|
||||
handle.createStatement(StringUtils.format("TRUNCATE TABLE %s", tableName)).setQueryTimeout(1).execute();
|
||||
|
||||
for (Map.Entry<String, String> entry : lookupMap.entrySet()) {
|
||||
for (Map.Entry<String, String> entry : LOOKUP_MAP.entrySet()) {
|
||||
insertValues(entry.getKey(), entry.getValue(), handle);
|
||||
}
|
||||
handle.commit();
|
||||
|
@ -94,7 +94,7 @@ public class JdbcDataFetcherTest
|
|||
public void testFetch()
|
||||
{
|
||||
Assert.assertEquals("null check", null, jdbcDataFetcher.fetch("baz"));
|
||||
assertMapLookup(lookupMap, jdbcDataFetcher);
|
||||
assertMapLookup(LOOKUP_MAP, jdbcDataFetcher);
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -102,15 +102,15 @@ public class JdbcDataFetcherTest
|
|||
{
|
||||
ImmutableMap.Builder<String, String> mapBuilder = ImmutableMap.builder();
|
||||
jdbcDataFetcher.fetchAll().forEach(mapBuilder::put);
|
||||
Assert.assertEquals("maps should match", lookupMap, mapBuilder.build());
|
||||
Assert.assertEquals("maps should match", LOOKUP_MAP, mapBuilder.build());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testFetchKeys()
|
||||
{
|
||||
ImmutableMap.Builder<String, String> mapBuilder = ImmutableMap.builder();
|
||||
jdbcDataFetcher.fetch(lookupMap.keySet()).forEach(mapBuilder::put);
|
||||
Assert.assertEquals(lookupMap, mapBuilder.build());
|
||||
jdbcDataFetcher.fetch(LOOKUP_MAP.keySet()).forEach(mapBuilder::put);
|
||||
Assert.assertEquals(LOOKUP_MAP, mapBuilder.build());
|
||||
}
|
||||
|
||||
@Test
|
||||
|
|
|
@@ -57,7 +57,7 @@ public class S3DataSegmentPuller implements URIDataPuller
 {
   public static final int DEFAULT_RETRY_COUNT = 3;

-  public static final String scheme = S3StorageDruidModule.SCHEME;
+  public static final String SCHEME = S3StorageDruidModule.SCHEME;

   private static final Logger log = new Logger(S3DataSegmentPuller.class);

@@ -141,8 +141,8 @@ public class S3DataSegmentPuller implements URIDataPuller

   public static URI checkURI(URI uri)
   {
-    if (uri.getScheme().equalsIgnoreCase(scheme)) {
-      uri = URI.create("s3" + uri.toString().substring(scheme.length()));
+    if (uri.getScheme().equalsIgnoreCase(SCHEME)) {
+      uri = URI.create("s3" + uri.toString().substring(SCHEME.length()));
     } else if (!"s3".equalsIgnoreCase(uri.getScheme())) {
       throw new IAE("Don't know how to load scheme for URI [%s]", uri.toString());
     }

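A brief usage sketch of the renamed SCHEME constant and checkURI() above. The URIs here are made-up examples, not taken from the change: a plain s3 URI passes through unchanged, a URI in the module's custom scheme is rewritten to s3, and any other scheme is rejected.

// Illustrative only; the example URIs are assumptions.
URI plain = S3DataSegmentPuller.checkURI(URI.create("s3://my-bucket/prefix/segment.zip"));
// returned as-is: the scheme is already "s3"

URI custom = S3DataSegmentPuller.checkURI(URI.create(S3DataSegmentPuller.SCHEME + "://my-bucket/prefix/segment.zip"));
// scheme replaced with "s3" via URI.create("s3" + uri.toString().substring(SCHEME.length()))

// S3DataSegmentPuller.checkURI(URI.create("hdfs://namenode/path"));  // would throw IAE: unknown scheme
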
@@ -57,9 +57,9 @@ import java.util.stream.Collectors;
 */
public class StaticS3FirehoseFactoryTest
{
-  private static final AmazonS3Client S3_ClIENT = EasyMock.createNiceMock(AmazonS3Client.class);
+  private static final AmazonS3Client S3_CLIENT = EasyMock.createNiceMock(AmazonS3Client.class);
   private static final ServerSideEncryptingAmazonS3 SERVICE = new ServerSideEncryptingAmazonS3(
-      S3_ClIENT,
+      S3_CLIENT,
       new NoopServerSideEncryption()
   );

@@ -102,7 +102,7 @@ public class StaticS3FirehoseFactoryTest
     uris.sort(Comparator.comparing(URI::toString));

     uris.forEach(StaticS3FirehoseFactoryTest::addExpectedObjject);
-    EasyMock.replay(S3_ClIENT);
+    EasyMock.replay(S3_CLIENT);

     final StaticS3FirehoseFactory factory = new StaticS3FirehoseFactory(
         SERVICE,

@ -51,7 +51,7 @@ import java.util.Set;
|
|||
|
||||
public class S3DataSegmentMoverTest
|
||||
{
|
||||
private static final DataSegment sourceSegment = new DataSegment(
|
||||
private static final DataSegment SOURCE_SEGMENT = new DataSegment(
|
||||
"test",
|
||||
Intervals.of("2013-01-01/2013-01-02"),
|
||||
"1",
|
||||
|
@ -80,7 +80,7 @@ public class S3DataSegmentMoverTest
|
|||
);
|
||||
|
||||
DataSegment movedSegment = mover.move(
|
||||
sourceSegment,
|
||||
SOURCE_SEGMENT,
|
||||
ImmutableMap.of("baseKey", "targetBaseKey", "bucket", "archive")
|
||||
);
|
||||
|
||||
|
@ -102,7 +102,7 @@ public class S3DataSegmentMoverTest
|
|||
);
|
||||
|
||||
DataSegment movedSegment = mover.move(
|
||||
sourceSegment,
|
||||
SOURCE_SEGMENT,
|
||||
ImmutableMap.of("baseKey", "targetBaseKey", "bucket", "archive")
|
||||
);
|
||||
|
||||
|
@ -120,7 +120,7 @@ public class S3DataSegmentMoverTest
|
|||
S3DataSegmentMover mover = new S3DataSegmentMover(mockS3Client, new S3DataSegmentPusherConfig());
|
||||
|
||||
mover.move(
|
||||
sourceSegment,
|
||||
SOURCE_SEGMENT,
|
||||
ImmutableMap.of("baseKey", "targetBaseKey", "bucket", "archive")
|
||||
);
|
||||
}
|
||||
|
|
|
@ -33,7 +33,7 @@ import java.util.Set;
|
|||
|
||||
public class S3DataSegmentPusherConfigTest
|
||||
{
|
||||
private static final ObjectMapper jsonMapper = new DefaultObjectMapper();
|
||||
private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper();
|
||||
|
||||
@Test
|
||||
public void testSerialization() throws IOException
|
||||
|
@ -41,8 +41,8 @@ public class S3DataSegmentPusherConfigTest
|
|||
String jsonConfig = "{\"bucket\":\"bucket1\",\"baseKey\":\"dataSource1\","
|
||||
+ "\"disableAcl\":false,\"maxListingLength\":2000,\"useS3aSchema\":false}";
|
||||
|
||||
S3DataSegmentPusherConfig config = jsonMapper.readValue(jsonConfig, S3DataSegmentPusherConfig.class);
|
||||
Assert.assertEquals(jsonConfig, jsonMapper.writeValueAsString(config));
|
||||
S3DataSegmentPusherConfig config = JSON_MAPPER.readValue(jsonConfig, S3DataSegmentPusherConfig.class);
|
||||
Assert.assertEquals(jsonConfig, JSON_MAPPER.writeValueAsString(config));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -52,8 +52,8 @@ public class S3DataSegmentPusherConfigTest
|
|||
String expectedJsonConfig = "{\"bucket\":\"bucket1\",\"baseKey\":\"dataSource1\","
|
||||
+ "\"disableAcl\":false,\"maxListingLength\":1000,\"useS3aSchema\":false}";
|
||||
|
||||
S3DataSegmentPusherConfig config = jsonMapper.readValue(jsonConfig, S3DataSegmentPusherConfig.class);
|
||||
Assert.assertEquals(expectedJsonConfig, jsonMapper.writeValueAsString(config));
|
||||
S3DataSegmentPusherConfig config = JSON_MAPPER.readValue(jsonConfig, S3DataSegmentPusherConfig.class);
|
||||
Assert.assertEquals(expectedJsonConfig, JSON_MAPPER.writeValueAsString(config));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -63,7 +63,7 @@ public class S3DataSegmentPusherConfigTest
|
|||
+ "\"disableAcl\":false,\"maxListingLength\":-1}";
|
||||
Validator validator = Validation.buildDefaultValidatorFactory().getValidator();
|
||||
|
||||
S3DataSegmentPusherConfig config = jsonMapper.readValue(jsonConfig, S3DataSegmentPusherConfig.class);
|
||||
S3DataSegmentPusherConfig config = JSON_MAPPER.readValue(jsonConfig, S3DataSegmentPusherConfig.class);
|
||||
Set<ConstraintViolation<S3DataSegmentPusherConfig>> violations = validator.validate(config);
|
||||
Assert.assertEquals(1, violations.size());
|
||||
ConstraintViolation violation = Iterators.getOnlyElement(violations.iterator());
|
||||
|
|
|
@ -38,7 +38,7 @@ import java.util.List;
|
|||
*/
|
||||
public class VarianceSerde extends ComplexMetricSerde
|
||||
{
|
||||
private static final Ordering<VarianceAggregatorCollector> comparator =
|
||||
private static final Ordering<VarianceAggregatorCollector> COMPARATOR =
|
||||
Ordering.from(VarianceAggregatorCollector.COMPARATOR).nullsFirst();
|
||||
|
||||
@Override
|
||||
|
@ -114,7 +114,7 @@ public class VarianceSerde extends ComplexMetricSerde
|
|||
@Override
|
||||
public int compare(VarianceAggregatorCollector o1, VarianceAggregatorCollector o2)
|
||||
{
|
||||
return comparator.compare(o1, o2);
|
||||
return COMPARATOR.compare(o1, o2);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
|
|
@ -34,7 +34,7 @@ import java.util.concurrent.ThreadLocalRandom;
|
|||
|
||||
public class VarianceAggregatorCollectorTest
|
||||
{
|
||||
private static final float[] market_upfront = new float[]{
|
||||
private static final float[] MARKET_UPFRONT = new float[]{
|
||||
800.0f, 800.0f, 826.0602f, 1564.6177f, 1006.4021f, 869.64374f, 809.04175f, 1458.4027f, 852.4375f, 879.9881f,
|
||||
950.1468f, 712.7746f, 846.2675f, 682.8855f, 1109.875f, 594.3817f, 870.1159f, 677.511f, 1410.2781f, 1219.4321f,
|
||||
979.306f, 1224.5016f, 1215.5898f, 716.6092f, 1301.0233f, 786.3633f, 989.9315f, 1609.0967f, 1023.2952f, 1367.6381f,
|
||||
|
@ -57,7 +57,7 @@ public class VarianceAggregatorCollectorTest
|
|||
989.0328f, 744.7446f, 1166.4012f, 753.105f, 962.7312f, 780.272f
|
||||
};
|
||||
|
||||
private static final float[] market_total_market = new float[]{
|
||||
private static final float[] MARKET_TOTAL_MARKET = new float[]{
|
||||
1000.0f, 1000.0f, 1040.9456f, 1689.0128f, 1049.142f, 1073.4766f, 1007.36554f, 1545.7089f, 1016.9652f, 1077.6127f,
|
||||
1075.0896f, 953.9954f, 1022.7833f, 937.06195f, 1156.7448f, 849.8775f, 1066.208f, 904.34064f, 1240.5255f,
|
||||
1343.2325f, 1088.9431f, 1349.2544f, 1102.8667f, 939.2441f, 1109.8754f, 997.99457f, 1037.4495f, 1686.4197f,
|
||||
|
@ -85,7 +85,7 @@ public class VarianceAggregatorCollectorTest
|
|||
public void testVariance()
|
||||
{
|
||||
Random random = ThreadLocalRandom.current();
|
||||
for (float[] values : Arrays.asList(market_upfront, market_total_market)) {
|
||||
for (float[] values : Arrays.asList(MARKET_UPFRONT, MARKET_TOTAL_MARKET)) {
|
||||
double sum = 0;
|
||||
for (float f : values) {
|
||||
sum += f;
|
||||
|
|
|
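The test above sums each float array before checking VarianceAggregatorCollector's output. A minimal two-pass reference for population variance, written here as an assumed baseline rather than code copied from the test, shows what such a check compares against.

// Assumed reference baseline, not code from VarianceAggregatorCollectorTest.
static double populationVariance(float[] values)
{
  double sum = 0;
  for (float f : values) {
    sum += f;                                        // first pass: accumulate for the mean
  }
  final double mean = sum / values.length;

  double squaredDeviations = 0;
  for (float f : values) {
    squaredDeviations += (f - mean) * (f - mean);    // second pass: spread around the mean
  }
  return squaredDeviations / values.length;          // population (not sample) variance
}
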
@ -96,12 +96,12 @@ public class VarianceGroupByQueryTest
|
|||
{
|
||||
GroupByQuery query = GroupByQuery
|
||||
.builder()
|
||||
.setDataSource(QueryRunnerTestHelper.dataSource)
|
||||
.setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
|
||||
.setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
|
||||
.setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
|
||||
.setDimensions(new DefaultDimensionSpec("quality", "alias"))
|
||||
.setAggregatorSpecs(VarianceTestHelper.indexVarianceAggr)
|
||||
.setPostAggregatorSpecs(Collections.singletonList(VarianceTestHelper.stddevOfIndexPostAggr))
|
||||
.setGranularity(QueryRunnerTestHelper.dayGran)
|
||||
.setAggregatorSpecs(VarianceTestHelper.INDEX_VARIANCE_AGGR)
|
||||
.setPostAggregatorSpecs(Collections.singletonList(VarianceTestHelper.STD_DEV_OF_INDEX_POST_AGGR))
|
||||
.setGranularity(QueryRunnerTestHelper.DAY_GRAN)
|
||||
.build();
|
||||
|
||||
VarianceTestHelper.RowBuilder builder =
|
||||
|
@ -138,16 +138,16 @@ public class VarianceGroupByQueryTest
|
|||
{
|
||||
GroupByQuery query = GroupByQuery
|
||||
.builder()
|
||||
.setDataSource(QueryRunnerTestHelper.dataSource)
|
||||
.setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
|
||||
.setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
|
||||
.setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
|
||||
.setDimensions(new DefaultDimensionSpec("quality", "alias"))
|
||||
.setAggregatorSpecs(
|
||||
QueryRunnerTestHelper.rowsCount,
|
||||
VarianceTestHelper.indexVarianceAggr,
|
||||
QueryRunnerTestHelper.ROWS_COUNT,
|
||||
VarianceTestHelper.INDEX_VARIANCE_AGGR,
|
||||
new LongSumAggregatorFactory("idx", "index")
|
||||
)
|
||||
.setPostAggregatorSpecs(Collections.singletonList(VarianceTestHelper.stddevOfIndexPostAggr))
|
||||
.setGranularity(QueryRunnerTestHelper.dayGran)
|
||||
.setPostAggregatorSpecs(Collections.singletonList(VarianceTestHelper.STD_DEV_OF_INDEX_POST_AGGR))
|
||||
.setGranularity(QueryRunnerTestHelper.DAY_GRAN)
|
||||
.build();
|
||||
|
||||
VarianceTestHelper.RowBuilder builder =
|
||||
|
@ -188,20 +188,20 @@ public class VarianceGroupByQueryTest
|
|||
|
||||
GroupByQuery query = GroupByQuery
|
||||
.builder()
|
||||
.setDataSource(QueryRunnerTestHelper.dataSource)
|
||||
.setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
|
||||
.setInterval("2011-04-02/2011-04-04")
|
||||
.setDimensions(new DefaultDimensionSpec("quality", "alias"))
|
||||
.setAggregatorSpecs(
|
||||
QueryRunnerTestHelper.rowsCount,
|
||||
QueryRunnerTestHelper.indexLongSum,
|
||||
VarianceTestHelper.indexVarianceAggr
|
||||
QueryRunnerTestHelper.ROWS_COUNT,
|
||||
QueryRunnerTestHelper.INDEX_LONG_SUM,
|
||||
VarianceTestHelper.INDEX_VARIANCE_AGGR
|
||||
)
|
||||
.setPostAggregatorSpecs(ImmutableList.of(VarianceTestHelper.stddevOfIndexPostAggr))
|
||||
.setPostAggregatorSpecs(ImmutableList.of(VarianceTestHelper.STD_DEV_OF_INDEX_POST_AGGR))
|
||||
.setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
|
||||
.setHavingSpec(
|
||||
new OrHavingSpec(
|
||||
ImmutableList.of(
|
||||
new GreaterThanHavingSpec(VarianceTestHelper.stddevOfIndexMetric, 15L) // 3 rows
|
||||
new GreaterThanHavingSpec(VarianceTestHelper.STD_DEV_OF_INDEX_METRIC, 15L) // 3 rows
|
||||
)
|
||||
)
|
||||
)
|
||||
|
@ -220,7 +220,7 @@ public class VarianceGroupByQueryTest
|
|||
new DefaultLimitSpec(
|
||||
Collections.singletonList(
|
||||
OrderByColumnSpec.asc(
|
||||
VarianceTestHelper.stddevOfIndexMetric
|
||||
VarianceTestHelper.STD_DEV_OF_INDEX_METRIC
|
||||
)
|
||||
), 2
|
||||
)
|
||||
|
|
|
@ -48,26 +48,26 @@ public class VarianceTestHelper extends QueryRunnerTestHelper
|
|||
module.configure(null);
|
||||
}
|
||||
|
||||
public static final String indexVarianceMetric = "index_var";
|
||||
public static final String INDEX_VARIANCE_METRIC = "index_var";
|
||||
|
||||
public static final VarianceAggregatorFactory indexVarianceAggr = new VarianceAggregatorFactory(
|
||||
indexVarianceMetric,
|
||||
indexMetric
|
||||
public static final VarianceAggregatorFactory INDEX_VARIANCE_AGGR = new VarianceAggregatorFactory(
|
||||
INDEX_VARIANCE_METRIC,
|
||||
INDEX_METRIC
|
||||
);
|
||||
|
||||
public static final String stddevOfIndexMetric = "index_stddev";
|
||||
public static final String STD_DEV_OF_INDEX_METRIC = "index_stddev";
|
||||
|
||||
public static final PostAggregator stddevOfIndexPostAggr = new StandardDeviationPostAggregator(
|
||||
stddevOfIndexMetric,
|
||||
indexVarianceMetric,
|
||||
public static final PostAggregator STD_DEV_OF_INDEX_POST_AGGR = new StandardDeviationPostAggregator(
|
||||
STD_DEV_OF_INDEX_METRIC,
|
||||
INDEX_VARIANCE_METRIC,
|
||||
null
|
||||
);
|
||||
|
||||
public static final List<AggregatorFactory> commonPlusVarAggregators = Arrays.asList(
|
||||
rowsCount,
|
||||
indexDoubleSum,
|
||||
qualityUniques,
|
||||
indexVarianceAggr
|
||||
public static final List<AggregatorFactory> COMMON_PLUS_VAR_AGGREGATORS = Arrays.asList(
|
||||
ROWS_COUNT,
|
||||
INDEX_DOUBLE_SUM,
|
||||
QUALITY_UNIQUES,
|
||||
INDEX_VARIANCE_AGGR
|
||||
);
|
||||
|
||||
public static class RowBuilder
|
||||
|
|
|
@ -70,14 +70,14 @@ public class VarianceTimeseriesQueryTest
|
|||
public void testTimeseriesWithNullFilterOnNonExistentDimension()
|
||||
{
|
||||
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
|
||||
.dataSource(QueryRunnerTestHelper.dataSource)
|
||||
.granularity(QueryRunnerTestHelper.dayGran)
|
||||
.dataSource(QueryRunnerTestHelper.DATA_SOURCE)
|
||||
.granularity(QueryRunnerTestHelper.DAY_GRAN)
|
||||
.filters("bobby", null)
|
||||
.intervals(QueryRunnerTestHelper.firstToThird)
|
||||
.aggregators(VarianceTestHelper.commonPlusVarAggregators)
|
||||
.intervals(QueryRunnerTestHelper.FIRST_TO_THIRD)
|
||||
.aggregators(VarianceTestHelper.COMMON_PLUS_VAR_AGGREGATORS)
|
||||
.postAggregators(
|
||||
QueryRunnerTestHelper.addRowsIndexConstant,
|
||||
VarianceTestHelper.stddevOfIndexPostAggr
|
||||
QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT,
|
||||
VarianceTestHelper.STD_DEV_OF_INDEX_POST_AGGR
|
||||
)
|
||||
.descending(descending)
|
||||
.build();
|
||||
|
|
|
@ -68,16 +68,16 @@ public class VarianceTopNQueryTest
|
|||
public void testFullOnTopNOverUniques()
|
||||
{
|
||||
TopNQuery query = new TopNQueryBuilder()
|
||||
.dataSource(QueryRunnerTestHelper.dataSource)
|
||||
.granularity(QueryRunnerTestHelper.allGran)
|
||||
.dimension(QueryRunnerTestHelper.marketDimension)
|
||||
.metric(QueryRunnerTestHelper.uniqueMetric)
|
||||
.dataSource(QueryRunnerTestHelper.DATA_SOURCE)
|
||||
.granularity(QueryRunnerTestHelper.ALL_GRAN)
|
||||
.dimension(QueryRunnerTestHelper.MARKET_DIMENSION)
|
||||
.metric(QueryRunnerTestHelper.UNIQUE_METRIC)
|
||||
.threshold(3)
|
||||
.intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
|
||||
.intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
|
||||
.aggregators(
|
||||
Lists.newArrayList(
|
||||
Iterables.concat(
|
||||
VarianceTestHelper.commonPlusVarAggregators,
|
||||
VarianceTestHelper.COMMON_PLUS_VAR_AGGREGATORS,
|
||||
Lists.newArrayList(
|
||||
new DoubleMaxAggregatorFactory("maxIndex", "index"),
|
||||
new DoubleMinAggregatorFactory("minIndex", "index")
|
||||
|
@ -85,7 +85,7 @@ public class VarianceTopNQueryTest
|
|||
)
|
||||
)
|
||||
)
|
||||
.postAggregators(QueryRunnerTestHelper.addRowsIndexConstant)
|
||||
.postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
|
||||
.build();
|
||||
|
||||
List<Result<TopNResultValue>> expectedResults = Collections.singletonList(
|
||||
|
|
|
@ -23,266 +23,266 @@ package org.apache.druid.hll;
|
|||
*/
|
||||
public class ByteBitLookup
|
||||
{
|
||||
public static final byte[] lookup;
|
||||
public static final byte[] LOOKUP;
|
||||
|
||||
static {
|
||||
lookup = new byte[256];
|
||||
LOOKUP = new byte[256];
|
||||
|
||||
lookup[0] = 0;
|
||||
lookup[1] = 1;
|
||||
lookup[2] = 2;
|
||||
lookup[3] = 1;
|
||||
lookup[4] = 3;
|
||||
lookup[5] = 1;
|
||||
lookup[6] = 2;
|
||||
lookup[7] = 1;
|
||||
lookup[8] = 4;
|
||||
lookup[9] = 1;
|
||||
lookup[10] = 2;
|
||||
lookup[11] = 1;
|
||||
lookup[12] = 3;
|
||||
lookup[13] = 1;
|
||||
lookup[14] = 2;
|
||||
lookup[15] = 1;
|
||||
lookup[16] = 5;
|
||||
lookup[17] = 1;
|
||||
lookup[18] = 2;
|
||||
lookup[19] = 1;
|
||||
lookup[20] = 3;
|
||||
lookup[21] = 1;
|
||||
lookup[22] = 2;
|
||||
lookup[23] = 1;
|
||||
lookup[24] = 4;
|
||||
lookup[25] = 1;
|
||||
lookup[26] = 2;
|
||||
lookup[27] = 1;
|
||||
lookup[28] = 3;
|
||||
lookup[29] = 1;
|
||||
lookup[30] = 2;
|
||||
lookup[31] = 1;
|
||||
lookup[32] = 6;
|
||||
lookup[33] = 1;
|
||||
lookup[34] = 2;
|
||||
lookup[35] = 1;
|
||||
lookup[36] = 3;
|
||||
lookup[37] = 1;
|
||||
lookup[38] = 2;
|
||||
lookup[39] = 1;
|
||||
lookup[40] = 4;
|
||||
lookup[41] = 1;
|
||||
lookup[42] = 2;
|
||||
lookup[43] = 1;
|
||||
lookup[44] = 3;
|
||||
lookup[45] = 1;
|
||||
lookup[46] = 2;
|
||||
lookup[47] = 1;
|
||||
lookup[48] = 5;
|
||||
lookup[49] = 1;
|
||||
lookup[50] = 2;
|
||||
lookup[51] = 1;
|
||||
lookup[52] = 3;
|
||||
lookup[53] = 1;
|
||||
lookup[54] = 2;
|
||||
lookup[55] = 1;
|
||||
lookup[56] = 4;
|
||||
lookup[57] = 1;
|
||||
lookup[58] = 2;
|
||||
lookup[59] = 1;
|
||||
lookup[60] = 3;
|
||||
lookup[61] = 1;
|
||||
lookup[62] = 2;
|
||||
lookup[63] = 1;
|
||||
lookup[64] = 7;
|
||||
lookup[65] = 1;
|
||||
lookup[66] = 2;
|
||||
lookup[67] = 1;
|
||||
lookup[68] = 3;
|
||||
lookup[69] = 1;
|
||||
lookup[70] = 2;
|
||||
lookup[71] = 1;
|
||||
lookup[72] = 4;
|
||||
lookup[73] = 1;
|
||||
lookup[74] = 2;
|
||||
lookup[75] = 1;
|
||||
lookup[76] = 3;
|
||||
lookup[77] = 1;
|
||||
lookup[78] = 2;
|
||||
lookup[79] = 1;
|
||||
lookup[80] = 5;
|
||||
lookup[81] = 1;
|
||||
lookup[82] = 2;
|
||||
lookup[83] = 1;
|
||||
lookup[84] = 3;
|
||||
lookup[85] = 1;
|
||||
lookup[86] = 2;
|
||||
lookup[87] = 1;
|
||||
lookup[88] = 4;
|
||||
lookup[89] = 1;
|
||||
lookup[90] = 2;
|
||||
lookup[91] = 1;
|
||||
lookup[92] = 3;
|
||||
lookup[93] = 1;
|
||||
lookup[94] = 2;
|
||||
lookup[95] = 1;
|
||||
lookup[96] = 6;
|
||||
lookup[97] = 1;
|
||||
lookup[98] = 2;
|
||||
lookup[99] = 1;
|
||||
lookup[100] = 3;
|
||||
lookup[101] = 1;
|
||||
lookup[102] = 2;
|
||||
lookup[103] = 1;
|
||||
lookup[104] = 4;
|
||||
lookup[105] = 1;
|
||||
lookup[106] = 2;
|
||||
lookup[107] = 1;
|
||||
lookup[108] = 3;
|
||||
lookup[109] = 1;
|
||||
lookup[110] = 2;
|
||||
lookup[111] = 1;
|
||||
lookup[112] = 5;
|
||||
lookup[113] = 1;
|
||||
lookup[114] = 2;
|
||||
lookup[115] = 1;
|
||||
lookup[116] = 3;
|
||||
lookup[117] = 1;
|
||||
lookup[118] = 2;
|
||||
lookup[119] = 1;
|
||||
lookup[120] = 4;
|
||||
lookup[121] = 1;
|
||||
lookup[122] = 2;
|
||||
lookup[123] = 1;
|
||||
lookup[124] = 3;
|
||||
lookup[125] = 1;
|
||||
lookup[126] = 2;
|
||||
lookup[127] = 1;
|
||||
lookup[128] = 8;
|
||||
lookup[129] = 1;
|
||||
lookup[130] = 2;
|
||||
lookup[131] = 1;
|
||||
lookup[132] = 3;
|
||||
lookup[133] = 1;
|
||||
lookup[134] = 2;
|
||||
lookup[135] = 1;
|
||||
lookup[136] = 4;
|
||||
lookup[137] = 1;
|
||||
lookup[138] = 2;
|
||||
lookup[139] = 1;
|
||||
lookup[140] = 3;
|
||||
lookup[141] = 1;
|
||||
lookup[142] = 2;
|
||||
lookup[143] = 1;
|
||||
lookup[144] = 5;
|
||||
lookup[145] = 1;
|
||||
lookup[146] = 2;
|
||||
lookup[147] = 1;
|
||||
lookup[148] = 3;
|
||||
lookup[149] = 1;
|
||||
lookup[150] = 2;
|
||||
lookup[151] = 1;
|
||||
lookup[152] = 4;
|
||||
lookup[153] = 1;
|
||||
lookup[154] = 2;
|
||||
lookup[155] = 1;
|
||||
lookup[156] = 3;
|
||||
lookup[157] = 1;
|
||||
lookup[158] = 2;
|
||||
lookup[159] = 1;
|
||||
lookup[160] = 6;
|
||||
lookup[161] = 1;
|
||||
lookup[162] = 2;
|
||||
lookup[163] = 1;
|
||||
lookup[164] = 3;
|
||||
lookup[165] = 1;
|
||||
lookup[166] = 2;
|
||||
lookup[167] = 1;
|
||||
lookup[168] = 4;
|
||||
lookup[169] = 1;
|
||||
lookup[170] = 2;
|
||||
lookup[171] = 1;
|
||||
lookup[172] = 3;
|
||||
lookup[173] = 1;
|
||||
lookup[174] = 2;
|
||||
lookup[175] = 1;
|
||||
lookup[176] = 5;
|
||||
lookup[177] = 1;
|
||||
lookup[178] = 2;
|
||||
lookup[179] = 1;
|
||||
lookup[180] = 3;
|
||||
lookup[181] = 1;
|
||||
lookup[182] = 2;
|
||||
lookup[183] = 1;
|
||||
lookup[184] = 4;
|
||||
lookup[185] = 1;
|
||||
lookup[186] = 2;
|
||||
lookup[187] = 1;
|
||||
lookup[188] = 3;
|
||||
lookup[189] = 1;
|
||||
lookup[190] = 2;
|
||||
lookup[191] = 1;
|
||||
lookup[192] = 7;
|
||||
lookup[193] = 1;
|
||||
lookup[194] = 2;
|
||||
lookup[195] = 1;
|
||||
lookup[196] = 3;
|
||||
lookup[197] = 1;
|
||||
lookup[198] = 2;
|
||||
lookup[199] = 1;
|
||||
lookup[200] = 4;
|
||||
lookup[201] = 1;
|
||||
lookup[202] = 2;
|
||||
lookup[203] = 1;
|
||||
lookup[204] = 3;
|
||||
lookup[205] = 1;
|
||||
lookup[206] = 2;
|
||||
lookup[207] = 1;
|
||||
lookup[208] = 5;
|
||||
lookup[209] = 1;
|
||||
lookup[210] = 2;
|
||||
lookup[211] = 1;
|
||||
lookup[212] = 3;
|
||||
lookup[213] = 1;
|
||||
lookup[214] = 2;
|
||||
lookup[215] = 1;
|
||||
lookup[216] = 4;
|
||||
lookup[217] = 1;
|
||||
lookup[218] = 2;
|
||||
lookup[219] = 1;
|
||||
lookup[220] = 3;
|
||||
lookup[221] = 1;
|
||||
lookup[222] = 2;
|
||||
lookup[223] = 1;
|
||||
lookup[224] = 6;
|
||||
lookup[225] = 1;
|
||||
lookup[226] = 2;
|
||||
lookup[227] = 1;
|
||||
lookup[228] = 3;
|
||||
lookup[229] = 1;
|
||||
lookup[230] = 2;
|
||||
lookup[231] = 1;
|
||||
lookup[232] = 4;
|
||||
lookup[233] = 1;
|
||||
lookup[234] = 2;
|
||||
lookup[235] = 1;
|
||||
lookup[236] = 3;
|
||||
lookup[237] = 1;
|
||||
lookup[238] = 2;
|
||||
lookup[239] = 1;
|
||||
lookup[240] = 5;
|
||||
lookup[241] = 1;
|
||||
lookup[242] = 2;
|
||||
lookup[243] = 1;
|
||||
lookup[244] = 3;
|
||||
lookup[245] = 1;
|
||||
lookup[246] = 2;
|
||||
lookup[247] = 1;
|
||||
lookup[248] = 4;
|
||||
lookup[249] = 1;
|
||||
lookup[250] = 2;
|
||||
lookup[251] = 1;
|
||||
lookup[252] = 3;
|
||||
lookup[253] = 1;
|
||||
lookup[254] = 2;
|
||||
lookup[255] = 1;
|
||||
LOOKUP[0] = 0;
|
||||
LOOKUP[1] = 1;
|
||||
LOOKUP[2] = 2;
|
||||
LOOKUP[3] = 1;
|
||||
LOOKUP[4] = 3;
|
||||
LOOKUP[5] = 1;
|
||||
LOOKUP[6] = 2;
|
||||
LOOKUP[7] = 1;
|
||||
LOOKUP[8] = 4;
|
||||
LOOKUP[9] = 1;
|
||||
LOOKUP[10] = 2;
|
||||
LOOKUP[11] = 1;
|
||||
LOOKUP[12] = 3;
|
||||
LOOKUP[13] = 1;
|
||||
LOOKUP[14] = 2;
|
||||
LOOKUP[15] = 1;
|
||||
LOOKUP[16] = 5;
|
||||
LOOKUP[17] = 1;
|
||||
LOOKUP[18] = 2;
|
||||
LOOKUP[19] = 1;
|
||||
LOOKUP[20] = 3;
|
||||
LOOKUP[21] = 1;
|
||||
LOOKUP[22] = 2;
|
||||
LOOKUP[23] = 1;
|
||||
LOOKUP[24] = 4;
|
||||
LOOKUP[25] = 1;
|
||||
LOOKUP[26] = 2;
|
||||
LOOKUP[27] = 1;
|
||||
LOOKUP[28] = 3;
|
||||
LOOKUP[29] = 1;
|
||||
LOOKUP[30] = 2;
|
||||
LOOKUP[31] = 1;
|
||||
LOOKUP[32] = 6;
|
||||
LOOKUP[33] = 1;
|
||||
LOOKUP[34] = 2;
|
||||
LOOKUP[35] = 1;
|
||||
LOOKUP[36] = 3;
|
||||
LOOKUP[37] = 1;
|
||||
LOOKUP[38] = 2;
|
||||
LOOKUP[39] = 1;
|
||||
LOOKUP[40] = 4;
|
||||
LOOKUP[41] = 1;
|
||||
LOOKUP[42] = 2;
|
||||
LOOKUP[43] = 1;
|
||||
LOOKUP[44] = 3;
|
||||
LOOKUP[45] = 1;
|
||||
LOOKUP[46] = 2;
|
||||
LOOKUP[47] = 1;
|
||||
LOOKUP[48] = 5;
|
||||
LOOKUP[49] = 1;
|
||||
LOOKUP[50] = 2;
|
||||
LOOKUP[51] = 1;
|
||||
LOOKUP[52] = 3;
|
||||
LOOKUP[53] = 1;
|
||||
LOOKUP[54] = 2;
|
||||
LOOKUP[55] = 1;
|
||||
LOOKUP[56] = 4;
|
||||
LOOKUP[57] = 1;
|
||||
LOOKUP[58] = 2;
|
||||
LOOKUP[59] = 1;
|
||||
LOOKUP[60] = 3;
|
||||
LOOKUP[61] = 1;
|
||||
LOOKUP[62] = 2;
|
||||
LOOKUP[63] = 1;
|
||||
LOOKUP[64] = 7;
|
||||
LOOKUP[65] = 1;
|
||||
LOOKUP[66] = 2;
|
||||
LOOKUP[67] = 1;
|
||||
LOOKUP[68] = 3;
|
||||
LOOKUP[69] = 1;
|
||||
LOOKUP[70] = 2;
|
||||
LOOKUP[71] = 1;
|
||||
LOOKUP[72] = 4;
|
||||
LOOKUP[73] = 1;
|
||||
LOOKUP[74] = 2;
|
||||
LOOKUP[75] = 1;
|
||||
LOOKUP[76] = 3;
|
||||
LOOKUP[77] = 1;
|
||||
LOOKUP[78] = 2;
|
||||
LOOKUP[79] = 1;
|
||||
LOOKUP[80] = 5;
|
||||
LOOKUP[81] = 1;
|
||||
LOOKUP[82] = 2;
|
||||
LOOKUP[83] = 1;
|
||||
LOOKUP[84] = 3;
|
||||
LOOKUP[85] = 1;
|
||||
LOOKUP[86] = 2;
|
||||
LOOKUP[87] = 1;
|
||||
LOOKUP[88] = 4;
|
||||
LOOKUP[89] = 1;
|
||||
LOOKUP[90] = 2;
|
||||
LOOKUP[91] = 1;
|
||||
LOOKUP[92] = 3;
|
||||
LOOKUP[93] = 1;
|
||||
LOOKUP[94] = 2;
|
||||
LOOKUP[95] = 1;
|
||||
LOOKUP[96] = 6;
|
||||
LOOKUP[97] = 1;
|
||||
LOOKUP[98] = 2;
|
||||
LOOKUP[99] = 1;
|
||||
LOOKUP[100] = 3;
|
||||
LOOKUP[101] = 1;
|
||||
LOOKUP[102] = 2;
|
||||
LOOKUP[103] = 1;
|
||||
LOOKUP[104] = 4;
|
||||
LOOKUP[105] = 1;
|
||||
LOOKUP[106] = 2;
|
||||
LOOKUP[107] = 1;
|
||||
LOOKUP[108] = 3;
|
||||
LOOKUP[109] = 1;
|
||||
LOOKUP[110] = 2;
|
||||
LOOKUP[111] = 1;
|
||||
LOOKUP[112] = 5;
|
||||
LOOKUP[113] = 1;
|
||||
LOOKUP[114] = 2;
|
||||
LOOKUP[115] = 1;
|
||||
LOOKUP[116] = 3;
|
||||
LOOKUP[117] = 1;
|
||||
LOOKUP[118] = 2;
|
||||
LOOKUP[119] = 1;
|
||||
LOOKUP[120] = 4;
|
||||
LOOKUP[121] = 1;
|
||||
LOOKUP[122] = 2;
|
||||
LOOKUP[123] = 1;
|
||||
LOOKUP[124] = 3;
|
||||
LOOKUP[125] = 1;
|
||||
LOOKUP[126] = 2;
|
||||
LOOKUP[127] = 1;
|
||||
LOOKUP[128] = 8;
|
||||
LOOKUP[129] = 1;
|
||||
LOOKUP[130] = 2;
|
||||
LOOKUP[131] = 1;
|
||||
LOOKUP[132] = 3;
|
||||
LOOKUP[133] = 1;
|
||||
LOOKUP[134] = 2;
|
||||
LOOKUP[135] = 1;
|
||||
LOOKUP[136] = 4;
|
||||
LOOKUP[137] = 1;
|
||||
LOOKUP[138] = 2;
|
||||
LOOKUP[139] = 1;
|
||||
LOOKUP[140] = 3;
|
||||
LOOKUP[141] = 1;
|
||||
LOOKUP[142] = 2;
|
||||
LOOKUP[143] = 1;
|
||||
LOOKUP[144] = 5;
|
||||
LOOKUP[145] = 1;
|
||||
LOOKUP[146] = 2;
|
||||
LOOKUP[147] = 1;
|
||||
LOOKUP[148] = 3;
|
||||
LOOKUP[149] = 1;
|
||||
LOOKUP[150] = 2;
|
||||
LOOKUP[151] = 1;
|
||||
LOOKUP[152] = 4;
|
||||
LOOKUP[153] = 1;
|
||||
LOOKUP[154] = 2;
|
||||
LOOKUP[155] = 1;
|
||||
LOOKUP[156] = 3;
|
||||
LOOKUP[157] = 1;
|
||||
LOOKUP[158] = 2;
|
||||
LOOKUP[159] = 1;
|
||||
LOOKUP[160] = 6;
|
||||
LOOKUP[161] = 1;
|
||||
LOOKUP[162] = 2;
|
||||
LOOKUP[163] = 1;
|
||||
LOOKUP[164] = 3;
|
||||
LOOKUP[165] = 1;
|
||||
LOOKUP[166] = 2;
|
||||
LOOKUP[167] = 1;
|
||||
LOOKUP[168] = 4;
|
||||
LOOKUP[169] = 1;
|
||||
LOOKUP[170] = 2;
|
||||
LOOKUP[171] = 1;
|
||||
LOOKUP[172] = 3;
|
||||
LOOKUP[173] = 1;
|
||||
LOOKUP[174] = 2;
|
||||
LOOKUP[175] = 1;
|
||||
LOOKUP[176] = 5;
|
||||
LOOKUP[177] = 1;
|
||||
LOOKUP[178] = 2;
|
||||
LOOKUP[179] = 1;
|
||||
LOOKUP[180] = 3;
|
||||
LOOKUP[181] = 1;
|
||||
LOOKUP[182] = 2;
|
||||
LOOKUP[183] = 1;
|
||||
LOOKUP[184] = 4;
|
||||
LOOKUP[185] = 1;
|
||||
LOOKUP[186] = 2;
|
||||
LOOKUP[187] = 1;
|
||||
LOOKUP[188] = 3;
|
||||
LOOKUP[189] = 1;
|
||||
LOOKUP[190] = 2;
|
||||
LOOKUP[191] = 1;
|
||||
LOOKUP[192] = 7;
|
||||
LOOKUP[193] = 1;
|
||||
LOOKUP[194] = 2;
|
||||
LOOKUP[195] = 1;
|
||||
LOOKUP[196] = 3;
|
||||
LOOKUP[197] = 1;
|
||||
LOOKUP[198] = 2;
|
||||
LOOKUP[199] = 1;
|
||||
LOOKUP[200] = 4;
|
||||
LOOKUP[201] = 1;
|
||||
LOOKUP[202] = 2;
|
||||
LOOKUP[203] = 1;
|
||||
LOOKUP[204] = 3;
|
||||
LOOKUP[205] = 1;
|
||||
LOOKUP[206] = 2;
|
||||
LOOKUP[207] = 1;
|
||||
LOOKUP[208] = 5;
|
||||
LOOKUP[209] = 1;
|
||||
LOOKUP[210] = 2;
|
||||
LOOKUP[211] = 1;
|
||||
LOOKUP[212] = 3;
|
||||
LOOKUP[213] = 1;
|
||||
LOOKUP[214] = 2;
|
||||
LOOKUP[215] = 1;
|
||||
LOOKUP[216] = 4;
|
||||
LOOKUP[217] = 1;
|
||||
LOOKUP[218] = 2;
|
||||
LOOKUP[219] = 1;
|
||||
LOOKUP[220] = 3;
|
||||
LOOKUP[221] = 1;
|
||||
LOOKUP[222] = 2;
|
||||
LOOKUP[223] = 1;
|
||||
LOOKUP[224] = 6;
|
||||
LOOKUP[225] = 1;
|
||||
LOOKUP[226] = 2;
|
||||
LOOKUP[227] = 1;
|
||||
LOOKUP[228] = 3;
|
||||
LOOKUP[229] = 1;
|
||||
LOOKUP[230] = 2;
|
||||
LOOKUP[231] = 1;
|
||||
LOOKUP[232] = 4;
|
||||
LOOKUP[233] = 1;
|
||||
LOOKUP[234] = 2;
|
||||
LOOKUP[235] = 1;
|
||||
LOOKUP[236] = 3;
|
||||
LOOKUP[237] = 1;
|
||||
LOOKUP[238] = 2;
|
||||
LOOKUP[239] = 1;
|
||||
LOOKUP[240] = 5;
|
||||
LOOKUP[241] = 1;
|
||||
LOOKUP[242] = 2;
|
||||
LOOKUP[243] = 1;
|
||||
LOOKUP[244] = 3;
|
||||
LOOKUP[245] = 1;
|
||||
LOOKUP[246] = 2;
|
||||
LOOKUP[247] = 1;
|
||||
LOOKUP[248] = 4;
|
||||
LOOKUP[249] = 1;
|
||||
LOOKUP[250] = 2;
|
||||
LOOKUP[251] = 1;
|
||||
LOOKUP[252] = 3;
|
||||
LOOKUP[253] = 1;
|
||||
LOOKUP[254] = 2;
|
||||
LOOKUP[255] = 1;
|
||||
}
|
||||
}
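The table above (now LOOKUP) stores, for every byte value, the 1-based position of its lowest set bit, with zero mapped to zero. A small verification sketch, not part of this change, generates the same values programmatically and makes the hand-written table easy to check.

// Verification sketch (illustrative, not in the Druid source).
static byte[] buildLookup()
{
  final byte[] lookup = new byte[256];
  for (int i = 1; i < 256; i++) {
    // Integer.numberOfTrailingZeros gives the 0-based index of the lowest set bit.
    lookup[i] = (byte) (Integer.numberOfTrailingZeros(i) + 1);
  }
  // lookup[0] stays 0: a zero byte has no set bit, matching LOOKUP[0] = 0 above.
  return lookup;
}
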
|
||||
|
|
|
@@ -64,29 +64,29 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
   public static final double HIGH_CORRECTION_THRESHOLD = TWO_TO_THE_SIXTY_FOUR / 30.0d;
   public static final double CORRECTION_PARAMETER = ALPHA * NUM_BUCKETS * NUM_BUCKETS;

-  private static final int bucketMask = 0x7ff;
-  private static final int minBytesRequired = 10;
-  private static final int bitsPerBucket = 4;
-  private static final int range = (int) Math.pow(2, bitsPerBucket) - 1;
+  private static final int BUCKET_MASK = 0x7ff;
+  private static final int MIN_BYTES_REQUIRED = 10;
+  private static final int BITS_PER_BUCKET = 4;
+  private static final int RANGE = (int) Math.pow(2, BITS_PER_BUCKET) - 1;

-  private static final double[][] minNumRegisterLookup = new double[64][256];
+  private static final double[][] MIN_NUM_REGISTER_LOOKUP = new double[64][256];

   static {
     for (int registerOffset = 0; registerOffset < 64; ++registerOffset) {
       for (int register = 0; register < 256; ++register) {
         final int upper = ((register & 0xf0) >> 4) + registerOffset;
         final int lower = (register & 0x0f) + registerOffset;
-        minNumRegisterLookup[registerOffset][register] = 1.0d / Math.pow(2, upper) + 1.0d / Math.pow(2, lower);
+        MIN_NUM_REGISTER_LOOKUP[registerOffset][register] = 1.0d / Math.pow(2, upper) + 1.0d / Math.pow(2, lower);
       }
     }
   }

   // we have to keep track of the number of zeroes in each of the two halves of the byte register (0, 1, or 2)
-  private static final int[] numZeroLookup = new int[256];
+  private static final int[] NUM_ZERO_LOOKUP = new int[256];

   static {
-    for (int i = 0; i < numZeroLookup.length; ++i) {
-      numZeroLookup[i] = (((i & 0xf0) == 0) ? 1 : 0) + (((i & 0x0f) == 0) ? 1 : 0);
+    for (int i = 0; i < NUM_ZERO_LOOKUP.length; ++i) {
+      NUM_ZERO_LOOKUP[i] = (((i & 0xf0) == 0) ? 1 : 0) + (((i & 0x0f) == 0) ? 1 : 0);
     }
   }

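The two static blocks above only memoize a per-register computation so the estimation loop can avoid Math.pow and branching. A sketch of the unmemoized form (the method names are assumptions, not Druid API):

// Illustrative; equivalent to the table entries built in the static blocks above.
static double registerContribution(int registerOffset, int register)
{
  final int upper = ((register & 0xf0) >> 4) + registerOffset;
  final int lower = (register & 0x0f) + registerOffset;
  // == MIN_NUM_REGISTER_LOOKUP[registerOffset][register]
  return 1.0d / Math.pow(2, upper) + 1.0d / Math.pow(2, lower);
}

static int zeroNibbles(int register)
{
  // == NUM_ZERO_LOOKUP[register]: how many of the two 4-bit buckets in the byte are zero
  return (((register & 0xf0) == 0) ? 1 : 0) + (((register & 0x0f) == 0) ? 1 : 0);
}
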
@ -181,7 +181,7 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
|
|||
short position = copy.getShort();
|
||||
final int register = (int) copy.get() & 0xff;
|
||||
if (overflowValue != 0 && position == overflowPosition) {
|
||||
int upperNibble = ((register & 0xf0) >>> bitsPerBucket) + minNum;
|
||||
int upperNibble = ((register & 0xf0) >>> BITS_PER_BUCKET) + minNum;
|
||||
int lowerNibble = (register & 0x0f) + minNum;
|
||||
if (isUpperNibble) {
|
||||
upperNibble = Math.max(upperNibble, overflowValue);
|
||||
|
@ -191,8 +191,8 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
|
|||
e += 1.0d / Math.pow(2, upperNibble) + 1.0d / Math.pow(2, lowerNibble);
|
||||
zeroCount += (((upperNibble & 0xf0) == 0) ? 1 : 0) + (((lowerNibble & 0x0f) == 0) ? 1 : 0);
|
||||
} else {
|
||||
e += minNumRegisterLookup[minNum][register];
|
||||
zeroCount += numZeroLookup[register];
|
||||
e += MIN_NUM_REGISTER_LOOKUP[minNum][register];
|
||||
zeroCount += NUM_ZERO_LOOKUP[register];
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -215,7 +215,7 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
|
|||
while (copy.hasRemaining()) {
|
||||
final int register = (int) copy.get() & 0xff;
|
||||
if (overflowValue != 0 && position == overflowPosition) {
|
||||
int upperNibble = ((register & 0xf0) >>> bitsPerBucket) + minNum;
|
||||
int upperNibble = ((register & 0xf0) >>> BITS_PER_BUCKET) + minNum;
|
||||
int lowerNibble = (register & 0x0f) + minNum;
|
||||
if (isUpperNibble) {
|
||||
upperNibble = Math.max(upperNibble, overflowValue);
|
||||
|
@ -225,8 +225,8 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
|
|||
e += 1.0d / Math.pow(2, upperNibble) + 1.0d / Math.pow(2, lowerNibble);
|
||||
zeroCount += (((upperNibble & 0xf0) == 0) ? 1 : 0) + (((lowerNibble & 0x0f) == 0) ? 1 : 0);
|
||||
} else {
|
||||
e += minNumRegisterLookup[minNum][register];
|
||||
zeroCount += numZeroLookup[register];
|
||||
e += MIN_NUM_REGISTER_LOOKUP[minNum][register];
|
||||
zeroCount += NUM_ZERO_LOOKUP[register];
|
||||
}
|
||||
position++;
|
||||
}
|
||||
|
@ -302,20 +302,20 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
|
|||
|
||||
public void add(byte[] hashedValue)
|
||||
{
|
||||
if (hashedValue.length < minBytesRequired) {
|
||||
throw new IAE("Insufficient bytes, need[%d] got [%d]", minBytesRequired, hashedValue.length);
|
||||
if (hashedValue.length < MIN_BYTES_REQUIRED) {
|
||||
throw new IAE("Insufficient bytes, need[%d] got [%d]", MIN_BYTES_REQUIRED, hashedValue.length);
|
||||
}
|
||||
|
||||
estimatedCardinality = null;
|
||||
|
||||
final ByteBuffer buffer = ByteBuffer.wrap(hashedValue);
|
||||
|
||||
short bucket = (short) (buffer.getShort(hashedValue.length - 2) & bucketMask);
|
||||
short bucket = (short) (buffer.getShort(hashedValue.length - 2) & BUCKET_MASK);
|
||||
|
||||
byte positionOf1 = 0;
|
||||
|
||||
for (int i = 0; i < 8; ++i) {
|
||||
byte lookupVal = ByteBitLookup.lookup[UnsignedBytes.toInt(hashedValue[i])];
|
||||
byte lookupVal = ByteBitLookup.LOOKUP[UnsignedBytes.toInt(hashedValue[i])];
|
||||
switch (lookupVal) {
|
||||
case 0:
|
||||
positionOf1 += (byte) 8;
|
||||
|
@ -341,10 +341,10 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
|
|||
// discard everything outside of the range we care about
|
||||
if (positionOf1 <= registerOffset) {
|
||||
return;
|
||||
} else if (positionOf1 > (registerOffset + range)) {
|
||||
} else if (positionOf1 > (registerOffset + RANGE)) {
|
||||
final byte currMax = getMaxOverflowValue();
|
||||
if (positionOf1 > currMax) {
|
||||
if (currMax <= (registerOffset + range)) {
|
||||
if (currMax <= (registerOffset + RANGE)) {
|
||||
// this could be optimized by having an add without sanity checks
|
||||
add(getMaxOverflowRegister(), currMax);
|
||||
}
|
||||
|
@ -665,7 +665,7 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
|
|||
final int position = getPayloadBytePosition() + (short) (bucket >> 1);
|
||||
final boolean isUpperNibble = ((bucket & 0x1) == 0);
|
||||
|
||||
final byte shiftedPositionOf1 = (isUpperNibble) ? (byte) (positionOf1 << bitsPerBucket) : positionOf1;
|
||||
final byte shiftedPositionOf1 = (isUpperNibble) ? (byte) (positionOf1 << BITS_PER_BUCKET) : positionOf1;
|
||||
|
||||
if (storageBuffer.remaining() != getNumBytesForDenseStorage()) {
|
||||
convertToDenseStorage();
|
||||
|
@ -712,7 +712,7 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
|
|||
final int lowerNibble = currVal & 0x0f;
|
||||
|
||||
// subtract the differences so that the nibbles align
|
||||
final int otherUpper = (byteToAdd & 0xf0) - (offsetDiff << bitsPerBucket);
|
||||
final int otherUpper = (byteToAdd & 0xf0) - (offsetDiff << BITS_PER_BUCKET);
|
||||
final int otherLower = (byteToAdd & 0x0f) - offsetDiff;
|
||||
|
||||
final int newUpper = Math.max(upperNibble, otherUpper);
|
||||
|
|
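Taken together, the renamed constants above describe how add() maps a hashed value onto the register file: the low 11 bits (BUCKET_MASK = 0x7ff) pick one of 2048 buckets, and the first set bit across the leading eight bytes gives the value to store. A condensed sketch of that flow, reconstructed from the hunks above rather than copied verbatim:

// Reconstructed sketch; the real method also handles sparse/dense storage and overflow registers.
static void sketchAdd(byte[] hashedValue)
{
  final java.nio.ByteBuffer buffer = java.nio.ByteBuffer.wrap(hashedValue);
  // low 11 bits of the trailing short select the bucket (BUCKET_MASK = 0x7ff -> 2048 buckets)
  final short bucket = (short) (buffer.getShort(hashedValue.length - 2) & 0x7ff);

  byte positionOf1 = 0;
  for (int i = 0; i < 8; ++i) {
    final byte lookupVal = ByteBitLookup.LOOKUP[hashedValue[i] & 0xff];
    if (lookupVal == 0) {
      positionOf1 += 8;           // whole byte is zero: keep scanning the next byte
    } else {
      positionOf1 += lookupVal;   // 1-based position of the first set bit
      break;
    }
  }
  // bucket and positionOf1 are then folded into the packed 4-bit registers
}
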
|
@ -41,12 +41,12 @@ public class VersionOneHyperLogLogCollector extends HyperLogLogCollector
|
|||
public static final int HEADER_NUM_BYTES = 7;
|
||||
public static final int NUM_BYTES_FOR_DENSE_STORAGE = NUM_BYTES_FOR_BUCKETS + HEADER_NUM_BYTES;
|
||||
|
||||
private static final ByteBuffer defaultStorageBuffer = ByteBuffer.wrap(new byte[]{VERSION, 0, 0, 0, 0, 0, 0})
|
||||
private static final ByteBuffer DEFAULT_STORAGE_BUFFER = ByteBuffer.wrap(new byte[]{VERSION, 0, 0, 0, 0, 0, 0})
|
||||
.asReadOnlyBuffer();
|
||||
|
||||
VersionOneHyperLogLogCollector()
|
||||
{
|
||||
super(defaultStorageBuffer.duplicate());
|
||||
super(DEFAULT_STORAGE_BUFFER.duplicate());
|
||||
}
|
||||
|
||||
VersionOneHyperLogLogCollector(ByteBuffer buffer)
|
||||
|
|
|
@ -55,7 +55,7 @@ public class HyperLogLogSerdeBenchmarkTest extends AbstractBenchmark
|
|||
this.NUM_HASHES = num_hashes;
|
||||
}
|
||||
|
||||
private static final HashFunction hashFunction = Hashing.murmur3_128();
|
||||
private static final HashFunction HASH_FUNCTION = Hashing.murmur3_128();
|
||||
|
||||
@Parameterized.Parameters
|
||||
public static Collection<Object[]> getParameters()
|
||||
|
@ -216,13 +216,13 @@ public class HyperLogLogSerdeBenchmarkTest extends AbstractBenchmark
|
|||
{
|
||||
Random rand = new Random(758190);
|
||||
for (long i = 0; i < NUM_HASHES; ++i) {
|
||||
collector.add(hashFunction.hashLong(rand.nextLong()).asBytes());
|
||||
collector.add(HASH_FUNCTION.hashLong(rand.nextLong()).asBytes());
|
||||
}
|
||||
}
|
||||
|
||||
private static HashCode getHash(final ByteBuffer byteBuffer)
|
||||
{
|
||||
Hasher hasher = hashFunction.newHasher();
|
||||
Hasher hasher = HASH_FUNCTION.newHasher();
|
||||
while (byteBuffer.position() < byteBuffer.limit()) {
|
||||
hasher.putByte(byteBuffer.get());
|
||||
}
|
||||
|
|
|
@ -83,7 +83,7 @@ import java.util.SortedSet;
|
|||
*/
|
||||
public class HadoopDruidIndexerConfig
|
||||
{
|
||||
private static final Injector injector;
|
||||
private static final Injector INJECTOR;
|
||||
|
||||
public static final String CONFIG_PROPERTY = "druid.indexer.config";
|
||||
public static final Charset JAVA_NATIVE_CHARSET = Charset.forName("Unicode");
|
||||
|
@ -99,7 +99,7 @@ public class HadoopDruidIndexerConfig
|
|||
|
||||
|
||||
static {
|
||||
injector = Initialization.makeInjectorWithModules(
|
||||
INJECTOR = Initialization.makeInjectorWithModules(
|
||||
GuiceInjectors.makeStartupInjector(),
|
||||
ImmutableList.of(
|
||||
new Module()
|
||||
|
@ -118,11 +118,11 @@ public class HadoopDruidIndexerConfig
|
|||
new IndexingHadoopModule()
|
||||
)
|
||||
);
|
||||
JSON_MAPPER = injector.getInstance(ObjectMapper.class);
|
||||
INDEX_IO = injector.getInstance(IndexIO.class);
|
||||
INDEX_MERGER_V9 = injector.getInstance(IndexMergerV9.class);
|
||||
HADOOP_KERBEROS_CONFIG = injector.getInstance(HadoopKerberosConfig.class);
|
||||
DATA_SEGMENT_PUSHER = injector.getInstance(DataSegmentPusher.class);
|
||||
JSON_MAPPER = INJECTOR.getInstance(ObjectMapper.class);
|
||||
INDEX_IO = INJECTOR.getInstance(IndexIO.class);
|
||||
INDEX_MERGER_V9 = INJECTOR.getInstance(IndexMergerV9.class);
|
||||
HADOOP_KERBEROS_CONFIG = INJECTOR.getInstance(HadoopKerberosConfig.class);
|
||||
DATA_SEGMENT_PUSHER = INJECTOR.getInstance(DataSegmentPusher.class);
|
||||
}
|
||||
|
||||
public enum IndexJobCounters
|
||||
|
|
|
@ -317,7 +317,7 @@ public class IndexGeneratorJob implements Jobby
|
|||
|
||||
public static class IndexGeneratorMapper extends HadoopDruidIndexerMapper<BytesWritable, BytesWritable>
|
||||
{
|
||||
private static final HashFunction hashFunction = Hashing.murmur3_128();
|
||||
private static final HashFunction HASH_FUNCTION = Hashing.murmur3_128();
|
||||
|
||||
private AggregatorFactory[] aggregators;
|
||||
|
||||
|
@ -364,7 +364,7 @@ public class IndexGeneratorJob implements Jobby
|
|||
final long truncatedTimestamp = granularitySpec.getQueryGranularity()
|
||||
.bucketStart(inputRow.getTimestamp())
|
||||
.getMillis();
|
||||
final byte[] hashedDimensions = hashFunction.hashBytes(
|
||||
final byte[] hashedDimensions = HASH_FUNCTION.hashBytes(
|
||||
HadoopDruidIndexerConfig.JSON_MAPPER.writeValueAsBytes(
|
||||
Rows.toGroupKey(
|
||||
truncatedTimestamp,
|
||||
|
|
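IndexGeneratorMapper above hashes the serialized group key (truncated timestamp plus dimensions) with murmur3_128. A small self-contained sketch of that pattern follows; bucketFor() and the sample key are made-up illustrations, not the job's actual partitioning logic.

// Illustrative sketch; bucketFor() is an assumed helper, not Druid's shard assignment.
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import java.nio.charset.StandardCharsets;

public class GroupKeyHashSketch
{
  private static final HashFunction HASH_FUNCTION = Hashing.murmur3_128();

  static int bucketFor(byte[] serializedGroupKey, int numBuckets)
  {
    // hashBytes() is the same call the mapper uses on the JSON-serialized group key.
    final int hash = HASH_FUNCTION.hashBytes(serializedGroupKey).asInt();
    return Math.floorMod(hash, numBuckets);   // stable, non-negative bucket index
  }

  public static void main(String[] args)
  {
    final byte[] key = "2013-01-01T00:00:00Z|host=a".getBytes(StandardCharsets.UTF_8);
    System.out.println(bucketFor(key, 16));
  }
}
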
|
@ -53,7 +53,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
|
|||
public class Utils
|
||||
{
|
||||
private static final Logger log = new Logger(Utils.class);
|
||||
private static final ObjectMapper jsonMapper = new DefaultObjectMapper();
|
||||
private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper();
|
||||
|
||||
public static OutputStream makePathAndOutputStream(JobContext job, Path outputPath, boolean deleteExisting)
|
||||
throws IOException
|
||||
|
@ -120,7 +120,7 @@ public class Utils
|
|||
{
|
||||
FileSystem fs = statsPath.getFileSystem(job.getConfiguration());
|
||||
|
||||
return jsonMapper.readValue(
|
||||
return JSON_MAPPER.readValue(
|
||||
fs.open(statsPath),
|
||||
JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
|
||||
);
|
||||
|
@ -128,7 +128,7 @@ public class Utils
|
|||
|
||||
public static void storeStats(JobContext job, Path path, Map<String, Object> stats) throws IOException
|
||||
{
|
||||
jsonMapper.writeValue(makePathAndOutputStream(job, path, true), stats);
|
||||
JSON_MAPPER.writeValue(makePathAndOutputStream(job, path, true), stats);
|
||||
}
|
||||
|
||||
public static String getFailureMessage(Job failedJob, ObjectMapper jsonMapper)
|
||||
|
|
|
@ -45,11 +45,11 @@ import java.util.List;
|
|||
*/
|
||||
public class HadoopDruidIndexerConfigTest
|
||||
{
|
||||
private static final ObjectMapper jsonMapper;
|
||||
private static final ObjectMapper JSON_MAPPER;
|
||||
|
||||
static {
|
||||
jsonMapper = new DefaultObjectMapper();
|
||||
jsonMapper.setInjectableValues(new InjectableValues.Std().addValue(ObjectMapper.class, jsonMapper));
|
||||
JSON_MAPPER = new DefaultObjectMapper();
|
||||
JSON_MAPPER.setInjectableValues(new InjectableValues.Std().addValue(ObjectMapper.class, JSON_MAPPER));
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -75,7 +75,7 @@ public class HadoopDruidIndexerConfigTest
|
|||
ImmutableList.of(Intervals.of("2010-01-01/P1D"))
|
||||
),
|
||||
null,
|
||||
jsonMapper
|
||||
JSON_MAPPER
|
||||
),
|
||||
new HadoopIOConfig(ImmutableMap.of("paths", "bar", "type", "static"), null, null),
|
||||
new HadoopTuningConfig(
|
||||
|
@ -144,7 +144,7 @@ public class HadoopDruidIndexerConfigTest
|
|||
ImmutableList.of(Intervals.of("2010-01-01/P1D"))
|
||||
),
|
||||
null,
|
||||
jsonMapper
|
||||
JSON_MAPPER
|
||||
),
|
||||
new HadoopIOConfig(ImmutableMap.of("paths", "bar", "type", "static"), null, null),
|
||||
new HadoopTuningConfig(
|
||||
|
|
|
@ -51,11 +51,11 @@ import java.util.Map;
|
|||
*/
|
||||
public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
|
||||
{
|
||||
private static final String testDatasource = "test";
|
||||
private static final String testDatasource2 = "test2";
|
||||
private static final Interval testDatasourceInterval = Intervals.of("1970/3000");
|
||||
private static final Interval testDatasourceInterval2 = Intervals.of("2000/2001");
|
||||
private static final Interval testDatasourceIntervalPartial = Intervals.of("2050/3000");
|
||||
private static final String TEST_DATA_SOURCE = "test";
|
||||
private static final String TEST_DATA_SOURCE2 = "test2";
|
||||
private static final Interval TEST_DATA_SOURCE_INTERVAL = Intervals.of("1970/3000");
|
||||
private static final Interval TEST_DATA_SOURCE_INTERVAL2 = Intervals.of("2000/2001");
|
||||
private static final Interval TEST_DATA_SOURCE_INTERVAL_PARTIAL = Intervals.of("2050/3000");
|
||||
|
||||
private final ObjectMapper jsonMapper;
|
||||
|
||||
|
@ -70,7 +70,7 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
|
|||
}
|
||||
|
||||
private static final DataSegment SEGMENT = new DataSegment(
|
||||
testDatasource,
|
||||
TEST_DATA_SOURCE,
|
||||
Intervals.of("2000/3000"),
|
||||
"ver",
|
||||
ImmutableMap.of(
|
||||
|
@ -85,7 +85,7 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
|
|||
);
|
||||
|
||||
private static final DataSegment SEGMENT2 = new DataSegment(
|
||||
testDatasource2,
|
||||
TEST_DATA_SOURCE2,
|
||||
Intervals.of("2000/3000"),
|
||||
"ver2",
|
||||
ImmutableMap.of(
|
||||
|
@ -112,13 +112,13 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
|
|||
{
|
||||
PathSpec pathSpec = new DatasourcePathSpec(
|
||||
null,
|
||||
new DatasourceIngestionSpec(testDatasource, testDatasourceInterval, null, null, null, null, null, false, null),
|
||||
new DatasourceIngestionSpec(TEST_DATA_SOURCE, TEST_DATA_SOURCE_INTERVAL, null, null, null, null, null, false, null),
|
||||
null,
|
||||
false
|
||||
);
|
||||
HadoopDruidIndexerConfig config = testRunUpdateSegmentListIfDatasourcePathSpecIsUsed(
|
||||
pathSpec,
|
||||
testDatasourceInterval
|
||||
TEST_DATA_SOURCE_INTERVAL
|
||||
);
|
||||
Assert.assertEquals(
|
||||
ImmutableList.of(WindowedDataSegment.of(SEGMENT)),
|
||||
|
@ -132,8 +132,8 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
|
|||
PathSpec pathSpec = new DatasourcePathSpec(
|
||||
null,
|
||||
new DatasourceIngestionSpec(
|
||||
testDatasource,
|
||||
testDatasourceInterval,
|
||||
TEST_DATA_SOURCE,
|
||||
TEST_DATA_SOURCE_INTERVAL,
|
||||
null,
|
||||
ImmutableList.of(SEGMENT),
|
||||
null,
|
||||
|
@ -147,7 +147,7 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
|
|||
);
|
||||
HadoopDruidIndexerConfig config = testRunUpdateSegmentListIfDatasourcePathSpecIsUsed(
|
||||
pathSpec,
|
||||
testDatasourceInterval
|
||||
TEST_DATA_SOURCE_INTERVAL
|
||||
);
|
||||
Assert.assertEquals(
|
||||
ImmutableList.of(WindowedDataSegment.of(SEGMENT)),
|
||||
|
@ -161,8 +161,8 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
|
|||
PathSpec pathSpec = new DatasourcePathSpec(
|
||||
null,
|
||||
new DatasourceIngestionSpec(
|
||||
testDatasource,
|
||||
testDatasourceInterval,
|
||||
TEST_DATA_SOURCE,
|
||||
TEST_DATA_SOURCE_INTERVAL,
|
||||
null,
|
||||
ImmutableList.of(SEGMENT.withVersion("v2")),
|
||||
null,
|
||||
|
@ -176,7 +176,7 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
|
|||
);
|
||||
testRunUpdateSegmentListIfDatasourcePathSpecIsUsed(
|
||||
pathSpec,
|
||||
testDatasourceInterval
|
||||
TEST_DATA_SOURCE_INTERVAL
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -187,8 +187,8 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
|
|||
PathSpec pathSpec = new DatasourcePathSpec(
|
||||
null,
|
||||
new DatasourceIngestionSpec(
|
||||
testDatasource,
|
||||
testDatasourceIntervalPartial,
|
||||
TEST_DATA_SOURCE,
|
||||
TEST_DATA_SOURCE_INTERVAL_PARTIAL,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
|
@ -202,10 +202,10 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
|
|||
);
|
||||
HadoopDruidIndexerConfig config = testRunUpdateSegmentListIfDatasourcePathSpecIsUsed(
|
||||
pathSpec,
|
||||
testDatasourceIntervalPartial
|
||||
TEST_DATA_SOURCE_INTERVAL_PARTIAL
|
||||
);
|
||||
Assert.assertEquals(
|
||||
ImmutableList.of(new WindowedDataSegment(SEGMENT, testDatasourceIntervalPartial)),
|
||||
ImmutableList.of(new WindowedDataSegment(SEGMENT, TEST_DATA_SOURCE_INTERVAL_PARTIAL)),
|
||||
((DatasourcePathSpec) config.getPathSpec()).getSegments()
|
||||
);
|
||||
}
|
||||
|
@ -219,8 +219,8 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
|
|||
new DatasourcePathSpec(
|
||||
null,
|
||||
new DatasourceIngestionSpec(
|
||||
testDatasource,
|
||||
testDatasourceInterval,
|
||||
TEST_DATA_SOURCE,
|
||||
TEST_DATA_SOURCE_INTERVAL,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
|
@ -235,8 +235,8 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
|
|||
new DatasourcePathSpec(
|
||||
null,
|
||||
new DatasourceIngestionSpec(
|
||||
testDatasource2,
|
||||
testDatasourceInterval2,
|
||||
TEST_DATA_SOURCE2,
|
||||
TEST_DATA_SOURCE_INTERVAL2,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
|
@ -252,14 +252,14 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
|
|||
);
|
||||
HadoopDruidIndexerConfig config = testRunUpdateSegmentListIfDatasourcePathSpecIsUsed(
|
||||
pathSpec,
|
||||
testDatasourceInterval
|
||||
TEST_DATA_SOURCE_INTERVAL
|
||||
);
|
||||
Assert.assertEquals(
|
||||
ImmutableList.of(WindowedDataSegment.of(SEGMENT)),
|
||||
((DatasourcePathSpec) ((MultiplePathSpec) config.getPathSpec()).getChildren().get(1)).getSegments()
|
||||
);
|
||||
Assert.assertEquals(
|
||||
ImmutableList.of(new WindowedDataSegment(SEGMENT2, testDatasourceInterval2)),
|
||||
ImmutableList.of(new WindowedDataSegment(SEGMENT2, TEST_DATA_SOURCE_INTERVAL2)),
|
||||
((DatasourcePathSpec) ((MultiplePathSpec) config.getPathSpec()).getChildren().get(2)).getSegments()
|
||||
);
|
||||
}
|
||||
|
@ -300,15 +300,15 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
|
|||
|
||||
EasyMock.expect(
|
||||
segmentLister.getUsedSegmentsForIntervals(
|
||||
testDatasource,
|
||||
Collections.singletonList(jobInterval != null ? jobInterval.overlap(testDatasourceInterval) : null)
|
||||
TEST_DATA_SOURCE,
|
||||
Collections.singletonList(jobInterval != null ? jobInterval.overlap(TEST_DATA_SOURCE_INTERVAL) : null)
|
||||
)
|
||||
).andReturn(ImmutableList.of(SEGMENT));
|
||||
|
||||
EasyMock.expect(
|
||||
segmentLister.getUsedSegmentsForIntervals(
|
||||
testDatasource2,
|
||||
Collections.singletonList(jobInterval != null ? jobInterval.overlap(testDatasourceInterval2) : null)
|
||||
TEST_DATA_SOURCE2,
|
||||
Collections.singletonList(jobInterval != null ? jobInterval.overlap(TEST_DATA_SOURCE_INTERVAL2) : null)
|
||||
)
|
||||
).andReturn(ImmutableList.of(SEGMENT2));
|
||||
|
||||
|
|
|
@ -32,7 +32,7 @@ import java.util.List;
|
|||
*/
|
||||
public class HadoopTuningConfigTest
|
||||
{
|
||||
private static final ObjectMapper jsonMapper = new DefaultObjectMapper();
|
||||
private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper();
|
||||
|
||||
@Test
|
||||
public void testSerde() throws Exception
|
||||
|
@ -64,7 +64,7 @@ public class HadoopTuningConfigTest
|
|||
null
|
||||
);
|
||||
|
||||
HadoopTuningConfig actual = jsonReadWriteRead(jsonMapper.writeValueAsString(expected), HadoopTuningConfig.class);
|
||||
HadoopTuningConfig actual = jsonReadWriteRead(JSON_MAPPER.writeValueAsString(expected), HadoopTuningConfig.class);
|
||||
|
||||
Assert.assertEquals("/tmp/workingpath", actual.getWorkingPath());
|
||||
Assert.assertEquals("version", actual.getVersion());
|
||||
|
@ -88,7 +88,7 @@ public class HadoopTuningConfigTest
|
|||
public static <T> T jsonReadWriteRead(String s, Class<T> klass)
|
||||
{
|
||||
try {
|
||||
return jsonMapper.readValue(jsonMapper.writeValueAsBytes(jsonMapper.readValue(s, klass)), klass);
|
||||
return JSON_MAPPER.readValue(JSON_MAPPER.writeValueAsBytes(JSON_MAPPER.readValue(s, klass)), klass);
|
||||
}
|
||||
catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
|
|
|
@ -84,12 +84,12 @@ import java.util.TreeMap;
@RunWith(Parameterized.class)
public class IndexGeneratorJobTest
{
private static final AggregatorFactory[] aggs1 = {
private static final AggregatorFactory[] AGGS1 = {
new LongSumAggregatorFactory("visited_num", "visited_num"),
new HyperUniquesAggregatorFactory("unique_hosts", "host")
};

private static final AggregatorFactory[] aggs2 = {
private static final AggregatorFactory[] AGGS2 = {
new CountAggregatorFactory("count")
};

@ -156,7 +156,7 @@ public class IndexGeneratorJobTest
),
null,
null,
aggs1,
AGGS1,
"website"
},
{

@ -204,7 +204,7 @@ public class IndexGeneratorJobTest
),
null,
null,
aggs1,
AGGS1,
"website"
},
{

@ -253,7 +253,7 @@ public class IndexGeneratorJobTest
),
null,
null,
aggs1,
AGGS1,
"website"
},
{

@ -311,7 +311,7 @@ public class IndexGeneratorJobTest
),
null,
null,
aggs1,
AGGS1,
"website"
},
{

@ -344,7 +344,7 @@ public class IndexGeneratorJobTest
),
1, // force 1 row max per index for easier testing
null,
aggs2,
AGGS2,
"inherit_dims"
},
{

@ -377,7 +377,7 @@ public class IndexGeneratorJobTest
),
1, // force 1 row max per index for easier testing
null,
aggs2,
AGGS2,
"inherit_dims2"
}
}

@ -25,7 +25,7 @@ import org.junit.Test;
public class MetadataStorageUpdaterJobSpecTest
{
private static final ObjectMapper jsonMapper = new ObjectMapper();
private static final ObjectMapper JSON_MAPPER = new ObjectMapper();

@Test
public void testMetadaStorageConnectionConfigSimplePassword() throws Exception

@ -62,7 +62,7 @@ public class MetadataStorageUpdaterJobSpecTest
String pwd
) throws Exception
{
MetadataStorageUpdaterJobSpec spec = jsonMapper.readValue(
MetadataStorageUpdaterJobSpec spec = JSON_MAPPER.readValue(
"{" +
"\"type\": \"" + type + "\",\n" +
"\"connectURI\": \"" + connectURI + "\",\n" +

@ -39,14 +39,14 @@ import java.io.File;
@JsonTypeName("realtime_appenderator")
public class RealtimeAppenderatorTuningConfig implements TuningConfig, AppenderatorConfig
{
private static final int defaultMaxRowsInMemory = TuningConfig.DEFAULT_MAX_ROWS_IN_MEMORY;
private static final Period defaultIntermediatePersistPeriod = new Period("PT10M");
private static final int defaultMaxPendingPersists = 0;
private static final ShardSpec defaultShardSpec = new NumberedShardSpec(0, 1);
private static final IndexSpec defaultIndexSpec = new IndexSpec();
private static final Boolean defaultReportParseExceptions = Boolean.FALSE;
private static final long defaultPublishAndHandoffTimeout = 0;
private static final long defaultAlertTimeout = 0;
private static final int DEFAULT_MAX_ROWS_IN_MEMORY = TuningConfig.DEFAULT_MAX_ROWS_IN_MEMORY;
private static final Period DEFAULT_INTERMEDIATE_PERSIST_PERIOD = new Period("PT10M");
private static final int DEFAULT_MAX_PENDING_PERSISTS = 0;
private static final ShardSpec DEFAULT_SHARD_SPEC = new NumberedShardSpec(0, 1);
private static final IndexSpec DEFAULT_INDEX_SPEC = new IndexSpec();
private static final Boolean DEFAULT_REPORT_PARSE_EXCEPTIONS = Boolean.FALSE;
private static final long DEFAULT_HANDOFF_CONDITION_TIMEOUT = 0;
private static final long DEFAULT_ALERT_TIMEOUT = 0;

private static File createNewBasePersistDirectory()
{

@ -93,29 +93,29 @@ public class RealtimeAppenderatorTuningConfig implements TuningConfig, Appendera
@JsonProperty("maxSavedParseExceptions") @Nullable Integer maxSavedParseExceptions
)
{
this.maxRowsInMemory = maxRowsInMemory == null ? defaultMaxRowsInMemory : maxRowsInMemory;
this.maxRowsInMemory = maxRowsInMemory == null ? DEFAULT_MAX_ROWS_IN_MEMORY : maxRowsInMemory;
// initializing this to 0, it will be lazily intialized to a value
// @see server.src.main.java.org.apache.druid.segment.indexing.TuningConfigs#getMaxBytesInMemoryOrDefault(long)
this.maxBytesInMemory = maxBytesInMemory == null ? 0 : maxBytesInMemory;
this.partitionsSpec = new DynamicPartitionsSpec(maxRowsPerSegment, maxTotalRows);
this.intermediatePersistPeriod = intermediatePersistPeriod == null
? defaultIntermediatePersistPeriod
? DEFAULT_INTERMEDIATE_PERSIST_PERIOD
: intermediatePersistPeriod;
this.basePersistDirectory = basePersistDirectory == null ? createNewBasePersistDirectory() : basePersistDirectory;
this.maxPendingPersists = maxPendingPersists == null ? defaultMaxPendingPersists : maxPendingPersists;
this.shardSpec = shardSpec == null ? defaultShardSpec : shardSpec;
this.indexSpec = indexSpec == null ? defaultIndexSpec : indexSpec;
this.maxPendingPersists = maxPendingPersists == null ? DEFAULT_MAX_PENDING_PERSISTS : maxPendingPersists;
this.shardSpec = shardSpec == null ? DEFAULT_SHARD_SPEC : shardSpec;
this.indexSpec = indexSpec == null ? DEFAULT_INDEX_SPEC : indexSpec;
this.indexSpecForIntermediatePersists = indexSpecForIntermediatePersists == null ?
this.indexSpec : indexSpecForIntermediatePersists;
this.reportParseExceptions = reportParseExceptions == null
? defaultReportParseExceptions
? DEFAULT_REPORT_PARSE_EXCEPTIONS
: reportParseExceptions;
this.publishAndHandoffTimeout = publishAndHandoffTimeout == null
? defaultPublishAndHandoffTimeout
? DEFAULT_HANDOFF_CONDITION_TIMEOUT
: publishAndHandoffTimeout;
Preconditions.checkArgument(this.publishAndHandoffTimeout >= 0, "publishAndHandoffTimeout must be >= 0");

this.alertTimeout = alertTimeout == null ? defaultAlertTimeout : alertTimeout;
this.alertTimeout = alertTimeout == null ? DEFAULT_ALERT_TIMEOUT : alertTimeout;
Preconditions.checkArgument(this.alertTimeout >= 0, "alertTimeout must be >= 0");
this.segmentWriteOutMediumFactory = segmentWriteOutMediumFactory;

@ -703,7 +703,7 @@ public class HadoopIndexTask extends HadoopTask implements ChatHandler
// can be injected based on the configuration given in config.getSchema().getIOConfig().getMetadataUpdateSpec()
final MetadataStorageUpdaterJobHandler maybeHandler;
if (config.isUpdaterJobSpecSet()) {
maybeHandler = injector.getInstance(MetadataStorageUpdaterJobHandler.class);
maybeHandler = INJECTOR.getInstance(MetadataStorageUpdaterJobHandler.class);
} else {
maybeHandler = null;
}
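
The hunks above all apply the same Checkstyle-driven rule: a static final field is treated as a constant, so its name moves from camelCase to UPPER_SNAKE_CASE, while instance fields keep camelCase. A minimal sketch of the resulting style, using a hypothetical class, field names, and default values rather than actual Druid code:

// Illustrative only; the class, fields, and values below are made up for this sketch.
public class ExampleTuningDefaults
{
  // static final fields are constants, so they take UPPER_SNAKE_CASE names
  private static final int DEFAULT_ROW_LIMIT = 75000;
  private static final String DEFAULT_VERSION = "v1";

  // instance fields keep camelCase; constants supply fallbacks, as in the constructors above
  private final int rowLimit;
  private final String version;

  public ExampleTuningDefaults(Integer rowLimit, String version)
  {
    this.rowLimit = rowLimit == null ? DEFAULT_ROW_LIMIT : rowLimit;
    this.version = version == null ? DEFAULT_VERSION : version;
  }
}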