mirror of https://github.com/apache/druid.git
Add Checkstyle for constant name static final (#8060)
* checkstyle for constant field name
* merging with upstream
* review-1
* unknown changes
* review-2
* merging with master
* review-2 1 changes
* review changes-2 2
* bug fix
parent d117bfb149
commit 33f0753a70
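The rule being enabled is Checkstyle's ConstantName check: `static final` fields must be named in UPPER_SNAKE_CASE, with an exemption for logger fields (see the checkstyle.xml hunk below). A minimal before/after sketch of what the check accepts — the class and field names here are illustrative, not taken from the patch:

```java
public class NamingExample
{
  // Before (now rejected by the ConstantName check):
  // private static final int numEvents = 100000;

  // After (accepted): constants use UPPER_SNAKE_CASE.
  private static final int NUM_EVENTS = 100000;

  // Exempted: the configured format also matches "log" and "logger",
  // so conventional lowercase logger fields remain legal.
  private static final java.util.logging.Logger log =
      java.util.logging.Logger.getLogger(NamingExample.class.getName());
}
```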
@@ -48,7 +48,7 @@ import java.util.concurrent.TimeUnit;
 @Fork(value = 1)
 public class FlattenJSONBenchmark
 {
-  private static final int numEvents = 100000;
+  private static final int NUM_EVENTS = 100000;
 
   List<String> flatInputs;
   List<String> nestedInputs;
@@ -67,15 +67,15 @@ public class FlattenJSONBenchmark
   {
     FlattenJSONBenchmarkUtil gen = new FlattenJSONBenchmarkUtil();
     flatInputs = new ArrayList<String>();
-    for (int i = 0; i < numEvents; i++) {
+    for (int i = 0; i < NUM_EVENTS; i++) {
       flatInputs.add(gen.generateFlatEvent());
     }
     nestedInputs = new ArrayList<String>();
-    for (int i = 0; i < numEvents; i++) {
+    for (int i = 0; i < NUM_EVENTS; i++) {
       nestedInputs.add(gen.generateNestedEvent());
     }
     jqInputs = new ArrayList<String>();
-    for (int i = 0; i < numEvents; i++) {
+    for (int i = 0; i < NUM_EVENTS; i++) {
       jqInputs.add(gen.generateNestedEvent()); // reuse the same event as "nested"
     }
 
@@ -95,7 +95,7 @@ public class FlattenJSONBenchmark
     for (String s : parsed.keySet()) {
       blackhole.consume(parsed.get(s));
     }
-    flatCounter = (flatCounter + 1) % numEvents;
+    flatCounter = (flatCounter + 1) % NUM_EVENTS;
     return parsed;
   }
 
@@ -108,7 +108,7 @@ public class FlattenJSONBenchmark
     for (String s : parsed.keySet()) {
       blackhole.consume(parsed.get(s));
     }
-    nestedCounter = (nestedCounter + 1) % numEvents;
+    nestedCounter = (nestedCounter + 1) % NUM_EVENTS;
     return parsed;
   }
 
@@ -121,7 +121,7 @@ public class FlattenJSONBenchmark
     for (String s : parsed.keySet()) {
       blackhole.consume(parsed.get(s));
     }
-    jqCounter = (jqCounter + 1) % numEvents;
+    jqCounter = (jqCounter + 1) % NUM_EVENTS;
     return parsed;
   }
 
@@ -134,7 +134,7 @@ public class FlattenJSONBenchmark
     for (String s : parsed.keySet()) {
       blackhole.consume(parsed.get(s));
     }
-    nestedCounter = (nestedCounter + 1) % numEvents;
+    nestedCounter = (nestedCounter + 1) % NUM_EVENTS;
     return parsed;
   }
 
@@ -147,7 +147,7 @@ public class FlattenJSONBenchmark
     for (String s : parsed.keySet()) {
       blackhole.consume(parsed.get(s));
     }
-    nestedCounter = (nestedCounter + 1) % numEvents;
+    nestedCounter = (nestedCounter + 1) % NUM_EVENTS;
     return parsed;
   }
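The hunks above are JMH benchmarks, which is why every measured value is passed through `Blackhole.consume(...)`: it keeps the JIT from eliminating the measured work as dead code. A self-contained sketch of the same shape (a hypothetical benchmark, not part of the patch):

```java
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.infra.Blackhole;

@Fork(value = 1)
public class JmhShapeExample
{
  // Constant named per the new ConstantName check.
  private static final int NUM_EVENTS = 100000;

  @Benchmark
  public void measure(Blackhole blackhole)
  {
    long acc = 0;
    for (int i = 0; i < NUM_EVENTS; i++) {
      acc += i;
    }
    blackhole.consume(acc); // prevents dead-code elimination of the loop
  }
}
```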
@@ -46,7 +46,7 @@ public class FloatCompressionBenchmarkFileGenerator
 {
   private static final Logger log = new Logger(FloatCompressionBenchmarkFileGenerator.class);
   public static final int ROW_NUM = 5000000;
-  public static final List<CompressionStrategy> compressions =
+  public static final List<CompressionStrategy> COMPRESSIONS =
       ImmutableList.of(
           CompressionStrategy.LZ4,
           CompressionStrategy.NONE
@@ -138,7 +138,7 @@ public class FloatCompressionBenchmarkFileGenerator
 
     // create compressed files using all combinations of CompressionStrategy and FloatEncoding provided
     for (Map.Entry<String, BenchmarkColumnValueGenerator> entry : generators.entrySet()) {
-      for (CompressionStrategy compression : compressions) {
+      for (CompressionStrategy compression : COMPRESSIONS) {
         String name = entry.getKey() + "-" + compression;
         log.info("%s: ", name);
         File compFile = new File(dir, name);
@@ -62,7 +62,7 @@ public class GenericIndexedBenchmark
 {
   public static final int ITERATIONS = 10000;
 
-  static final ObjectStrategy<byte[]> byteArrayStrategy = new ObjectStrategy<byte[]>()
+  static final ObjectStrategy<byte[]> BYTE_ARRAY_STRATEGY = new ObjectStrategy<byte[]>()
   {
     @Override
     public Class<byte[]> getClazz()
@@ -108,7 +108,7 @@ public class GenericIndexedBenchmark
     GenericIndexedWriter<byte[]> genericIndexedWriter = new GenericIndexedWriter<>(
         new OffHeapMemorySegmentWriteOutMedium(),
         "genericIndexedBenchmark",
-        byteArrayStrategy
+        BYTE_ARRAY_STRATEGY
     );
     genericIndexedWriter.open();
 
@@ -132,7 +132,7 @@ public class GenericIndexedBenchmark
 
     FileChannel fileChannel = FileChannel.open(file.toPath());
     MappedByteBuffer byteBuffer = fileChannel.map(FileChannel.MapMode.READ_ONLY, 0, file.length());
-    genericIndexed = GenericIndexed.read(byteBuffer, byteArrayStrategy, SmooshedFileMapper.load(smooshDir));
+    genericIndexed = GenericIndexed.read(byteBuffer, BYTE_ARRAY_STRATEGY, SmooshedFileMapper.load(smooshDir));
   }
 
   @Setup(Level.Trial)
@@ -52,8 +52,8 @@ public class IncrementalIndexRowTypeBenchmark
   private IncrementalIndex incFloatIndex;
   private IncrementalIndex incStrIndex;
   private static AggregatorFactory[] aggs;
-  static final int dimensionCount = 8;
-  static final int maxRows = 250000;
+  static final int DIMENSION_COUNT = 8;
+  static final int MAX_ROWS = 250000;
 
   private ArrayList<InputRow> longRows = new ArrayList<InputRow>();
   private ArrayList<InputRow> floatRows = new ArrayList<InputRow>();
@@ -61,9 +61,9 @@ public class IncrementalIndexRowTypeBenchmark
 
 
   static {
-    final ArrayList<AggregatorFactory> ingestAggregatorFactories = new ArrayList<>(dimensionCount + 1);
+    final ArrayList<AggregatorFactory> ingestAggregatorFactories = new ArrayList<>(DIMENSION_COUNT + 1);
     ingestAggregatorFactories.add(new CountAggregatorFactory("rows"));
-    for (int i = 0; i < dimensionCount; ++i) {
+    for (int i = 0; i < DIMENSION_COUNT; ++i) {
       ingestAggregatorFactories.add(
           new LongSumAggregatorFactory(
               StringUtils.format("sumResult%s", i),
@@ -125,23 +125,23 @@ public class IncrementalIndexRowTypeBenchmark
         .setSimpleTestingIndexSchema(aggs)
         .setDeserializeComplexMetrics(false)
         .setReportParseExceptions(false)
-        .setMaxRowCount(maxRows)
+        .setMaxRowCount(MAX_ROWS)
         .buildOnheap();
   }
 
   @Setup
   public void setup()
   {
-    for (int i = 0; i < maxRows; i++) {
-      longRows.add(getLongRow(0, dimensionCount));
+    for (int i = 0; i < MAX_ROWS; i++) {
+      longRows.add(getLongRow(0, DIMENSION_COUNT));
     }
 
-    for (int i = 0; i < maxRows; i++) {
-      floatRows.add(getFloatRow(0, dimensionCount));
+    for (int i = 0; i < MAX_ROWS; i++) {
+      floatRows.add(getFloatRow(0, DIMENSION_COUNT));
     }
 
-    for (int i = 0; i < maxRows; i++) {
-      stringRows.add(getStringRow(0, dimensionCount));
+    for (int i = 0; i < MAX_ROWS; i++) {
+      stringRows.add(getStringRow(0, DIMENSION_COUNT));
     }
   }
 
@@ -156,10 +156,10 @@ public class IncrementalIndexRowTypeBenchmark
   @Benchmark
   @BenchmarkMode(Mode.AverageTime)
   @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  @OperationsPerInvocation(maxRows)
+  @OperationsPerInvocation(MAX_ROWS)
   public void normalLongs(Blackhole blackhole) throws Exception
   {
-    for (int i = 0; i < maxRows; i++) {
+    for (int i = 0; i < MAX_ROWS; i++) {
       InputRow row = longRows.get(i);
       int rv = incIndex.add(row).getRowCount();
       blackhole.consume(rv);
@@ -169,10 +169,10 @@ public class IncrementalIndexRowTypeBenchmark
   @Benchmark
   @BenchmarkMode(Mode.AverageTime)
   @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  @OperationsPerInvocation(maxRows)
+  @OperationsPerInvocation(MAX_ROWS)
   public void normalFloats(Blackhole blackhole) throws Exception
   {
-    for (int i = 0; i < maxRows; i++) {
+    for (int i = 0; i < MAX_ROWS; i++) {
       InputRow row = floatRows.get(i);
       int rv = incFloatIndex.add(row).getRowCount();
       blackhole.consume(rv);
@@ -182,10 +182,10 @@ public class IncrementalIndexRowTypeBenchmark
   @Benchmark
   @BenchmarkMode(Mode.AverageTime)
   @OutputTimeUnit(TimeUnit.MICROSECONDS)
-  @OperationsPerInvocation(maxRows)
+  @OperationsPerInvocation(MAX_ROWS)
   public void normalStrings(Blackhole blackhole) throws Exception
   {
-    for (int i = 0; i < maxRows; i++) {
+    for (int i = 0; i < MAX_ROWS; i++) {
       InputRow row = stringRows.get(i);
       int rv = incStrIndex.add(row).getRowCount();
       blackhole.consume(rv);
@@ -46,11 +46,11 @@ public class LongCompressionBenchmarkFileGenerator
 {
   private static final Logger log = new Logger(LongCompressionBenchmarkFileGenerator.class);
   public static final int ROW_NUM = 5000000;
-  public static final List<CompressionStrategy> compressions =
+  public static final List<CompressionStrategy> COMPRESSIONS =
       ImmutableList.of(
           CompressionStrategy.LZ4,
           CompressionStrategy.NONE);
-  public static final List<CompressionFactory.LongEncodingStrategy> encodings =
+  public static final List<CompressionFactory.LongEncodingStrategy> ENCODINGS =
       ImmutableList.of(CompressionFactory.LongEncodingStrategy.AUTO, CompressionFactory.LongEncodingStrategy.LONGS);
 
   private static String dirPath = "longCompress/";
@@ -130,8 +130,8 @@ public class LongCompressionBenchmarkFileGenerator
 
     // create compressed files using all combinations of CompressionStrategy and LongEncoding provided
     for (Map.Entry<String, BenchmarkColumnValueGenerator> entry : generators.entrySet()) {
-      for (CompressionStrategy compression : compressions) {
-        for (CompressionFactory.LongEncodingStrategy encoding : encodings) {
+      for (CompressionStrategy compression : COMPRESSIONS) {
+        for (CompressionFactory.LongEncodingStrategy encoding : ENCODINGS) {
           String name = entry.getKey() + "-" + compression + "-" + encoding;
           log.info("%s: ", name);
           File compFile = new File(dir, name);
@@ -36,7 +36,7 @@ import java.util.concurrent.atomic.AtomicLong;
 
 public class StupidPoolConcurrencyBenchmark
 {
-  private static final Object simpleObject = new Object();
+  private static final Object SIMPLE_OBJECT = new Object();
 
   @State(Scope.Benchmark)
   public static class BenchmarkPool
@@ -50,7 +50,7 @@ public class StupidPoolConcurrencyBenchmark
           public Object get()
           {
             numPools.incrementAndGet();
-            return simpleObject;
+            return SIMPLE_OBJECT;
           }
         }
     );
@@ -116,12 +116,12 @@ public class TimeCompareBenchmark
   @Param({"100"})
   private int threshold;
 
-  protected static final Map<String, String> scriptDoubleSum = new HashMap<>();
+  protected static final Map<String, String> SCRIPT_DOUBLE_SUM = new HashMap<>();
 
   static {
-    scriptDoubleSum.put("fnAggregate", "function aggregate(current, a) { return current + a }");
-    scriptDoubleSum.put("fnReset", "function reset() { return 0 }");
-    scriptDoubleSum.put("fnCombine", "function combine(a,b) { return a + b }");
+    SCRIPT_DOUBLE_SUM.put("fnAggregate", "function aggregate(current, a) { return current + a }");
+    SCRIPT_DOUBLE_SUM.put("fnReset", "function reset() { return 0 }");
+    SCRIPT_DOUBLE_SUM.put("fnCombine", "function combine(a,b) { return a + b }");
   }
 
   private static final Logger log = new Logger(TimeCompareBenchmark.class);
@@ -41,7 +41,7 @@ import java.util.concurrent.TimeUnit;
 @State(Scope.Benchmark)
 public class CostBalancerStrategyBenchmark
 {
-  private static final DateTime t0 = DateTimes.of("2016-01-01T01:00:00Z");
+  private static final DateTime T0 = DateTimes.of("2016-01-01T01:00:00Z");
 
   private List<DataSegment> segments;
   private DataSegment segment;
@@ -55,12 +55,12 @@ public class CostBalancerStrategyBenchmark
   @Setup
   public void setupDummyCluster()
   {
-    segment = createSegment(t0);
+    segment = createSegment(T0);
 
     Random r = ThreadLocalRandom.current();
    segments = new ArrayList<>(n);
    for (int i = 0; i < n; ++i) {
-      final DateTime t = t0.minusHours(r.nextInt(365 * 24) - 365 * 12);
+      final DateTime t = T0.minusHours(r.nextInt(365 * 24) - 365 * 12);
      segments.add(createSegment(t));
    }
  }
@@ -48,6 +48,13 @@
   <suppress checks="Indentation" files="[\\/]target[\\/]generated-test-sources[\\/]" />
   <suppress checks="Indentation" files="ProtoTestEventWrapper.java" />
   <suppress checks="Regexp" id="argumentLineBreaking" files="ProtoTestEventWrapper.java" />
+  <suppress checks="ConstantName" files="ProtoTestEventWrapper.java" />
+  <suppress checks="ConstantName" files="MySubRecord.java" />
+  <suppress checks="ConstantName" files="SomeAvroDatum.java" />
+  <suppress checks="ConstantName" files="MyFixed.java" />
+  <suppress checks="ConstantName" files="MyEnum.java" />
+  <suppress checks="ConstantName" files="Author" />
+  <suppress checks="ConstantName" files="Book" />
 
   <suppress checks="OneStatementPerLine" files="[\\/]target[\\/]generated-test-sources[\\/]" />
@@ -319,5 +319,8 @@ codestyle/checkstyle.xml. "/>
     <module name="LocalFinalVariableName">
       <property name="format" value="^[a-z_]*[a-z0-9][a-zA-Z0-9_]*$"/>
     </module>
+    <module name="ConstantName">
+      <property name="format" value="^log(ger)?$|^[A-Z][A-Z0-9]*(_[A-Z0-9]+)*$"/>
+    </module>
   </module>
 </module>
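The `format` above is the heart of the change: a constant name must either be `log`/`logger` or match UPPER_SNAKE_CASE. A quick way to sanity-check the pattern (the sample names are illustrative):

```java
import java.util.regex.Pattern;

public class ConstantNameRegexCheck
{
  public static void main(String[] args)
  {
    Pattern p = Pattern.compile("^log(ger)?$|^[A-Z][A-Z0-9]*(_[A-Z0-9]+)*$");
    System.out.println(p.matcher("NUM_EVENTS").matches()); // true
    System.out.println(p.matcher("T0").matches());         // true: single-word constants are allowed
    System.out.println(p.matcher("log").matches());        // true: logger exemption
    System.out.println(p.matcher("logger").matches());     // true: logger exemption
    System.out.println(p.matcher("numEvents").matches());  // false: camelCase is rejected
  }
}
```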
@@ -33,11 +33,11 @@ public class ReferenceCountingResourceHolder<T> implements ResourceHolder<T>
 {
   private static final Logger log = new Logger(ReferenceCountingResourceHolder.class);
 
-  private static final AtomicLong leakedResources = new AtomicLong();
+  private static final AtomicLong LEAKED_RESOURCES = new AtomicLong();
 
   public static long leakedResources()
   {
-    return leakedResources.get();
+    return LEAKED_RESOURCES.get();
   }
 
   private final T object;
@@ -164,7 +164,7 @@ public class ReferenceCountingResourceHolder<T> implements ResourceHolder<T>
       }
       if (refCount.compareAndSet(count, 0)) {
         try {
-          leakedResources.incrementAndGet();
+          LEAKED_RESOURCES.incrementAndGet();
           closer.close();
           return;
         }
@@ -53,7 +53,7 @@ public class TimestampSpec
   private final Function<Object, DateTime> timestampConverter;
 
   // remember last value parsed
-  private static final ThreadLocal<ParseCtx> parseCtx = ThreadLocal.withInitial(ParseCtx::new);
+  private static final ThreadLocal<ParseCtx> PARSE_CTX = ThreadLocal.withInitial(ParseCtx::new);
 
   @JsonCreator
   public TimestampSpec(
@@ -98,7 +98,7 @@ public class TimestampSpec
   {
     DateTime extracted = missingValue;
     if (input != null) {
-      ParseCtx ctx = parseCtx.get();
+      ParseCtx ctx = PARSE_CTX.get();
       // Check if the input is equal to the last input, so we don't need to parse it again
       if (input.equals(ctx.lastTimeObject)) {
         extracted = ctx.lastDateTime;
@@ -107,7 +107,7 @@ public class TimestampSpec
         ParseCtx newCtx = new ParseCtx();
         newCtx.lastTimeObject = input;
         newCtx.lastDateTime = extracted;
-        parseCtx.set(newCtx);
+        PARSE_CTX.set(newCtx);
       }
     }
     return extracted;
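The TimestampSpec hunks also show a per-thread memoization idiom: a `ThreadLocal` holds the last (input, parsed value) pair so that a run of identical timestamps is parsed only once. A simplified, self-contained sketch of that idiom — `ParseCtx` here and the parse step are stand-ins, not Druid's real types:

```java
// Per-thread "remember the last value parsed" cache, as in TimestampSpec above.
final class CachedParser
{
  private static final class ParseCtx
  {
    String lastInput;
    long lastResult;
  }

  private static final ThreadLocal<ParseCtx> PARSE_CTX = ThreadLocal.withInitial(ParseCtx::new);

  static long parse(String input)
  {
    ParseCtx ctx = PARSE_CTX.get();
    if (input.equals(ctx.lastInput)) {
      return ctx.lastResult; // repeated input: skip the expensive parse
    }
    long result = Long.parseLong(input); // stand-in for real timestamp parsing
    ParseCtx newCtx = new ParseCtx();
    newCtx.lastInput = input;
    newCtx.lastResult = result;
    PARSE_CTX.set(newCtx);
    return result;
  }
}
```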
@@ -31,7 +31,7 @@ import java.util.List;
  */
 public class CommaListJoinSerializer extends StdScalarSerializer<List<String>>
 {
-  private static final Joiner joiner = Joiner.on(",");
+  private static final Joiner JOINER = Joiner.on(",");
 
   protected CommaListJoinSerializer()
   {
@@ -41,6 +41,6 @@ public class CommaListJoinSerializer extends StdScalarSerializer<List<String>>
   @Override
   public void serialize(List<String> value, JsonGenerator jgen, SerializerProvider provider) throws IOException
   {
-    jgen.writeString(joiner.join(value));
+    jgen.writeString(JOINER.join(value));
   }
 }
@@ -43,11 +43,11 @@ public abstract class Granularity implements Cacheable
   /**
    * Default patterns for parsing paths.
    */
-  private static final Pattern defaultPathPattern =
+  private static final Pattern DEFAULT_PATH_PATTERN =
       Pattern.compile(
           "^.*[Yy]=(\\d{4})/(?:[Mm]=(\\d{2})/(?:[Dd]=(\\d{2})/(?:[Hh]=(\\d{2})/(?:[Mm]=(\\d{2})/(?:[Ss]=(\\d{2})/)?)?)?)?)?.*$"
       );
-  private static final Pattern hivePathPattern =
+  private static final Pattern HIVE_PATH_PATTERN =
       Pattern.compile("^.*dt=(\\d{4})(?:-(\\d{2})(?:-(\\d{2})(?:-(\\d{2})(?:-(\\d{2})(?:-(\\d{2})?)?)?)?)?)?/.*$");
 
   @JsonCreator
@@ -150,13 +150,13 @@ public abstract class Granularity implements Cacheable
   // Used by the toDate implementations.
   final Integer[] getDateValues(String filePath, Formatter formatter)
   {
-    Pattern pattern = defaultPathPattern;
+    Pattern pattern = DEFAULT_PATH_PATTERN;
     switch (formatter) {
       case DEFAULT:
       case LOWER_DEFAULT:
         break;
       case HIVE:
-        pattern = hivePathPattern;
+        pattern = HIVE_PATH_PATTERN;
         break;
       default:
         throw new IAE("Format %s not supported", formatter);
@@ -70,7 +70,7 @@ import java.util.TreeMap;
 public class FileSmoosher implements Closeable
 {
   private static final String FILE_EXTENSION = "smoosh";
-  private static final Joiner joiner = Joiner.on(",");
+  private static final Joiner JOINER = Joiner.on(",");
   private static final Logger LOG = new Logger(FileSmoosher.class);
 
   private final File baseDir;
@@ -376,7 +376,7 @@ public class FileSmoosher implements Closeable
       for (Map.Entry<String, Metadata> entry : internalFiles.entrySet()) {
         final Metadata metadata = entry.getValue();
         out.write(
-            joiner.join(
+            JOINER.join(
                 entry.getKey(),
                 metadata.getFileNum(),
                 metadata.getStartOffset(),
@@ -47,7 +47,7 @@ import java.util.Set;
 @Deprecated
 public class JSONToLowerParser implements Parser<String, Object>
 {
-  private static final Function<JsonNode, Object> valueFunction = new Function<JsonNode, Object>()
+  private static final Function<JsonNode, Object> VALUE_FUNCTION = new Function<JsonNode, Object>()
   {
     @Override
     public Object apply(JsonNode node)
@@ -128,14 +128,14 @@ public class JSONToLowerParser implements Parser<String, Object>
       if (node.isArray()) {
         final List<Object> nodeValue = Lists.newArrayListWithExpectedSize(node.size());
         for (final JsonNode subnode : node) {
-          final Object subnodeValue = valueFunction.apply(subnode);
+          final Object subnodeValue = VALUE_FUNCTION.apply(subnode);
           if (subnodeValue != null) {
             nodeValue.add(subnodeValue);
           }
         }
         map.put(StringUtils.toLowerCase(key), nodeValue); // difference from JSONParser parse()
       } else {
-        final Object nodeValue = valueFunction.apply(node);
+        final Object nodeValue = VALUE_FUNCTION.apply(node);
         if (nodeValue != null) {
           map.put(StringUtils.toLowerCase(key), nodeValue); // difference from JSONParser parse()
         }
@@ -90,7 +90,7 @@ public class HttpPostEmitter implements Flushable, Closeable, Emitter
   private static final byte[] LARGE_EVENTS_STOP = new byte[]{};
 
   private static final Logger log = new Logger(HttpPostEmitter.class);
-  private static final AtomicInteger instanceCounter = new AtomicInteger();
+  private static final AtomicInteger INSTANCE_COUNTER = new AtomicInteger();
 
   final BatchingStrategy batchingStrategy;
   final HttpEmitterConfig config;
@@ -484,7 +484,7 @@ public class HttpPostEmitter implements Flushable, Closeable, Emitter
 
     EmittingThread(HttpEmitterConfig config)
     {
-      super("HttpPostEmitter-" + instanceCounter.incrementAndGet());
+      super("HttpPostEmitter-" + INSTANCE_COUNTER.incrementAndGet());
       setDaemon(true);
       timeoutLessThanMinimumException = new TimeoutException(
           "Timeout less than minimum [" + config.getMinHttpTimeoutMillis() + "] ms."
@@ -44,7 +44,7 @@ import java.util.Map;
  */
 public class Request
 {
-  private static final ChannelBufferFactory factory = HeapChannelBufferFactory.getInstance();
+  private static final ChannelBufferFactory FACTORY = HeapChannelBufferFactory.getInstance();
 
   private final HttpMethod method;
   private final URL url;
@@ -147,7 +147,7 @@ public class Request
 
   public Request setContent(String contentType, byte[] bytes, int offset, int length)
   {
-    return setContent(contentType, factory.getBuffer(bytes, offset, length));
+    return setContent(contentType, FACTORY.getBuffer(bytes, offset, length));
   }
 
   public Request setContent(String contentType, ChannelBuffer content)
@@ -73,12 +73,12 @@ public class SigarUtil
    */
   private static class CurrentProcessIdHolder
   {
-    private static final long currentProcessId = new Sigar().getPid();
+    private static final long CURRENT_PROCESS_ID = new Sigar().getPid();
   }
 
   public static long getCurrentProcessId()
   {
-    return CurrentProcessIdHolder.currentProcessId;
+    return CurrentProcessIdHolder.CURRENT_PROCESS_ID;
   }
 
 }
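`CurrentProcessIdHolder` is the initialization-on-demand holder idiom: the JVM runs the expensive `new Sigar().getPid()` only when the holder class is first touched, and class initialization makes that lazy step thread-safe without locks. A sketch under that assumption, with a plain-JDK stand-in for the Sigar call:

```java
public final class LazyPid
{
  private static class Holder
  {
    // Stand-in for new Sigar().getPid(); ProcessHandle is plain JDK 9+.
    static final long CURRENT_PROCESS_ID = ProcessHandle.current().pid();
  }

  public static long getCurrentProcessId()
  {
    return Holder.CURRENT_PROCESS_ID; // first call triggers Holder's initialization
  }
}
```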
@@ -38,7 +38,7 @@ import java.util.List;
 
 public class HashBasedNumberedShardSpec extends NumberedShardSpec
 {
-  private static final HashFunction hashFunction = Hashing.murmur3_32();
+  private static final HashFunction HASH_FUNCTION = Hashing.murmur3_32();
   private static final List<String> DEFAULT_PARTITION_DIMENSIONS = ImmutableList.of();
 
   private final ObjectMapper jsonMapper;
@@ -100,7 +100,7 @@ public class HashBasedNumberedShardSpec extends NumberedShardSpec
   @VisibleForTesting
   public static int hash(ObjectMapper jsonMapper, List<Object> objects) throws JsonProcessingException
   {
-    return hashFunction.hashBytes(jsonMapper.writeValueAsBytes(objects)).asInt();
+    return HASH_FUNCTION.hashBytes(jsonMapper.writeValueAsBytes(objects)).asInt();
   }
 
   @Override
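The shard spec hashes the row's group key by serializing it to JSON bytes and feeding those to murmur3_32, as the `hash(...)` method above shows. A hedged, self-contained sketch of that scheme (class and method names are illustrative; shard selection details are omitted):

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import java.util.List;

public class ShardHashSketch
{
  private static final HashFunction HASH_FUNCTION = Hashing.murmur3_32();
  private static final ObjectMapper JSON_MAPPER = new ObjectMapper();

  // Hash of the JSON-serialized group key, as in HashBasedNumberedShardSpec.hash().
  public static int hash(List<Object> groupKey) throws Exception
  {
    return HASH_FUNCTION.hashBytes(JSON_MAPPER.writeValueAsBytes(groupKey)).asInt();
  }

  public static void main(String[] args) throws Exception
  {
    System.out.println(hash(List.of("2019-07-01T00:00:00Z", "someDimValue")));
  }
}
```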
@@ -27,14 +27,14 @@ import java.io.IOException;
 
 public class SerializablePairTest
 {
-  private static final ObjectMapper jsonMapper = new ObjectMapper();
+  private static final ObjectMapper JSON_MAPPER = new ObjectMapper();
 
   @Test
   public void testBytesSerde() throws IOException
   {
     SerializablePair pair = new SerializablePair<>(5L, 9L);
-    byte[] bytes = jsonMapper.writeValueAsBytes(pair);
-    SerializablePair<Number, Number> deserializedPair = jsonMapper.readValue(bytes, SerializablePair.class);
+    byte[] bytes = JSON_MAPPER.writeValueAsBytes(pair);
+    SerializablePair<Number, Number> deserializedPair = JSON_MAPPER.readValue(bytes, SerializablePair.class);
     Assert.assertEquals(pair.lhs, deserializedPair.lhs.longValue());
     Assert.assertEquals(pair.rhs, deserializedPair.rhs.longValue());
   }
@@ -43,8 +43,8 @@ public class SerializablePairTest
   public void testStringSerde() throws IOException
   {
     SerializablePair pair = new SerializablePair<>(5L, 9L);
-    String str = jsonMapper.writeValueAsString(pair);
-    SerializablePair<Number, Number> deserializedPair = jsonMapper.readValue(str, SerializablePair.class);
+    String str = JSON_MAPPER.writeValueAsString(pair);
+    SerializablePair<Number, Number> deserializedPair = JSON_MAPPER.readValue(str, SerializablePair.class);
     Assert.assertEquals(pair.lhs, deserializedPair.lhs.longValue());
     Assert.assertEquals(pair.rhs, deserializedPair.rhs.longValue());
   }
@@ -64,7 +64,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
 {
   private static long FILE_SIZE = -1;
 
-  private static final StringInputRowParser parser = new StringInputRowParser(
+  private static final StringInputRowParser PARSER = new StringInputRowParser(
       new CSVParseSpec(
           new TimestampSpec(
               "timestamp",
@@ -163,7 +163,7 @@
 
     final List<Row> rows = new ArrayList<>();
     final File firehoseTmpDir = createFirehoseTmpDir("testWithoutCacheAndFetch");
-    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+    try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
       while (firehose.hasMore()) {
         rows.add(firehose.nextRow());
       }
@@ -182,7 +182,7 @@
 
     final List<Row> rows = new ArrayList<>();
     final File firehoseTmpDir = createFirehoseTmpDir("testWithoutCacheAndFetch");
-    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+    try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
       while (firehose.hasMore()) {
         rows.add(firehose.nextRow());
       }
@@ -201,7 +201,7 @@
 
     final List<Row> rows = new ArrayList<>();
     final File firehoseTmpDir = createFirehoseTmpDir("testWithoutCache");
-    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+    try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
       while (firehose.hasMore()) {
         rows.add(firehose.nextRow());
       }
@@ -220,7 +220,7 @@
 
     final List<Row> rows = new ArrayList<>();
     final File firehoseTmpDir = createFirehoseTmpDir("testWithZeroFetchCapacity");
-    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+    try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
       while (firehose.hasMore()) {
         rows.add(firehose.nextRow());
       }
@@ -238,7 +238,7 @@
 
     final List<Row> rows = new ArrayList<>();
     final File firehoseTmpDir = createFirehoseTmpDir("testWithCacheAndFetch");
-    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+    try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
       while (firehose.hasMore()) {
         rows.add(firehose.nextRow());
       }
@@ -256,7 +256,7 @@
 
     final List<Row> rows = new ArrayList<>();
     final File firehoseTmpDir = createFirehoseTmpDir("testWithLargeCacheAndSmallFetch");
-    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+    try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
       while (firehose.hasMore()) {
         rows.add(firehose.nextRow());
       }
@@ -274,7 +274,7 @@
 
     final List<Row> rows = new ArrayList<>();
     final File firehoseTmpDir = createFirehoseTmpDir("testWithSmallCacheAndLargeFetch");
-    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+    try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
       while (firehose.hasMore()) {
         rows.add(firehose.nextRow());
       }
@@ -292,7 +292,7 @@
 
     final List<Row> rows = new ArrayList<>();
     final File firehoseTmpDir = createFirehoseTmpDir("testRetry");
-    try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+    try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
       while (firehose.hasMore()) {
         rows.add(firehose.nextRow());
       }
@@ -312,7 +312,7 @@
     final TestPrefetchableTextFilesFirehoseFactory factory =
         TestPrefetchableTextFilesFirehoseFactory.withOpenExceptions(TEST_DIR, 5);
 
-    try (Firehose firehose = factory.connect(parser, createFirehoseTmpDir("testMaxRetry"))) {
+    try (Firehose firehose = factory.connect(PARSER, createFirehoseTmpDir("testMaxRetry"))) {
       while (firehose.hasMore()) {
         firehose.nextRow();
       }
@@ -328,7 +328,7 @@
     final TestPrefetchableTextFilesFirehoseFactory factory =
         TestPrefetchableTextFilesFirehoseFactory.withSleepMillis(TEST_DIR, 1000);
 
-    try (Firehose firehose = factory.connect(parser, createFirehoseTmpDir("testTimeout"))) {
+    try (Firehose firehose = factory.connect(PARSER, createFirehoseTmpDir("testTimeout"))) {
       while (firehose.hasMore()) {
         firehose.nextRow();
       }
@@ -344,7 +344,7 @@
 
     for (int i = 0; i < 5; i++) {
       final List<Row> rows = new ArrayList<>();
-      try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+      try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
         if (i > 0) {
           Assert.assertEquals(FILE_SIZE * 2, factory.getCacheManager().getTotalCachedBytes());
         }
@@ -367,7 +367,7 @@
 
     for (int i = 0; i < 5; i++) {
       final List<Row> rows = new ArrayList<>();
-      try (Firehose firehose = factory.connect(parser, firehoseTmpDir)) {
+      try (Firehose firehose = factory.connect(PARSER, firehoseTmpDir)) {
        if (i > 0) {
          Assert.assertEquals(FILE_SIZE * 2, factory.getCacheManager().getTotalCachedBytes());
        }
@ -62,9 +62,9 @@ import java.util.zip.ZipOutputStream;
|
||||||
|
|
||||||
public class CompressionUtilsTest
|
public class CompressionUtilsTest
|
||||||
{
|
{
|
||||||
private static final String content;
|
private static final String CONTENT;
|
||||||
private static final byte[] expected;
|
private static final byte[] EXPECTED;
|
||||||
private static final byte[] gzBytes;
|
private static final byte[] GZ_BYTES;
|
||||||
|
|
||||||
static {
|
static {
|
||||||
final StringBuilder builder = new StringBuilder();
|
final StringBuilder builder = new StringBuilder();
|
||||||
|
@ -79,19 +79,19 @@ public class CompressionUtilsTest
|
||||||
catch (IOException e) {
|
catch (IOException e) {
|
||||||
throw new RuntimeException(e);
|
throw new RuntimeException(e);
|
||||||
}
|
}
|
||||||
content = builder.toString();
|
CONTENT = builder.toString();
|
||||||
expected = StringUtils.toUtf8(content);
|
EXPECTED = StringUtils.toUtf8(CONTENT);
|
||||||
|
|
||||||
final ByteArrayOutputStream gzByteStream = new ByteArrayOutputStream(expected.length);
|
final ByteArrayOutputStream gzByteStream = new ByteArrayOutputStream(EXPECTED.length);
|
||||||
try (GZIPOutputStream outputStream = new GZIPOutputStream(gzByteStream)) {
|
try (GZIPOutputStream outputStream = new GZIPOutputStream(gzByteStream)) {
|
||||||
try (ByteArrayInputStream in = new ByteArrayInputStream(expected)) {
|
try (ByteArrayInputStream in = new ByteArrayInputStream(EXPECTED)) {
|
||||||
ByteStreams.copy(in, outputStream);
|
ByteStreams.copy(in, outputStream);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
catch (IOException e) {
|
catch (IOException e) {
|
||||||
throw new RuntimeException(e);
|
throw new RuntimeException(e);
|
||||||
}
|
}
|
||||||
gzBytes = gzByteStream.toByteArray();
|
GZ_BYTES = gzByteStream.toByteArray();
|
||||||
}
|
}
|
||||||
|
|
||||||
@Rule
|
@Rule
|
||||||
|
@ -101,9 +101,9 @@ public class CompressionUtilsTest
|
||||||
|
|
||||||
public static void assertGoodDataStream(InputStream stream) throws IOException
|
public static void assertGoodDataStream(InputStream stream) throws IOException
|
||||||
{
|
{
|
||||||
try (final ByteArrayOutputStream bos = new ByteArrayOutputStream(expected.length)) {
|
try (final ByteArrayOutputStream bos = new ByteArrayOutputStream(EXPECTED.length)) {
|
||||||
ByteStreams.copy(stream, bos);
|
ByteStreams.copy(stream, bos);
|
||||||
Assert.assertArrayEquals(expected, bos.toByteArray());
|
Assert.assertArrayEquals(EXPECTED, bos.toByteArray());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -113,7 +113,7 @@ public class CompressionUtilsTest
|
||||||
testDir = temporaryFolder.newFolder("testDir");
|
testDir = temporaryFolder.newFolder("testDir");
|
||||||
testFile = new File(testDir, "test.dat");
|
testFile = new File(testDir, "test.dat");
|
||||||
try (OutputStream outputStream = new FileOutputStream(testFile)) {
|
try (OutputStream outputStream = new FileOutputStream(testFile)) {
|
||||||
outputStream.write(StringUtils.toUtf8(content));
|
outputStream.write(StringUtils.toUtf8(CONTENT));
|
||||||
}
|
}
|
||||||
Assert.assertTrue(testFile.getParentFile().equals(testDir));
|
Assert.assertTrue(testFile.getParentFile().equals(testDir));
|
||||||
}
|
}
|
||||||
|
@ -395,35 +395,35 @@ public class CompressionUtilsTest
|
||||||
{
|
{
|
||||||
try (OutputStream outputStream = new FileOutputStream(testFile)) {
|
try (OutputStream outputStream = new FileOutputStream(testFile)) {
|
||||||
Assert.assertEquals(
|
Assert.assertEquals(
|
||||||
gzBytes.length,
|
GZ_BYTES.length,
|
||||||
ByteStreams.copy(
|
ByteStreams.copy(
|
||||||
new ZeroRemainingInputStream(new ByteArrayInputStream(gzBytes)),
|
new ZeroRemainingInputStream(new ByteArrayInputStream(GZ_BYTES)),
|
||||||
outputStream
|
outputStream
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
Assert.assertEquals(
|
Assert.assertEquals(
|
||||||
gzBytes.length,
|
GZ_BYTES.length,
|
||||||
ByteStreams.copy(
|
ByteStreams.copy(
|
||||||
new ZeroRemainingInputStream(new ByteArrayInputStream(gzBytes)),
|
new ZeroRemainingInputStream(new ByteArrayInputStream(GZ_BYTES)),
|
||||||
outputStream
|
outputStream
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
Assert.assertEquals(
|
Assert.assertEquals(
|
||||||
gzBytes.length,
|
GZ_BYTES.length,
|
||||||
ByteStreams.copy(
|
ByteStreams.copy(
|
||||||
new ZeroRemainingInputStream(new ByteArrayInputStream(gzBytes)),
|
new ZeroRemainingInputStream(new ByteArrayInputStream(GZ_BYTES)),
|
||||||
outputStream
|
outputStream
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
Assert.assertEquals(gzBytes.length * 3, testFile.length());
|
Assert.assertEquals(GZ_BYTES.length * 3, testFile.length());
|
||||||
try (InputStream inputStream = new ZeroRemainingInputStream(new FileInputStream(testFile))) {
|
try (InputStream inputStream = new ZeroRemainingInputStream(new FileInputStream(testFile))) {
|
||||||
for (int i = 0; i < 3; ++i) {
|
for (int i = 0; i < 3; ++i) {
|
||||||
final byte[] bytes = new byte[gzBytes.length];
|
final byte[] bytes = new byte[GZ_BYTES.length];
|
||||||
Assert.assertEquals(bytes.length, inputStream.read(bytes));
|
Assert.assertEquals(bytes.length, inputStream.read(bytes));
|
||||||
Assert.assertArrayEquals(
|
Assert.assertArrayEquals(
|
||||||
StringUtils.format("Failed on range %d", i),
|
StringUtils.format("Failed on range %d", i),
|
||||||
gzBytes,
|
GZ_BYTES,
|
||||||
bytes
|
bytes
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
@ -435,10 +435,10 @@ public class CompressionUtilsTest
|
||||||
// http://bugs.java.com/bugdatabase/view_bug.do?bug_id=7036144
|
// http://bugs.java.com/bugdatabase/view_bug.do?bug_id=7036144
|
||||||
public void testGunzipBug() throws IOException
|
public void testGunzipBug() throws IOException
|
||||||
{
|
{
|
||||||
final ByteArrayOutputStream tripleGzByteStream = new ByteArrayOutputStream(gzBytes.length * 3);
|
final ByteArrayOutputStream tripleGzByteStream = new ByteArrayOutputStream(GZ_BYTES.length * 3);
|
||||||
tripleGzByteStream.write(gzBytes);
|
tripleGzByteStream.write(GZ_BYTES);
|
||||||
tripleGzByteStream.write(gzBytes);
|
tripleGzByteStream.write(GZ_BYTES);
|
||||||
tripleGzByteStream.write(gzBytes);
|
tripleGzByteStream.write(GZ_BYTES);
|
||||||
try (final InputStream inputStream = new GZIPInputStream(
|
try (final InputStream inputStream = new GZIPInputStream(
|
||||||
new ZeroRemainingInputStream(
|
new ZeroRemainingInputStream(
|
||||||
new ByteArrayInputStream(
|
new ByteArrayInputStream(
|
||||||
|
@ -446,17 +446,17 @@ public class CompressionUtilsTest
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
)) {
|
)) {
|
||||||
try (final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(expected.length * 3)) {
|
try (final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(EXPECTED.length * 3)) {
|
||||||
Assert.assertEquals(
|
Assert.assertEquals(
|
||||||
"Read terminated too soon (bug 7036144)",
|
"Read terminated too soon (bug 7036144)",
|
||||||
expected.length * 3,
|
EXPECTED.length * 3,
|
||||||
ByteStreams.copy(inputStream, outputStream)
|
ByteStreams.copy(inputStream, outputStream)
|
||||||
);
|
);
|
||||||
final byte[] found = outputStream.toByteArray();
|
final byte[] found = outputStream.toByteArray();
|
||||||
Assert.assertEquals(expected.length * 3, found.length);
|
Assert.assertEquals(EXPECTED.length * 3, found.length);
|
||||||
Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 0, expected.length * 1));
|
Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 0, EXPECTED.length * 1));
|
||||||
Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 1, expected.length * 2));
|
Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 1, EXPECTED.length * 2));
|
||||||
Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 2, expected.length * 3));
|
Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 2, EXPECTED.length * 3));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -468,10 +468,10 @@ public class CompressionUtilsTest
|
||||||
testFile.delete();
|
testFile.delete();
|
||||||
Assert.assertFalse(testFile.exists());
|
Assert.assertFalse(testFile.exists());
|
||||||
|
|
||||||
final ByteArrayOutputStream tripleGzByteStream = new ByteArrayOutputStream(gzBytes.length * 3);
|
final ByteArrayOutputStream tripleGzByteStream = new ByteArrayOutputStream(GZ_BYTES.length * 3);
|
||||||
tripleGzByteStream.write(gzBytes);
|
tripleGzByteStream.write(GZ_BYTES);
|
||||||
tripleGzByteStream.write(gzBytes);
|
tripleGzByteStream.write(GZ_BYTES);
|
||||||
tripleGzByteStream.write(gzBytes);
|
tripleGzByteStream.write(GZ_BYTES);
|
||||||
|
|
||||||
final ByteSource inputStreamFactory = new ByteSource()
|
final ByteSource inputStreamFactory = new ByteSource()
|
||||||
{
|
{
|
||||||
|
@ -482,20 +482,20 @@ public class CompressionUtilsTest
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
Assert.assertEquals((long) (expected.length * 3), CompressionUtils.gunzip(inputStreamFactory, testFile).size());
|
Assert.assertEquals((long) (EXPECTED.length * 3), CompressionUtils.gunzip(inputStreamFactory, testFile).size());
|
||||||
|
|
||||||
try (final InputStream inputStream = new FileInputStream(testFile)) {
|
try (final InputStream inputStream = new FileInputStream(testFile)) {
|
||||||
try (final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(expected.length * 3)) {
|
try (final ByteArrayOutputStream outputStream = new ByteArrayOutputStream(EXPECTED.length * 3)) {
|
||||||
 Assert.assertEquals(
 "Read terminated too soon (7036144)",
-expected.length * 3,
+EXPECTED.length * 3,
 ByteStreams.copy(inputStream, outputStream)
 );
 final byte[] found = outputStream.toByteArray();
-Assert.assertEquals(expected.length * 3, found.length);
-Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 0, expected.length * 1));
-Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 1, expected.length * 2));
-Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 2, expected.length * 3));
+Assert.assertEquals(EXPECTED.length * 3, found.length);
+Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 0, EXPECTED.length * 1));
+Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 1, EXPECTED.length * 2));
+Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 2, EXPECTED.length * 3));
 }
 }
 }

@@ -505,14 +505,14 @@ public class CompressionUtilsTest
 public void testGunzipBugStreamWorkarround() throws IOException
 {

-final ByteArrayOutputStream tripleGzByteStream = new ByteArrayOutputStream(gzBytes.length * 3);
-tripleGzByteStream.write(gzBytes);
-tripleGzByteStream.write(gzBytes);
-tripleGzByteStream.write(gzBytes);
+final ByteArrayOutputStream tripleGzByteStream = new ByteArrayOutputStream(GZ_BYTES.length * 3);
+tripleGzByteStream.write(GZ_BYTES);
+tripleGzByteStream.write(GZ_BYTES);
+tripleGzByteStream.write(GZ_BYTES);

-try (ByteArrayOutputStream bos = new ByteArrayOutputStream(expected.length * 3)) {
+try (ByteArrayOutputStream bos = new ByteArrayOutputStream(EXPECTED.length * 3)) {
 Assert.assertEquals(
-expected.length * 3,
+EXPECTED.length * 3,
 CompressionUtils.gunzip(
 new ZeroRemainingInputStream(
 new ByteArrayInputStream(tripleGzByteStream.toByteArray())
@@ -520,10 +520,10 @@ public class CompressionUtilsTest
 )
 );
 final byte[] found = bos.toByteArray();
-Assert.assertEquals(expected.length * 3, found.length);
-Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 0, expected.length * 1));
-Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 1, expected.length * 2));
-Assert.assertArrayEquals(expected, Arrays.copyOfRange(found, expected.length * 2, expected.length * 3));
+Assert.assertEquals(EXPECTED.length * 3, found.length);
+Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 0, EXPECTED.length * 1));
+Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 1, EXPECTED.length * 2));
+Assert.assertArrayEquals(EXPECTED, Arrays.copyOfRange(found, EXPECTED.length * 2, EXPECTED.length * 3));
 }
 }

@@ -704,7 +704,7 @@ public class CompressionUtilsTest
 @Override
 public int read(byte b[]) throws IOException
 {
-final int len = Math.min(b.length, gzBytes.length - pos.get() % gzBytes.length);
+final int len = Math.min(b.length, GZ_BYTES.length - pos.get() % GZ_BYTES.length);
 pos.addAndGet(len);
 return read(b, 0, len);
 }
@@ -719,7 +719,7 @@ public class CompressionUtilsTest
 @Override
 public int read(byte b[], int off, int len) throws IOException
 {
-final int l = Math.min(len, gzBytes.length - pos.get() % gzBytes.length);
+final int l = Math.min(len, GZ_BYTES.length - pos.get() % GZ_BYTES.length);
 pos.addAndGet(l);
 return super.read(b, off, l);
 }
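The CompressionUtilsTest hunks above rename `gzBytes`/`expected` to `GZ_BYTES`/`EXPECTED` inside a test that gunzips three concatenated gzip blobs through `ZeroRemainingInputStream`, whose `read` overrides cap every read at the bytes left in the current blob. A minimal self-contained sketch of that capping pattern follows; the class and field names here are illustrative, not part of the commit:

```java
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.concurrent.atomic.AtomicInteger;

// Illustrative stand-in for the ZeroRemainingInputStream helper seen above:
// every read is capped at the bytes remaining in the current fixed-size blob,
// so the decompressor never receives bytes spanning two blobs in one call.
class ChunkCappedInputStream extends FilterInputStream
{
  private final AtomicInteger pos = new AtomicInteger(0);
  private final int blobLength; // stand-in for GZ_BYTES.length

  ChunkCappedInputStream(InputStream in, int blobLength)
  {
    super(in);
    this.blobLength = blobLength;
  }

  @Override
  public int read(byte[] b, int off, int len) throws IOException
  {
    // Cap the read at what remains of the current blob, as in the hunks above.
    final int l = Math.min(len, blobLength - pos.get() % blobLength);
    pos.addAndGet(l);
    return super.read(b, off, l);
  }
}
```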
@@ -28,7 +28,7 @@ import java.util.concurrent.atomic.AtomicInteger;

 public class RetryUtilsTest
 {
-private static final Predicate<Throwable> isTransient = new Predicate<Throwable>()
+private static final Predicate<Throwable> IS_TRANSIENT = new Predicate<Throwable>()
 {
 @Override
 public boolean apply(Throwable e)
@@ -46,7 +46,7 @@ public class RetryUtilsTest
 count.incrementAndGet();
 return "hey";
 },
-isTransient,
+IS_TRANSIENT,
 2
 );
 Assert.assertEquals("result", "hey", result);
@@ -64,7 +64,7 @@ public class RetryUtilsTest
 count.incrementAndGet();
 throw new IOException("what");
 },
-isTransient,
+IS_TRANSIENT,
 2
 );
 }
@@ -87,7 +87,7 @@ public class RetryUtilsTest
 throw new IOException("what");
 }
 },
-isTransient,
+IS_TRANSIENT,
 3
 );
 Assert.assertEquals("result", "hey", result);
@@ -108,7 +108,7 @@ public class RetryUtilsTest
 throw new IOException("uhh");
 }
 },
-isTransient,
+IS_TRANSIENT,
 3
 );
 }
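The RetryUtilsTest hunks above hand `IS_TRANSIENT` and a max-try count to Druid's retry helper. As a rough sketch of the control flow those tests exercise (a hypothetical helper, not Druid's actual RetryUtils API):

```java
import java.util.concurrent.Callable;
import java.util.function.Predicate;

// Hypothetical predicate-guarded retry loop in the shape the tests above exercise.
final class RetrySketch
{
  static <T> T retry(Callable<T> task, Predicate<Throwable> isTransient, int maxTries) throws Exception
  {
    for (int attempt = 1; ; attempt++) {
      try {
        return task.call();
      }
      catch (Exception e) {
        // Rethrow non-transient failures, or give up once the try budget is spent.
        if (!isTransient.test(e) || attempt >= maxTries) {
          throw e;
        }
      }
    }
  }

  public static void main(String[] args) throws Exception
  {
    final int[] calls = {0};
    // Fails once with a transient IOException, then succeeds on the retry.
    String result = retry(
        () -> {
          if (calls[0]++ == 0) {
            throw new java.io.IOException("transient");
          }
          return "hey";
        },
        e -> e instanceof java.io.IOException,
        2
    );
    System.out.println(result); // prints "hey" after exactly two calls
  }
}
```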
@@ -42,7 +42,7 @@ import java.util.concurrent.atomic.AtomicLong;
 */
 public class LifecycleTest
 {
-private static final Lifecycle.Handler dummyHandler = new Lifecycle.Handler()
+private static final Lifecycle.Handler DUMMY_HANDLER = new Lifecycle.Handler()
 {
 @Override
 public void start()
@@ -319,7 +319,7 @@ public class LifecycleTest
 reachedStop.await();

 try {
-lifecycle.addHandler(dummyHandler);
+lifecycle.addHandler(DUMMY_HANDLER);
 Assert.fail("Expected exception");
 }
 catch (IllegalStateException e) {
@@ -327,7 +327,7 @@ public class LifecycleTest
 }

 try {
-lifecycle.addMaybeStartHandler(dummyHandler);
+lifecycle.addMaybeStartHandler(DUMMY_HANDLER);
 Assert.fail("Expected exception");
 }
 catch (IllegalStateException e) {
@@ -49,7 +49,7 @@ public class FlatTextFormatParserTest
 );
 }

-private static final FlatTextFormatParserFactory parserFactory = new FlatTextFormatParserFactory();
+private static final FlatTextFormatParserFactory PARSER_FACTORY = new FlatTextFormatParserFactory();

 @Rule
 public ExpectedException expectedException = ExpectedException.none();
@@ -65,7 +65,7 @@ public class FlatTextFormatParserTest
 public void testValidHeader()
 {
 final String header = concat(format, "time", "value1", "value2");
-final Parser<String, Object> parser = parserFactory.get(format, header);
+final Parser<String, Object> parser = PARSER_FACTORY.get(format, header);
 Assert.assertEquals(ImmutableList.of("time", "value1", "value2"), parser.getFieldNames());
 }

@@ -77,14 +77,14 @@ public class FlatTextFormatParserTest
 expectedException.expect(ParseException.class);
 expectedException.expectMessage(StringUtils.format("Unable to parse header [%s]", header));

-parserFactory.get(format, header);
+PARSER_FACTORY.get(format, header);
 }

 @Test
 public void testWithHeader()
 {
 final String header = concat(format, "time", "value1", "value2");
-final Parser<String, Object> parser = parserFactory.get(format, header);
+final Parser<String, Object> parser = PARSER_FACTORY.get(format, header);
 final String body = concat(format, "hello", "world", "foo");
 final Map<String, Object> jsonMap = parser.parseToMap(body);
 Assert.assertEquals(
@@ -97,7 +97,7 @@ public class FlatTextFormatParserTest
 @Test
 public void testWithoutHeader()
 {
-final Parser<String, Object> parser = parserFactory.get(format);
+final Parser<String, Object> parser = PARSER_FACTORY.get(format);
 final String body = concat(format, "hello", "world", "foo");
 final Map<String, Object> jsonMap = parser.parseToMap(body);
 Assert.assertEquals(
@@ -111,7 +111,7 @@ public class FlatTextFormatParserTest
 public void testWithSkipHeaderRows()
 {
 final int skipHeaderRows = 2;
-final Parser<String, Object> parser = parserFactory.get(format, false, skipHeaderRows);
+final Parser<String, Object> parser = PARSER_FACTORY.get(format, false, skipHeaderRows);
 parser.startFileFromBeginning();
 final String[] body = new String[]{
 concat(format, "header", "line", "1"),
@@ -133,7 +133,7 @@ public class FlatTextFormatParserTest
 @Test
 public void testWithHeaderRow()
 {
-final Parser<String, Object> parser = parserFactory.get(format, true, 0);
+final Parser<String, Object> parser = PARSER_FACTORY.get(format, true, 0);
 parser.startFileFromBeginning();
 final String[] body = new String[]{
 concat(format, "time", "value1", "value2"),
@@ -151,7 +151,7 @@ public class FlatTextFormatParserTest
 @Test
 public void testWithHeaderRowOfEmptyColumns()
 {
-final Parser<String, Object> parser = parserFactory.get(format, true, 0);
+final Parser<String, Object> parser = PARSER_FACTORY.get(format, true, 0);
 parser.startFileFromBeginning();
 final String[] body = new String[]{
 concat(format, "time", "", "value2", ""),
@@ -169,7 +169,7 @@ public class FlatTextFormatParserTest
 @Test
 public void testWithDifferentHeaderRows()
 {
-final Parser<String, Object> parser = parserFactory.get(format, true, 0);
+final Parser<String, Object> parser = PARSER_FACTORY.get(format, true, 0);
 parser.startFileFromBeginning();
 final String[] body = new String[]{
 concat(format, "time", "value1", "value2"),
@@ -206,7 +206,7 @@ public class FlatTextFormatParserTest
 );

 final int skipHeaderRows = 2;
-final Parser<String, Object> parser = parserFactory.get(format, false, skipHeaderRows);
+final Parser<String, Object> parser = PARSER_FACTORY.get(format, false, skipHeaderRows);
 final String[] body = new String[]{
 concat(format, "header", "line", "1"),
 concat(format, "header", "line", "2"),
@@ -32,13 +32,13 @@ import java.util.Map;

 public class JSONPathParserTest
 {
-private static final String json =
+private static final String JSON =
 "{\"one\": \"foo\", \"two\" : [\"bar\", \"baz\"], \"three\" : \"qux\", \"four\" : null}";
-private static final String numbersJson =
+private static final String NUMBERS_JSON =
 "{\"five\" : 5.0, \"six\" : 6, \"many\" : 1234567878900, \"toomany\" : 1234567890000000000000}";
-private static final String whackyCharacterJson =
+private static final String WHACKY_CHARACTER_JSON =
 "{\"one\": \"foo\\uD900\"}";
-private static final String nestedJson =
+private static final String NESTED_JSON =
 "{\"simpleVal\":\"text\", \"ignore_me\":[1, {\"x\":2}], \"blah\":[4,5,6], \"newmet\":5, " +
 "\"foo\":{\"bar1\":\"aaa\", \"bar2\":\"bbb\"}, " +
 "\"baz\":[1,2,3], \"timestamp\":\"2999\", \"foo.bar1\":\"Hello world!\", " +
@@ -47,7 +47,7 @@ public class JSONPathParserTest
 "\"testMapConvert\":{\"big\": 1234567890000000000000, \"big2\":{\"big2\":1234567890000000000000}}, " +
 "\"testEmptyList\": [], " +
 "\"hey\":[{\"barx\":\"asdf\"}], \"met\":{\"a\":[7,8,9]}}";
-private static final String notJson = "***@#%R#*(TG@(*H(#@(#@((H#(@TH@(#TH(@SDHGKJDSKJFBSBJK";
+private static final String NOT_JSON = "***@#%R#*(TG@(*H(#@(#@((H#(@TH@(#TH(@SDHGKJDSKJFBSBJK";

 @Rule
 public ExpectedException thrown = ExpectedException.none();
@@ -57,7 +57,7 @@ public class JSONPathParserTest
 {
 List<JSONPathFieldSpec> fields = new ArrayList<>();
 final Parser<String, Object> jsonParser = new JSONPathParser(new JSONPathSpec(true, fields), null);
-final Map<String, Object> jsonMap = jsonParser.parseToMap(json);
+final Map<String, Object> jsonMap = jsonParser.parseToMap(JSON);
 Assert.assertEquals(
 "jsonMap",
 ImmutableMap.of("one", "foo", "two", ImmutableList.of("bar", "baz"), "three", "qux"),
@@ -70,7 +70,7 @@ public class JSONPathParserTest
 {
 List<JSONPathFieldSpec> fields = new ArrayList<>();
 final Parser<String, Object> jsonParser = new JSONPathParser(new JSONPathSpec(true, fields), null);
-final Map<String, Object> jsonMap = jsonParser.parseToMap(numbersJson);
+final Map<String, Object> jsonMap = jsonParser.parseToMap(NUMBERS_JSON);
 Assert.assertEquals(
 "jsonMap",
 ImmutableMap.of("five", 5.0, "six", 6L, "many", 1234567878900L, "toomany", 1.23456789E21),
@@ -83,7 +83,7 @@ public class JSONPathParserTest
 {
 List<JSONPathFieldSpec> fields = new ArrayList<>();
 final Parser<String, Object> jsonParser = new JSONPathParser(new JSONPathSpec(true, fields), null);
-final Map<String, Object> jsonMap = jsonParser.parseToMap(whackyCharacterJson);
+final Map<String, Object> jsonMap = jsonParser.parseToMap(WHACKY_CHARACTER_JSON);
 Assert.assertEquals(
 "jsonMap",
 ImmutableMap.of("one", "foo?"),
@@ -113,7 +113,7 @@ public class JSONPathParserTest


 final Parser<String, Object> jsonParser = new JSONPathParser(new JSONPathSpec(true, fields), null);
-final Map<String, Object> jsonMap = jsonParser.parseToMap(nestedJson);
+final Map<String, Object> jsonMap = jsonParser.parseToMap(NESTED_JSON);

 // Root fields
 Assert.assertEquals(ImmutableList.of(1L, 2L, 3L), jsonMap.get("baz"));
@@ -174,7 +174,7 @@ public class JSONPathParserTest
 fields.add(new JSONPathFieldSpec(JSONPathFieldType.JQ, "jq-met-array", ".met.a"));

 final Parser<String, Object> jsonParser = new JSONPathParser(new JSONPathSpec(false, fields), null);
-final Map<String, Object> jsonMap = jsonParser.parseToMap(nestedJson);
+final Map<String, Object> jsonMap = jsonParser.parseToMap(NESTED_JSON);

 // Root fields
 Assert.assertEquals("text", jsonMap.get("simpleVal"));
@@ -211,7 +211,7 @@ public class JSONPathParserTest
 thrown.expectMessage("Cannot have duplicate field definition: met-array");

 final Parser<String, Object> jsonParser = new JSONPathParser(new JSONPathSpec(false, fields), null);
-jsonParser.parseToMap(nestedJson);
+jsonParser.parseToMap(NESTED_JSON);
 }

 @Test
@@ -225,7 +225,7 @@ public class JSONPathParserTest
 thrown.expectMessage("Cannot have duplicate field definition: met-array");

 final Parser<String, Object> jsonParser = new JSONPathParser(new JSONPathSpec(false, fields), null);
-jsonParser.parseToMap(nestedJson);
+jsonParser.parseToMap(NESTED_JSON);
 }

 @Test
@@ -234,9 +234,9 @@ public class JSONPathParserTest
 List<JSONPathFieldSpec> fields = new ArrayList<>();

 thrown.expect(ParseException.class);
-thrown.expectMessage("Unable to parse row [" + notJson + "]");
+thrown.expectMessage("Unable to parse row [" + NOT_JSON + "]");

 final Parser<String, Object> jsonParser = new JSONPathParser(new JSONPathSpec(true, fields), null);
-jsonParser.parseToMap(notJson);
+jsonParser.parseToMap(NOT_JSON);
 }
 }
@@ -57,7 +57,7 @@ import java.util.stream.Stream;
 */
 public class EmitterTest
 {
-private static final ObjectMapper jsonMapper = new ObjectMapper();
+private static final ObjectMapper JSON_MAPPER = new ObjectMapper();
 public static String TARGET_URL = "http://metrics.foo.bar/";
 public static final Response OK_RESPONSE = Stream
 .of(responseBuilder(HttpVersion.HTTP_1_1, HttpResponseStatus.CREATED))
@@ -120,7 +120,7 @@ public class EmitterTest
 HttpPostEmitter emitter = new HttpPostEmitter(
 config,
 httpClient,
-jsonMapper
+JSON_MAPPER
 );
 emitter.start();
 return emitter;
@@ -135,7 +135,7 @@ public class EmitterTest
 HttpPostEmitter emitter = new HttpPostEmitter(
 config,
 httpClient,
-jsonMapper
+JSON_MAPPER
 );
 emitter.start();
 return emitter;
@@ -150,7 +150,7 @@ public class EmitterTest
 props.setProperty("org.apache.druid.java.util.emitter.flushCount", String.valueOf(size));

 Lifecycle lifecycle = new Lifecycle();
-Emitter emitter = Emitters.create(props, httpClient, jsonMapper, lifecycle);
+Emitter emitter = Emitters.create(props, httpClient, JSON_MAPPER, lifecycle);
 Assert.assertTrue(StringUtils.format(
 "HttpPostEmitter emitter should be created, but found %s",
 emitter.getClass().getName()
@@ -169,7 +169,7 @@ public class EmitterTest
 HttpPostEmitter emitter = new HttpPostEmitter(
 config,
 httpClient,
-jsonMapper
+JSON_MAPPER
 );
 emitter.start();
 return emitter;
@@ -187,7 +187,7 @@ public class EmitterTest
 HttpPostEmitter emitter = new HttpPostEmitter(
 config,
 httpClient,
-jsonMapper
+JSON_MAPPER
 );
 emitter.start();
 return emitter;
@@ -203,7 +203,7 @@ public class EmitterTest
 HttpPostEmitter emitter = new HttpPostEmitter(
 config,
 httpClient,
-jsonMapper
+JSON_MAPPER
 );
 emitter.start();
 return emitter;
@@ -232,8 +232,8 @@ public class EmitterTest
 Assert.assertEquals(
 StringUtils.format(
 "[%s,%s]\n",
-jsonMapper.writeValueAsString(events.get(0)),
-jsonMapper.writeValueAsString(events.get(1))
+JSON_MAPPER.writeValueAsString(events.get(0)),
+JSON_MAPPER.writeValueAsString(events.get(1))
 ),
 StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString()
 );
@@ -274,8 +274,8 @@ public class EmitterTest
 Assert.assertEquals(
 StringUtils.format(
 "[%s,%s]\n",
-jsonMapper.writeValueAsString(events.get(0)),
-jsonMapper.writeValueAsString(events.get(1))
+JSON_MAPPER.writeValueAsString(events.get(0)),
+JSON_MAPPER.writeValueAsString(events.get(1))
 ),
 StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString()
 );
@@ -459,8 +459,8 @@ public class EmitterTest
 Assert.assertEquals(
 StringUtils.format(
 "%s\n%s\n",
-jsonMapper.writeValueAsString(events.get(0)),
-jsonMapper.writeValueAsString(events.get(1))
+JSON_MAPPER.writeValueAsString(events.get(0)),
+JSON_MAPPER.writeValueAsString(events.get(1))
 ),
 StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString()
 );
@@ -513,8 +513,8 @@ public class EmitterTest
 Assert.assertEquals(
 StringUtils.format(
 "[%s,%s]\n",
-jsonMapper.writeValueAsString(events.get(counter.getAndIncrement())),
-jsonMapper.writeValueAsString(events.get(counter.getAndIncrement()))
+JSON_MAPPER.writeValueAsString(events.get(counter.getAndIncrement())),
+JSON_MAPPER.writeValueAsString(events.get(counter.getAndIncrement()))
 ),
 StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString()
 );
@@ -576,8 +576,8 @@ public class EmitterTest
 Assert.assertEquals(
 StringUtils.format(
 "[%s,%s]\n",
-jsonMapper.writeValueAsString(events.get(0)),
-jsonMapper.writeValueAsString(events.get(1))
+JSON_MAPPER.writeValueAsString(events.get(0)),
+JSON_MAPPER.writeValueAsString(events.get(1))
 ),
 baos.toString(StandardCharsets.UTF_8.name())
 );
@@ -35,7 +35,7 @@ import java.util.concurrent.atomic.AtomicLong;
 public class HttpEmitterTest
 {
 private final MockHttpClient httpClient = new MockHttpClient();
-private static final ObjectMapper objectMapper = new ObjectMapper()
+private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper()
 {
 @Override
 public byte[] writeValueAsBytes(Object value)
@@ -71,7 +71,7 @@ public class HttpEmitterTest
 .setBatchingStrategy(BatchingStrategy.ONLY_EVENTS)
 .setHttpTimeoutAllowanceFactor(timeoutAllowanceFactor)
 .build();
-final HttpPostEmitter emitter = new HttpPostEmitter(config, httpClient, objectMapper);
+final HttpPostEmitter emitter = new HttpPostEmitter(config, httpClient, OBJECT_MAPPER);

 long startMs = System.currentTimeMillis();
 emitter.start();
@@ -42,7 +42,7 @@ import java.util.concurrent.ThreadLocalRandom;
 public class HttpPostEmitterStressTest
 {
 private static final int N = 10_000;
-private static final ObjectMapper objectMapper = new ObjectMapper()
+private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper()
 {
 @Override
 public byte[] writeValueAsBytes(Object value)
@@ -64,7 +64,7 @@ public class HttpPostEmitterStressTest
 // For this test, we don't need any batches to be dropped, i. e. "gaps" in data
 .setBatchQueueSizeLimit(1000)
 .build();
-final HttpPostEmitter emitter = new HttpPostEmitter(config, httpClient, objectMapper);
+final HttpPostEmitter emitter = new HttpPostEmitter(config, httpClient, OBJECT_MAPPER);
 int nThreads = Runtime.getRuntime().availableProcessors() * 2;
 final List<IntList> eventsPerThread = new ArrayList<>(nThreads);
 final List<List<Batch>> eventBatchesPerThread = new ArrayList<>(nThreads);
@@ -35,7 +35,7 @@ import java.util.concurrent.atomic.AtomicReference;
 public class HttpPostEmitterTest
 {

-private static final ObjectMapper objectMapper = new ObjectMapper()
+private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper()
 {
 @Override
 public byte[] writeValueAsBytes(Object value)
@@ -72,7 +72,7 @@ public class HttpPostEmitterTest
 .setMaxBatchSize(1024 * 1024)
 .setBatchQueueSizeLimit(1000)
 .build();
-final HttpPostEmitter emitter = new HttpPostEmitter(config, httpClient, objectMapper);
+final HttpPostEmitter emitter = new HttpPostEmitter(config, httpClient, OBJECT_MAPPER);
 emitter.start();

 // emit first event
@@ -42,7 +42,7 @@ import java.util.Properties;

 public class ParametrizedUriEmitterTest
 {
-private static final ObjectMapper jsonMapper = new ObjectMapper();
+private static final ObjectMapper JSON_MAPPER = new ObjectMapper();

 private MockHttpClient httpClient;
 private Lifecycle lifecycle;
@@ -98,8 +98,8 @@ public class ParametrizedUriEmitterTest
 Assert.assertEquals(
 StringUtils.format(
 "[%s,%s]\n",
-jsonMapper.writeValueAsString(events.get(0)),
-jsonMapper.writeValueAsString(events.get(1))
+JSON_MAPPER.writeValueAsString(events.get(0)),
+JSON_MAPPER.writeValueAsString(events.get(1))
 ),
 StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString()
 );
@@ -148,8 +148,8 @@ public class ParametrizedUriEmitterTest
 emitter.flush();
 Assert.assertTrue(httpClient.succeeded());
 Map<String, String> expected = ImmutableMap.of(
-"http://example.com/test1", StringUtils.format("[%s]\n", jsonMapper.writeValueAsString(events.get(0))),
-"http://example.com/test2", StringUtils.format("[%s]\n", jsonMapper.writeValueAsString(events.get(1)))
+"http://example.com/test1", StringUtils.format("[%s]\n", JSON_MAPPER.writeValueAsString(events.get(0))),
+"http://example.com/test2", StringUtils.format("[%s]\n", JSON_MAPPER.writeValueAsString(events.get(1)))
 );
 Assert.assertEquals(expected, results);
 }
@@ -173,8 +173,8 @@ public class ParametrizedUriEmitterTest
 Assert.assertEquals(
 StringUtils.format(
 "[%s,%s]\n",
-jsonMapper.writeValueAsString(events.get(0)),
-jsonMapper.writeValueAsString(events.get(1))
+JSON_MAPPER.writeValueAsString(events.get(0)),
+JSON_MAPPER.writeValueAsString(events.get(1))
 ),
 StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString()
 );
@@ -25,46 +25,46 @@ import org.junit.Test;

 public class DefaultPasswordProviderTest
 {
-private static final String pwd = "nothing";
-private static final ObjectMapper jsonMapper = new ObjectMapper();
+private static final String PWD = "nothing";
+private static final ObjectMapper JSON_MAPPER = new ObjectMapper();

 @Test
 public void testExplicitConstruction()
 {
-DefaultPasswordProvider pp = new DefaultPasswordProvider(pwd);
-Assert.assertEquals(pwd, pp.getPassword());
+DefaultPasswordProvider pp = new DefaultPasswordProvider(PWD);
+Assert.assertEquals(PWD, pp.getPassword());
 }

 @Test
 public void testFromStringConstruction()
 {
-DefaultPasswordProvider pp = DefaultPasswordProvider.fromString(pwd);
-Assert.assertEquals(pwd, pp.getPassword());
+DefaultPasswordProvider pp = DefaultPasswordProvider.fromString(PWD);
+Assert.assertEquals(PWD, pp.getPassword());
 }

 @Test
 public void testDeserializationFromJsonString() throws Exception
 {
-PasswordProvider pp = jsonMapper.readValue("\"" + pwd + "\"",
+PasswordProvider pp = JSON_MAPPER.readValue("\"" + PWD + "\"",
 PasswordProvider.class);
-Assert.assertEquals(pwd, pp.getPassword());
+Assert.assertEquals(PWD, pp.getPassword());
 }

 @Test
 public void testDeserializationFromJson() throws Exception
 {
-PasswordProvider pp = jsonMapper.readValue(
-"{\"type\": \"default\", \"password\": \"" + pwd + "\"}",
+PasswordProvider pp = JSON_MAPPER.readValue(
+"{\"type\": \"default\", \"password\": \"" + PWD + "\"}",
 PasswordProvider.class);
-Assert.assertEquals(pwd, pp.getPassword());
+Assert.assertEquals(PWD, pp.getPassword());
 }

 @Test
 public void testSerializationWithMixIn() throws Exception
 {
-DefaultPasswordProvider pp = new DefaultPasswordProvider(pwd);
-jsonMapper.addMixIn(PasswordProvider.class, PasswordProviderRedactionMixIn.class);
-String valueAsString = jsonMapper.writeValueAsString(pp);
+DefaultPasswordProvider pp = new DefaultPasswordProvider(PWD);
+JSON_MAPPER.addMixIn(PasswordProvider.class, PasswordProviderRedactionMixIn.class);
+String valueAsString = JSON_MAPPER.writeValueAsString(pp);
 Assert.assertEquals("{\"type\":\"default\"}", valueAsString);
 }
 }
@@ -27,16 +27,16 @@ import java.io.IOException;

 public class EnvironmentVariablePasswordProviderTest
 {
-private static final ObjectMapper jsonMapper = new ObjectMapper();
+private static final ObjectMapper JSON_MAPPER = new ObjectMapper();

 @Test
 public void testSerde() throws IOException
 {
 String providerString = "{\"type\": \"environment\", \"variable\" : \"test\"}";
-PasswordProvider provider = jsonMapper.readValue(providerString, PasswordProvider.class);
+PasswordProvider provider = JSON_MAPPER.readValue(providerString, PasswordProvider.class);
 Assert.assertTrue(provider instanceof EnvironmentVariablePasswordProvider);
 Assert.assertEquals("test", ((EnvironmentVariablePasswordProvider) provider).getVariable());
-PasswordProvider serde = jsonMapper.readValue(jsonMapper.writeValueAsString(provider), PasswordProvider.class);
+PasswordProvider serde = JSON_MAPPER.readValue(JSON_MAPPER.writeValueAsString(provider), PasswordProvider.class);
 Assert.assertEquals(provider, serde);
 }
 }
@@ -39,7 +39,7 @@ public class MetadataStorageConnectorConfigTest
 )
 throws IOException
 {
-return jsonMapper.readValue(
+return JSON_MAPPER.readValue(
 "{" +
 "\"createTables\": \"" + createTables + "\"," +
 "\"host\": \"" + host + "\"," +
@@ -79,7 +79,7 @@ public class MetadataStorageConnectorConfigTest
 Assert.assertTrue(metadataStorageConnectorConfig.hashCode() == metadataStorageConnectorConfig2.hashCode());
 }

-private static final ObjectMapper jsonMapper = new ObjectMapper();
+private static final ObjectMapper JSON_MAPPER = new ObjectMapper();

 @Test
 public void testMetadataStorageConnectionConfigSimplePassword() throws Exception
@@ -119,7 +119,7 @@ public class MetadataStorageConnectorConfigTest
 String pwd
 ) throws Exception
 {
-MetadataStorageConnectorConfig config = jsonMapper.readValue(
+MetadataStorageConnectorConfig config = JSON_MAPPER.readValue(
 "{" +
 "\"createTables\": \"" + createTables + "\"," +
 "\"host\": \"" + host + "\"," +
@@ -162,7 +162,7 @@ public class MetadataStorageConnectorConfigTest
 String pwd
 ) throws Exception
 {
-MetadataStorageConnectorConfig config = jsonMapper.readValue(
+MetadataStorageConnectorConfig config = JSON_MAPPER.readValue(
 "{" +
 "\"createTables\": \"" + createTables + "\"," +
 "\"host\": \"" + host + "\"," +
@@ -51,7 +51,7 @@ import java.util.TreeSet;
 */
 public class DataSegmentTest
 {
-private static final ObjectMapper mapper = new TestObjectMapper();
+private static final ObjectMapper MAPPER = new TestObjectMapper();
 private static final int TEST_VERSION = 0x9;

 private static ShardSpec getShardSpec(final int partitionNum)
@@ -107,7 +107,7 @@ public class DataSegmentTest
 {
 InjectableValues.Std injectableValues = new InjectableValues.Std();
 injectableValues.addValue(DataSegment.PruneLoadSpecHolder.class, DataSegment.PruneLoadSpecHolder.DEFAULT);
-mapper.setInjectableValues(injectableValues);
+MAPPER.setInjectableValues(injectableValues);
 }

 @Test
@@ -129,8 +129,8 @@ public class DataSegmentTest
 1
 );

-final Map<String, Object> objectMap = mapper.readValue(
-mapper.writeValueAsString(segment),
+final Map<String, Object> objectMap = MAPPER.readValue(
+MAPPER.writeValueAsString(segment),
 JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
 );

@@ -145,7 +145,7 @@ public class DataSegmentTest
 Assert.assertEquals(TEST_VERSION, objectMap.get("binaryVersion"));
 Assert.assertEquals(1, objectMap.get("size"));

-DataSegment deserializedSegment = mapper.readValue(mapper.writeValueAsString(segment), DataSegment.class);
+DataSegment deserializedSegment = MAPPER.readValue(MAPPER.writeValueAsString(segment), DataSegment.class);

 Assert.assertEquals(segment.getDataSource(), deserializedSegment.getDataSource());
 Assert.assertEquals(segment.getInterval(), deserializedSegment.getInterval());
@@ -157,13 +157,13 @@ public class DataSegmentTest
 Assert.assertEquals(segment.getSize(), deserializedSegment.getSize());
 Assert.assertEquals(segment.getId(), deserializedSegment.getId());

-deserializedSegment = mapper.readValue(mapper.writeValueAsString(segment), DataSegment.class);
+deserializedSegment = MAPPER.readValue(MAPPER.writeValueAsString(segment), DataSegment.class);
 Assert.assertEquals(0, segment.compareTo(deserializedSegment));

-deserializedSegment = mapper.readValue(mapper.writeValueAsString(segment), DataSegment.class);
+deserializedSegment = MAPPER.readValue(MAPPER.writeValueAsString(segment), DataSegment.class);
 Assert.assertEquals(0, deserializedSegment.compareTo(segment));

-deserializedSegment = mapper.readValue(mapper.writeValueAsString(segment), DataSegment.class);
+deserializedSegment = MAPPER.readValue(MAPPER.writeValueAsString(segment), DataSegment.class);
 Assert.assertEquals(segment.hashCode(), deserializedSegment.hashCode());
 }

@@ -224,7 +224,7 @@ public class DataSegmentTest
 .version(DateTimes.of("2012-01-01T11:22:33.444Z").toString())
 .build();

-final DataSegment segment2 = mapper.readValue(mapper.writeValueAsString(segment), DataSegment.class);
+final DataSegment segment2 = MAPPER.readValue(MAPPER.writeValueAsString(segment), DataSegment.class);
 Assert.assertEquals("empty dimensions", ImmutableList.of(), segment2.getDimensions());
 Assert.assertEquals("empty metrics", ImmutableList.of(), segment2.getMetrics());
 }
@@ -43,7 +43,7 @@ import java.util.Map;

 public class SegmentWithOvershadowedStatusTest
 {
-private static final ObjectMapper mapper = new TestObjectMapper();
+private static final ObjectMapper MAPPER = new TestObjectMapper();
 private static final int TEST_VERSION = 0x9;

 @Before
@@ -51,7 +51,7 @@ public class SegmentWithOvershadowedStatusTest
 {
 InjectableValues.Std injectableValues = new InjectableValues.Std();
 injectableValues.addValue(DataSegment.PruneLoadSpecHolder.class, DataSegment.PruneLoadSpecHolder.DEFAULT);
-mapper.setInjectableValues(injectableValues);
+MAPPER.setInjectableValues(injectableValues);
 }

 @Test
@@ -74,8 +74,8 @@ public class SegmentWithOvershadowedStatusTest

 final SegmentWithOvershadowedStatus segment = new SegmentWithOvershadowedStatus(dataSegment, false);

-final Map<String, Object> objectMap = mapper.readValue(
-mapper.writeValueAsString(segment),
+final Map<String, Object> objectMap = MAPPER.readValue(
+MAPPER.writeValueAsString(segment),
 JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
 );

@@ -91,9 +91,9 @@ public class SegmentWithOvershadowedStatusTest
 Assert.assertEquals(1, objectMap.get("size"));
 Assert.assertEquals(false, objectMap.get("overshadowed"));

-final String json = mapper.writeValueAsString(segment);
+final String json = MAPPER.writeValueAsString(segment);

-final TestSegmentWithOvershadowedStatus deserializedSegment = mapper.readValue(
+final TestSegmentWithOvershadowedStatus deserializedSegment = MAPPER.readValue(
 json,
 TestSegmentWithOvershadowedStatus.class
 );
@@ -39,8 +39,8 @@ public class AzureDataSegmentPullerTest extends EasyMockSupport
 {

 private static final String SEGMENT_FILE_NAME = "segment";
-private static final String containerName = "container";
-private static final String blobPath = "/path/to/storage/index.zip";
+private static final String CONTAINER_NAME = "container";
+private static final String BLOB_PATH = "/path/to/storage/index.zip";
 private AzureStorage azureStorage;

 @Before
@@ -58,13 +58,13 @@ public class AzureDataSegmentPullerTest extends EasyMockSupport
 try {
 final InputStream zipStream = new FileInputStream(pulledFile);

-EasyMock.expect(azureStorage.getBlobInputStream(containerName, blobPath)).andReturn(zipStream);
+EasyMock.expect(azureStorage.getBlobInputStream(CONTAINER_NAME, BLOB_PATH)).andReturn(zipStream);

 replayAll();

 AzureDataSegmentPuller puller = new AzureDataSegmentPuller(azureStorage);

-FileUtils.FileCopyResult result = puller.getSegmentFiles(containerName, blobPath, toDir);
+FileUtils.FileCopyResult result = puller.getSegmentFiles(CONTAINER_NAME, BLOB_PATH, toDir);

 File expected = new File(toDir, SEGMENT_FILE_NAME);
 Assert.assertEquals(value.length(), result.size());
@@ -86,7 +86,7 @@ public class AzureDataSegmentPullerTest extends EasyMockSupport

 final File outDir = Files.createTempDirectory("druid").toFile();
 try {
-EasyMock.expect(azureStorage.getBlobInputStream(containerName, blobPath)).andThrow(
+EasyMock.expect(azureStorage.getBlobInputStream(CONTAINER_NAME, BLOB_PATH)).andThrow(
 new URISyntaxException(
 "error",
 "error",
@@ -98,7 +98,7 @@ public class AzureDataSegmentPullerTest extends EasyMockSupport

 AzureDataSegmentPuller puller = new AzureDataSegmentPuller(azureStorage);

-puller.getSegmentFiles(containerName, blobPath, outDir);
+puller.getSegmentFiles(CONTAINER_NAME, BLOB_PATH, outDir);

 Assert.assertFalse(outDir.exists());

@@ -50,13 +50,13 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport
 @Rule
 public final TemporaryFolder tempFolder = new TemporaryFolder();

-private static final String containerName = "container";
-private static final String blobPath = "test/2015-04-12T00:00:00.000Z_2015-04-13T00:00:00.000Z/1/0/index.zip";
-private static final DataSegment dataSegment = new DataSegment(
+private static final String CONTAINER_NAME = "container";
+private static final String BLOB_PATH = "test/2015-04-12T00:00:00.000Z_2015-04-13T00:00:00.000Z/1/0/index.zip";
+private static final DataSegment DATA_SEGMENT = new DataSegment(
 "test",
 Intervals.of("2015-04-12/2015-04-13"),
 "1",
-ImmutableMap.of("containerName", containerName, "blobPath", blobPath),
+ImmutableMap.of("containerName", CONTAINER_NAME, "blobPath", BLOB_PATH),
 null,
 null,
 NoneShardSpec.instance(),
@@ -129,8 +129,8 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport
 {

 AzureDataSegmentPusher pusher = new AzureDataSegmentPusher(azureStorage, azureAccountConfig);
-final String storageDir = pusher.getStorageDir(dataSegment, false);
-final String azurePath = pusher.getAzurePath(dataSegment, false);
+final String storageDir = pusher.getStorageDir(DATA_SEGMENT, false);
+final String azurePath = pusher.getAzurePath(DATA_SEGMENT, false);

 Assert.assertEquals(
 StringUtils.format("%s/%s", storageDir, AzureStorageDruidModule.INDEX_ZIP_FILE_NAME),
@@ -144,15 +144,15 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport
 AzureDataSegmentPusher pusher = new AzureDataSegmentPusher(azureStorage, azureAccountConfig);
 final int binaryVersion = 9;
 final File compressedSegmentData = new File("index.zip");
-final String azurePath = pusher.getAzurePath(dataSegment, false);
+final String azurePath = pusher.getAzurePath(DATA_SEGMENT, false);

-azureStorage.uploadBlob(compressedSegmentData, containerName, azurePath);
+azureStorage.uploadBlob(compressedSegmentData, CONTAINER_NAME, azurePath);
 EasyMock.expectLastCall();

 replayAll();

 DataSegment pushedDataSegment = pusher.uploadDataSegment(
-dataSegment,
+DATA_SEGMENT,
 binaryVersion,
 0, // empty file
 compressedSegmentData,
@@ -180,7 +180,7 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport
 public void storageDirContainsNoColonsTest()
 {
 AzureDataSegmentPusher pusher = new AzureDataSegmentPusher(azureStorage, azureAccountConfig);
-DataSegment withColons = dataSegment.withVersion("2018-01-05T14:54:09.295Z");
+DataSegment withColons = DATA_SEGMENT.withVersion("2018-01-05T14:54:09.295Z");
 String segmentPath = pusher.getStorageDir(withColons, false);
 Assert.assertFalse("Path should not contain any columns", segmentPath.contains(":"));
 }
@ -39,10 +39,10 @@ import java.nio.charset.StandardCharsets;
|
||||||
public class AzureTaskLogsTest extends EasyMockSupport
|
public class AzureTaskLogsTest extends EasyMockSupport
|
||||||
{
|
{
|
||||||
|
|
||||||
private static final String container = "test";
|
private static final String CONTAINER = "test";
|
||||||
private static final String prefix = "test/log";
|
private static final String PREFIX = "test/log";
|
||||||
private static final String taskid = "taskid";
|
private static final String TASK_ID = "taskid";
|
||||||
private static final AzureTaskLogsConfig azureTaskLogsConfig = new AzureTaskLogsConfig(container, prefix, 3);
|
private static final AzureTaskLogsConfig AZURE_TASK_LOGS_CONFIG = new AzureTaskLogsConfig(CONTAINER, PREFIX, 3);
|
||||||
|
|
||||||
private AzureStorage azureStorage;
|
private AzureStorage azureStorage;
|
||||||
private AzureTaskLogs azureTaskLogs;
|
private AzureTaskLogs azureTaskLogs;
|
||||||
|
@ -51,7 +51,7 @@ public class AzureTaskLogsTest extends EasyMockSupport
|
||||||
public void before()
|
public void before()
|
||||||
{
|
{
|
||||||
azureStorage = createMock(AzureStorage.class);
|
azureStorage = createMock(AzureStorage.class);
|
||||||
azureTaskLogs = new AzureTaskLogs(azureTaskLogsConfig, azureStorage);
|
azureTaskLogs = new AzureTaskLogs(AZURE_TASK_LOGS_CONFIG, azureStorage);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@@ -63,12 +63,12 @@ public class AzureTaskLogsTest extends EasyMockSupport
     try {
       final File logFile = new File(tmpDir, "log");
 
-      azureStorage.uploadBlob(logFile, container, prefix + "/" + taskid + "/log");
+      azureStorage.uploadBlob(logFile, CONTAINER, PREFIX + "/" + TASK_ID + "/log");
       EasyMock.expectLastCall();
 
       replayAll();
 
-      azureTaskLogs.pushTaskLog(taskid, logFile);
+      azureTaskLogs.pushTaskLog(TASK_ID, logFile);
 
       verifyAll();
     }
@@ -82,16 +82,16 @@ public class AzureTaskLogsTest extends EasyMockSupport
   {
     final String testLog = "hello this is a log";
 
-    final String blobPath = prefix + "/" + taskid + "/log";
-    EasyMock.expect(azureStorage.getBlobExists(container, blobPath)).andReturn(true);
-    EasyMock.expect(azureStorage.getBlobLength(container, blobPath)).andReturn((long) testLog.length());
-    EasyMock.expect(azureStorage.getBlobInputStream(container, blobPath)).andReturn(
+    final String blobPath = PREFIX + "/" + TASK_ID + "/log";
+    EasyMock.expect(azureStorage.getBlobExists(CONTAINER, blobPath)).andReturn(true);
+    EasyMock.expect(azureStorage.getBlobLength(CONTAINER, blobPath)).andReturn((long) testLog.length());
+    EasyMock.expect(azureStorage.getBlobInputStream(CONTAINER, blobPath)).andReturn(
         new ByteArrayInputStream(testLog.getBytes(StandardCharsets.UTF_8)));
 
 
     replayAll();
 
-    final Optional<ByteSource> byteSource = azureTaskLogs.streamTaskLog(taskid, 0);
+    final Optional<ByteSource> byteSource = azureTaskLogs.streamTaskLog(TASK_ID, 0);
 
     final StringWriter writer = new StringWriter();
     IOUtils.copy(byteSource.get().openStream(), writer, "UTF-8");
@@ -105,16 +105,16 @@ public class AzureTaskLogsTest extends EasyMockSupport
   {
     final String testLog = "hello this is a log";
 
-    final String blobPath = prefix + "/" + taskid + "/log";
-    EasyMock.expect(azureStorage.getBlobExists(container, blobPath)).andReturn(true);
-    EasyMock.expect(azureStorage.getBlobLength(container, blobPath)).andReturn((long) testLog.length());
-    EasyMock.expect(azureStorage.getBlobInputStream(container, blobPath)).andReturn(
+    final String blobPath = PREFIX + "/" + TASK_ID + "/log";
+    EasyMock.expect(azureStorage.getBlobExists(CONTAINER, blobPath)).andReturn(true);
+    EasyMock.expect(azureStorage.getBlobLength(CONTAINER, blobPath)).andReturn((long) testLog.length());
+    EasyMock.expect(azureStorage.getBlobInputStream(CONTAINER, blobPath)).andReturn(
         new ByteArrayInputStream(testLog.getBytes(StandardCharsets.UTF_8)));
 
 
     replayAll();
 
-    final Optional<ByteSource> byteSource = azureTaskLogs.streamTaskLog(taskid, 5);
+    final Optional<ByteSource> byteSource = azureTaskLogs.streamTaskLog(TASK_ID, 5);
 
     final StringWriter writer = new StringWriter();
     IOUtils.copy(byteSource.get().openStream(), writer, "UTF-8");
@@ -128,16 +128,16 @@ public class AzureTaskLogsTest extends EasyMockSupport
   {
     final String testLog = "hello this is a log";
 
-    final String blobPath = prefix + "/" + taskid + "/log";
-    EasyMock.expect(azureStorage.getBlobExists(container, blobPath)).andReturn(true);
-    EasyMock.expect(azureStorage.getBlobLength(container, blobPath)).andReturn((long) testLog.length());
-    EasyMock.expect(azureStorage.getBlobInputStream(container, blobPath)).andReturn(
+    final String blobPath = PREFIX + "/" + TASK_ID + "/log";
+    EasyMock.expect(azureStorage.getBlobExists(CONTAINER, blobPath)).andReturn(true);
+    EasyMock.expect(azureStorage.getBlobLength(CONTAINER, blobPath)).andReturn((long) testLog.length());
+    EasyMock.expect(azureStorage.getBlobInputStream(CONTAINER, blobPath)).andReturn(
         new ByteArrayInputStream(StringUtils.toUtf8(testLog)));
 
 
     replayAll();
 
-    final Optional<ByteSource> byteSource = azureTaskLogs.streamTaskLog(taskid, -3);
+    final Optional<ByteSource> byteSource = azureTaskLogs.streamTaskLog(TASK_ID, -3);
 
     final StringWriter writer = new StringWriter();
     IOUtils.copy(byteSource.get().openStream(), writer, "UTF-8");
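Aside: the AzureTaskLogsTest hunks above all follow EasyMock's record-replay-verify lifecycle — expectations are recorded on mocks, replayAll() arms them, the code under test runs, and verifyAll() checks that every recorded call happened. A minimal, self-contained sketch of that lifecycle; the Storage interface and its methods are hypothetical stand-ins, only the EasyMock calls are real API:

    import org.easymock.EasyMock;
    import org.easymock.EasyMockSupport;

    public class RecordReplayVerifySketch extends EasyMockSupport
    {
      // Hypothetical collaborator, standing in for AzureStorage.
      interface Storage
      {
        void upload(String path);

        long length(String path);
      }

      public void run()
      {
        Storage storage = createMock(Storage.class);

        // Record phase: declare the calls the test expects.
        storage.upload("task/log");
        EasyMock.expectLastCall();                                   // void method: expect the last call
        EasyMock.expect(storage.length("task/log")).andReturn(42L);  // non-void: stub a return value

        replayAll();                // switch every registered mock to replay mode

        storage.upload("task/log"); // exercise the "code under test"
        long len = storage.length("task/log");

        verifyAll();                // fail if any expected call was missed
        System.out.println("replayed length = " + len);
      }

      public static void main(String[] args)
      {
        new RecordReplayVerifySketch().run();
      }
    }
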
@@ -25,7 +25,7 @@ import org.apache.druid.collections.bitmap.MutableBitmap;
 
 public class ConciseBitMapFactory implements BitMapFactory
 {
-  private static final BitmapFactory bitmapFactory = new ConciseBitmapFactory();
+  private static final BitmapFactory BITMAP_FACTORY = new ConciseBitmapFactory();
 
   public ConciseBitMapFactory()
   {
@@ -34,7 +34,7 @@ public class ConciseBitMapFactory implements BitMapFactory
   @Override
   public MutableBitmap makeEmptyMutableBitmap()
   {
-    return bitmapFactory.makeEmptyMutableBitmap();
+    return BITMAP_FACTORY.makeEmptyMutableBitmap();
   }
 
   @Override
@@ -25,7 +25,7 @@ import org.apache.druid.collections.bitmap.MutableBitmap;
 
 public class JavaBitMapFactory implements BitMapFactory
 {
-  private static final BitmapFactory bitmapFactory = new BitSetBitmapFactory();
+  private static final BitmapFactory BITMAP_FACTORY = new BitSetBitmapFactory();
 
   public JavaBitMapFactory()
   {
@@ -34,7 +34,7 @@ public class JavaBitMapFactory implements BitMapFactory
   @Override
   public MutableBitmap makeEmptyMutableBitmap()
   {
-    return bitmapFactory.makeEmptyMutableBitmap();
+    return BITMAP_FACTORY.makeEmptyMutableBitmap();
   }
 
   @Override
@@ -25,7 +25,7 @@ import org.apache.druid.collections.bitmap.RoaringBitmapFactory;
 
 public class RoaringBitMapFactory implements BitMapFactory
 {
-  private static final BitmapFactory bitmapFactory = new RoaringBitmapFactory();
+  private static final BitmapFactory BITMAP_FACTORY = new RoaringBitmapFactory();
 
   public RoaringBitMapFactory()
   {
@@ -34,7 +34,7 @@ public class RoaringBitMapFactory implements BitMapFactory
   @Override
   public MutableBitmap makeEmptyMutableBitmap()
   {
-    return bitmapFactory.makeEmptyMutableBitmap();
+    return BITMAP_FACTORY.makeEmptyMutableBitmap();
   }
 
   @Override
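All three BitMapFactory implementations above share one shape: a stateless delegate held in a private static final field, which this change now names in UPPER_SNAKE_CASE. A minimal sketch of the pattern, with BitmapDelegate as a hypothetical stand-in for Druid's BitmapFactory:

    public class SketchBitMapFactory
    {
      // Hypothetical single-method delegate interface.
      interface BitmapDelegate
      {
        Object makeEmpty();
      }

      // One shared, immutable delegate for all instances; the constant-style
      // name signals that the reference never changes.
      private static final BitmapDelegate DELEGATE = () -> new Object();

      public Object makeEmptyMutableBitmap()
      {
        return DELEGATE.makeEmpty();
      }
    }

Because the delegate carries no state, a single shared instance is safe across threads and avoids a per-call allocation.
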
@@ -113,20 +113,20 @@ public class DistinctCountGroupByQueryTest
     );
 
     GroupByQuery query = new GroupByQuery.Builder()
-        .setDataSource(QueryRunnerTestHelper.dataSource)
-        .setGranularity(QueryRunnerTestHelper.allGran)
+        .setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
+        .setGranularity(QueryRunnerTestHelper.ALL_GRAN)
         .setDimensions(new DefaultDimensionSpec(
             client_type,
             client_type
         ))
-        .setInterval(QueryRunnerTestHelper.fullOnIntervalSpec)
+        .setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
         .setLimitSpec(
             new DefaultLimitSpec(
                 Collections.singletonList(new OrderByColumnSpec(client_type, OrderByColumnSpec.Direction.DESCENDING)),
                 10
             )
         )
-        .setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, new DistinctCountAggregatorFactory("UV", visitor_id, null))
+        .setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, new DistinctCountAggregatorFactory("UV", visitor_id, null))
         .build();
     final Segment incrementalIndexSegment = new IncrementalIndexSegment(index, null);
 
@@ -86,12 +86,12 @@ public class DistinctCountTimeseriesQueryTest
     );
 
     TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
-        .dataSource(QueryRunnerTestHelper.dataSource)
-        .granularity(QueryRunnerTestHelper.allGran)
-        .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+        .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+        .granularity(QueryRunnerTestHelper.ALL_GRAN)
+        .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
         .aggregators(
             Lists.newArrayList(
-                QueryRunnerTestHelper.rowsCount,
+                QueryRunnerTestHelper.ROWS_COUNT,
                 new DistinctCountAggregatorFactory("UV", visitor_id, null)
             )
         )
@@ -115,14 +115,14 @@ public class DistinctCountTopNQueryTest
         )
     );
 
-    TopNQuery query = new TopNQueryBuilder().dataSource(QueryRunnerTestHelper.dataSource)
-        .granularity(QueryRunnerTestHelper.allGran)
-        .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+    TopNQuery query = new TopNQueryBuilder().dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+        .granularity(QueryRunnerTestHelper.ALL_GRAN)
+        .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
         .dimension(client_type)
         .metric("UV")
         .threshold(10)
         .aggregators(
-            QueryRunnerTestHelper.rowsCount,
+            QueryRunnerTestHelper.ROWS_COUNT,
             new DistinctCountAggregatorFactory("UV", visitor_id, null)
         )
         .build();
@@ -69,7 +69,7 @@ import java.util.stream.Collectors;
 public class DerivativeDataSourceManager
 {
   private static final EmittingLogger log = new EmittingLogger(DerivativeDataSourceManager.class);
-  private static final AtomicReference<ConcurrentHashMap<String, SortedSet<DerivativeDataSource>>> derivativesRef =
+  private static final AtomicReference<ConcurrentHashMap<String, SortedSet<DerivativeDataSource>>> DERIVATIVES_REF =
       new AtomicReference<>(new ConcurrentHashMap<>());
   private final MaterializedViewConfig config;
   private final Supplier<MetadataStorageTablesConfig> dbTables;
@@ -137,7 +137,7 @@ public class DerivativeDataSourceManager
     started = false;
     future.cancel(true);
     future = null;
-    derivativesRef.set(new ConcurrentHashMap<>());
+    DERIVATIVES_REF.set(new ConcurrentHashMap<>());
     exec.shutdownNow();
     exec = null;
   }
@@ -145,12 +145,12 @@ public class DerivativeDataSourceManager
 
   public static ImmutableSet<DerivativeDataSource> getDerivatives(String datasource)
   {
-    return ImmutableSet.copyOf(derivativesRef.get().getOrDefault(datasource, new TreeSet<>()));
+    return ImmutableSet.copyOf(DERIVATIVES_REF.get().getOrDefault(datasource, new TreeSet<>()));
   }
 
   public static ImmutableMap<String, Set<DerivativeDataSource>> getAllDerivatives()
   {
-    return ImmutableMap.copyOf(derivativesRef.get());
+    return ImmutableMap.copyOf(DERIVATIVES_REF.get());
   }
 
   private void updateDerivatives()
@@ -205,8 +205,8 @@ public class DerivativeDataSourceManager
     }
     ConcurrentHashMap<String, SortedSet<DerivativeDataSource>> current;
     do {
-      current = derivativesRef.get();
-    } while (!derivativesRef.compareAndSet(current, newDerivatives));
+      current = DERIVATIVES_REF.get();
+    } while (!DERIVATIVES_REF.compareAndSet(current, newDerivatives));
   }
 
   /**
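The last DerivativeDataSourceManager hunk is a classic AtomicReference retry loop: read the current map, then compareAndSet it to the freshly built one, retrying if another thread swapped first. A minimal sketch of the same shape, with illustrative names:

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.atomic.AtomicReference;

    public class CasSwapSketch
    {
      // Same shape as DERIVATIVES_REF above: the reference is a constant, the
      // map it points at is swapped wholesale.
      private static final AtomicReference<ConcurrentHashMap<String, String>> REF =
          new AtomicReference<>(new ConcurrentHashMap<>());

      static void publish(ConcurrentHashMap<String, String> fresh)
      {
        ConcurrentHashMap<String, String> current;
        do {
          current = REF.get();
        } while (!REF.compareAndSet(current, fresh));
        // `fresh` does not depend on `current`, so REF.set(fresh) would publish
        // the same result here; the CAS loop only matters when the new value is
        // derived from the old one.
      }

      public static void main(String[] args)
      {
        ConcurrentHashMap<String, String> fresh = new ConcurrentHashMap<>();
        fresh.put("base", "derivative");
        publish(fresh);
        System.out.println(REF.get()); // {base=derivative}
      }
    }
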
@@ -203,7 +203,7 @@ public class DatasourceOptimizerTest extends CuratorTestBase
     // build user query
     TopNQuery userQuery = new TopNQueryBuilder()
         .dataSource("base")
-        .granularity(QueryRunnerTestHelper.allGran)
+        .granularity(QueryRunnerTestHelper.ALL_GRAN)
         .dimension("dim1")
         .metric("cost")
         .threshold(4)
@@ -214,7 +214,7 @@ public class DatasourceOptimizerTest extends CuratorTestBase
     List<Query> expectedQueryAfterOptimizing = Lists.newArrayList(
         new TopNQueryBuilder()
             .dataSource("derivative")
-            .granularity(QueryRunnerTestHelper.allGran)
+            .granularity(QueryRunnerTestHelper.ALL_GRAN)
             .dimension("dim1")
             .metric("cost")
             .threshold(4)
@@ -223,7 +223,7 @@ public class DatasourceOptimizerTest extends CuratorTestBase
             .build(),
         new TopNQueryBuilder()
             .dataSource("base")
-            .granularity(QueryRunnerTestHelper.allGran)
+            .granularity(QueryRunnerTestHelper.ALL_GRAN)
             .dimension("dim1")
             .metric("cost")
             .threshold(4)
@@ -44,10 +44,10 @@ public class MaterializedViewQueryQueryToolChestTest
   public void testMakePostComputeManipulatorFn()
   {
     TimeseriesQuery realQuery = Druids.newTimeseriesQueryBuilder()
-        .dataSource(QueryRunnerTestHelper.dataSource)
-        .granularity(QueryRunnerTestHelper.dayGran)
-        .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
-        .aggregators(QueryRunnerTestHelper.rowsCount)
+        .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+        .granularity(QueryRunnerTestHelper.DAY_GRAN)
+        .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
+        .aggregators(QueryRunnerTestHelper.ROWS_COUNT)
         .descending(true)
         .build();
     MaterializedViewQuery materializedViewQuery = new MaterializedViewQuery(realQuery, null);
@@ -87,7 +87,7 @@ public class MaterializedViewQueryQueryToolChestTest
 
     Assert.assertEquals(postResult.getTimestamp(), result.getTimestamp());
     Assert.assertEquals(postResultMap.size(), 2);
-    Assert.assertEquals(postResultMap.get(QueryRunnerTestHelper.rowsCount.getName()), "metricvalue1");
+    Assert.assertEquals(postResultMap.get(QueryRunnerTestHelper.ROWS_COUNT.getName()), "metricvalue1");
     Assert.assertEquals(postResultMap.get("dim1"), "dimvalue1");
   }
 }
@@ -43,15 +43,15 @@ import java.io.IOException;
 
 public class MaterializedViewQueryTest
 {
-  private static final ObjectMapper jsonMapper = TestHelper.makeJsonMapper();
+  private static final ObjectMapper JSON_MAPPER = TestHelper.makeJsonMapper();
   private DataSourceOptimizer optimizer;
 
   @Before
   public void setUp()
   {
-    jsonMapper.registerSubtypes(new NamedType(MaterializedViewQuery.class, MaterializedViewQuery.TYPE));
+    JSON_MAPPER.registerSubtypes(new NamedType(MaterializedViewQuery.class, MaterializedViewQuery.TYPE));
     optimizer = EasyMock.createMock(DataSourceOptimizer.class);
-    jsonMapper.setInjectableValues(
+    JSON_MAPPER.setInjectableValues(
         new InjectableValues.Std()
             .addValue(ExprMacroTable.class.getName(), LookupEnabledTestExprMacroTable.INSTANCE)
             .addValue(DataSourceOptimizer.class, optimizer)
@@ -62,16 +62,16 @@ public class MaterializedViewQueryTest
   public void testQuerySerialization() throws IOException
   {
     TopNQuery topNQuery = new TopNQueryBuilder()
-        .dataSource(QueryRunnerTestHelper.dataSource)
-        .granularity(QueryRunnerTestHelper.allGran)
-        .dimension(QueryRunnerTestHelper.marketDimension)
-        .metric(QueryRunnerTestHelper.indexMetric)
+        .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+        .granularity(QueryRunnerTestHelper.ALL_GRAN)
+        .dimension(QueryRunnerTestHelper.MARKET_DIMENSION)
+        .metric(QueryRunnerTestHelper.INDEX_METRIC)
         .threshold(4)
-        .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+        .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
         .aggregators(
             Lists.newArrayList(
                 Iterables.concat(
-                    QueryRunnerTestHelper.commonDoubleAggregators,
+                    QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS,
                     Lists.newArrayList(
                         new DoubleMaxAggregatorFactory("maxIndex", "index"),
                         new DoubleMinAggregatorFactory("minIndex", "index")
@@ -79,14 +79,14 @@ public class MaterializedViewQueryTest
                 )
             )
         )
-        .postAggregators(QueryRunnerTestHelper.addRowsIndexConstant)
+        .postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
         .build();
     MaterializedViewQuery query = new MaterializedViewQuery(topNQuery, optimizer);
-    String json = jsonMapper.writeValueAsString(query);
-    Query serdeQuery = jsonMapper.readValue(json, Query.class);
+    String json = JSON_MAPPER.writeValueAsString(query);
+    Query serdeQuery = JSON_MAPPER.readValue(json, Query.class);
     Assert.assertEquals(query, serdeQuery);
-    Assert.assertEquals(new TableDataSource(QueryRunnerTestHelper.dataSource), query.getDataSource());
-    Assert.assertEquals(QueryRunnerTestHelper.allGran, query.getGranularity());
-    Assert.assertEquals(QueryRunnerTestHelper.fullOnIntervalSpec.getIntervals(), query.getIntervals());
+    Assert.assertEquals(new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE), query.getDataSource());
+    Assert.assertEquals(QueryRunnerTestHelper.ALL_GRAN, query.getGranularity());
+    Assert.assertEquals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC.getIntervals(), query.getIntervals());
   }
 }
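The MaterializedViewQueryTest hunks above exercise a Jackson polymorphic serde round trip: register a named subtype on a shared ObjectMapper, serialize, read back through the base type, and compare. A minimal sketch of the same mechanism; Shape and Circle are hypothetical stand-ins for the Query types, only the Jackson calls are real API:

    import com.fasterxml.jackson.annotation.JsonProperty;
    import com.fasterxml.jackson.annotation.JsonTypeInfo;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.jsontype.NamedType;

    public class RoundTripSketch
    {
      @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
      interface Shape
      {
      }

      static class Circle implements Shape
      {
        @JsonProperty
        double radius;
      }

      private static final ObjectMapper JSON_MAPPER = new ObjectMapper();

      public static void main(String[] args) throws Exception
      {
        // Same mechanism as JSON_MAPPER.registerSubtypes(new NamedType(...)) above.
        JSON_MAPPER.registerSubtypes(new NamedType(Circle.class, "circle"));

        Circle in = new Circle();
        in.radius = 2.0;
        String json = JSON_MAPPER.writeValueAsString(in);  // {"type":"circle","radius":2.0}
        Shape out = JSON_MAPPER.readValue(json, Shape.class);
        System.out.println(json + " -> " + out.getClass().getSimpleName());
      }
    }
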
@@ -60,22 +60,22 @@ public class MovingAverageIterableTest
   private static final String AGE = "age";
   private static final String COUNTRY = "country";
 
-  private static final Map<String, Object> dims1 = new HashMap<>();
-  private static final Map<String, Object> dims2 = new HashMap<>();
-  private static final Map<String, Object> dims3 = new HashMap<>();
+  private static final Map<String, Object> DIMS1 = new HashMap<>();
+  private static final Map<String, Object> DIMS2 = new HashMap<>();
+  private static final Map<String, Object> DIMS3 = new HashMap<>();
 
   static {
-    dims1.put(GENDER, "m");
-    dims1.put(AGE, "10");
-    dims1.put(COUNTRY, "US");
+    DIMS1.put(GENDER, "m");
+    DIMS1.put(AGE, "10");
+    DIMS1.put(COUNTRY, "US");
 
-    dims2.put(GENDER, "f");
-    dims2.put(AGE, "8");
-    dims2.put(COUNTRY, "US");
+    DIMS2.put(GENDER, "f");
+    DIMS2.put(AGE, "8");
+    DIMS2.put(COUNTRY, "US");
 
-    dims3.put(GENDER, "u");
-    dims3.put(AGE, "5");
-    dims3.put(COUNTRY, "UK");
+    DIMS3.put(GENDER, "u");
+    DIMS3.put(AGE, "5");
+    DIMS3.put(COUNTRY, "UK");
   }
 
   @Test
@@ -90,16 +90,16 @@ public class MovingAverageIterableTest
 
     Sequence<RowBucket> dayBuckets = Sequences.simple(Arrays.asList(
         new RowBucket(JAN_1, Arrays.asList(
-            new MapBasedRow(JAN_1, dims1),
-            new MapBasedRow(JAN_1, dims2)
+            new MapBasedRow(JAN_1, DIMS1),
+            new MapBasedRow(JAN_1, DIMS2)
         )),
         new RowBucket(JAN_2, Collections.singletonList(
-            new MapBasedRow(JAN_2, dims1)
+            new MapBasedRow(JAN_2, DIMS1)
         )),
         new RowBucket(JAN_3, Collections.emptyList()),
         new RowBucket(JAN_4, Arrays.asList(
-            new MapBasedRow(JAN_4, dims2),
-            new MapBasedRow(JAN_4, dims3)
+            new MapBasedRow(JAN_4, DIMS2),
+            new MapBasedRow(JAN_4, DIMS3)
         ))
     ));
 
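Worth noting about the DIMS1/DIMS2/DIMS3 hunks above: `final` only pins the reference, not the contents — these HashMaps stay mutable and are populated in a static initializer, yet a static-final naming convention still applies to them because it keys off the modifiers, not off actual immutability. A minimal sketch of the pattern:

    import java.util.HashMap;
    import java.util.Map;

    public class ConstantMapSketch
    {
      // static final reference to a mutable map; constant-style name applies.
      private static final Map<String, Object> DIMS = new HashMap<>();

      static {
        DIMS.put("gender", "m");
        DIMS.put("age", "10");
      }

      public static void main(String[] args)
      {
        System.out.println(DIMS); // {gender=m, age=10} (iteration order may vary)
      }
    }
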
@@ -125,7 +125,7 @@ public class MapVirtualColumnGroupByTest
   public void testWithMapColumn()
   {
     final GroupByQuery query = new GroupByQuery(
-        new TableDataSource(QueryRunnerTestHelper.dataSource),
+        new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
         new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2011/2012"))),
         VirtualColumns.create(ImmutableList.of(new MapVirtualColumn("keys", "values", "params"))),
         null,
@@ -148,7 +148,7 @@ public class MapVirtualColumnGroupByTest
   public void testWithSubColumn()
   {
     final GroupByQuery query = new GroupByQuery(
-        new TableDataSource(QueryRunnerTestHelper.dataSource),
+        new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
         new MultipleIntervalSegmentSpec(ImmutableList.of(Intervals.of("2011/2012"))),
         VirtualColumns.create(ImmutableList.of(new MapVirtualColumn("keys", "values", "params"))),
         null,
@@ -133,9 +133,9 @@ public class MapVirtualColumnSelectTest
   private Druids.SelectQueryBuilder testBuilder()
   {
     return Druids.newSelectQueryBuilder()
-        .dataSource(QueryRunnerTestHelper.dataSource)
-        .granularity(QueryRunnerTestHelper.allGran)
-        .intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
+        .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
+        .granularity(QueryRunnerTestHelper.ALL_GRAN)
+        .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
         .pagingSpec(new PagingSpec(null, 3));
   }
 
@@ -197,7 +197,7 @@ public class MapVirtualColumnSelectTest
     Assert.assertEquals(expected.size(), events.size());
     for (int i = 0; i < events.size(); i++) {
       Map event = events.get(i).getEvent();
-      event.remove(EventHolder.timestampKey);
+      event.remove(EventHolder.TIMESTAMP_KEY);
       Assert.assertEquals(expected.get(i), event);
     }
   }
@@ -85,7 +85,7 @@ public class MapVirtualColumnTopNTest
   public void testWithMapColumn()
   {
     final TopNQuery query = new TopNQuery(
-        new TableDataSource(QueryRunnerTestHelper.dataSource),
+        new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
         VirtualColumns.create(
             ImmutableList.of(
                 new MapVirtualColumn("keys", "values", "params")
@@ -111,7 +111,7 @@ public class MapVirtualColumnTopNTest
   public void testWithSubColumn()
   {
     final TopNQuery query = new TopNQuery(
-        new TableDataSource(QueryRunnerTestHelper.dataSource),
+        new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE),
         VirtualColumns.create(
             ImmutableList.of(
                 new MapVirtualColumn("keys", "values", "params")
@@ -39,7 +39,7 @@ import java.nio.ByteBuffer;
 public class DoublesSketchComplexMetricSerde extends ComplexMetricSerde
 {
 
-  private static final DoublesSketchObjectStrategy strategy = new DoublesSketchObjectStrategy();
+  private static final DoublesSketchObjectStrategy STRATEGY = new DoublesSketchObjectStrategy();
 
   @Override
   public String getTypeName()
@@ -50,7 +50,7 @@ public class DoublesSketchComplexMetricSerde extends ComplexMetricSerde
   @Override
   public ObjectStrategy<DoublesSketch> getObjectStrategy()
   {
-    return strategy;
+    return STRATEGY;
   }
 
   @Override
@@ -105,7 +105,7 @@ public class DoublesSketchComplexMetricSerde extends ComplexMetricSerde
   @Override
   public void deserializeColumn(final ByteBuffer buffer, final ColumnBuilder builder)
   {
-    final GenericIndexed<DoublesSketch> column = GenericIndexed.read(buffer, strategy, builder.getFileMapper());
+    final GenericIndexed<DoublesSketch> column = GenericIndexed.read(buffer, STRATEGY, builder.getFileMapper());
     builder.setComplexColumnSupplier(new ComplexColumnPartSupplier(getTypeName(), column));
   }
 
@@ -57,14 +57,14 @@ import java.util.stream.IntStream;
 
 public class BloomFilterAggregatorTest
 {
-  private static final String nullish = NullHandling.replaceWithDefault() ? "" : null;
-  private static final List<String[]> values1 = dimensionValues(
+  private static final String NULLISH = NullHandling.replaceWithDefault() ? "" : null;
+  private static final List<String[]> VALUES1 = dimensionValues(
       "a",
       "b",
       "c",
       "a",
      "a",
-      nullish,
+      NULLISH,
       "b",
       "b",
       "b",
@@ -72,7 +72,7 @@ public class BloomFilterAggregatorTest
       "a",
       "a"
   );
-  private static final List<String[]> values2 = dimensionValues(
+  private static final List<String[]> VALUES2 = dimensionValues(
       "a",
       "b",
       "c",
@@ -80,17 +80,17 @@ public class BloomFilterAggregatorTest
       "a",
       "e",
       "b",
-      new String[]{nullish, "x"},
-      new String[]{"x", nullish},
+      new String[]{NULLISH, "x"},
+      new String[]{"x", NULLISH},
       new String[]{"y", "x"},
       new String[]{"x", "y"},
       new String[]{"x", "y", "a"}
   );
-  private static final Double[] doubleValues1 = new Double[]{0.1, 1.5, 18.3, 0.1};
-  private static final Float[] floatValues1 = new Float[]{0.4f, 0.8f, 23.2f};
-  private static final Long[] longValues1 = new Long[]{10241L, 12312355L, 0L, 81L};
+  private static final Double[] DOUBLE_VALUES1 = new Double[]{0.1, 1.5, 18.3, 0.1};
+  private static final Float[] FLOAT_VALUES1 = new Float[]{0.4f, 0.8f, 23.2f};
+  private static final Long[] LONG_VALUES1 = new Long[]{10241L, 12312355L, 0L, 81L};
 
-  private static final int maxNumValues = 15;
+  private static final int MAX_NUM_VALUES = 15;
 
   private static BloomKFilter filter1;
   private static BloomKFilter filter2;
@@ -104,31 +104,31 @@ public class BloomFilterAggregatorTest
 
   static {
     try {
-      filter1 = new BloomKFilter(maxNumValues);
-      filter2 = new BloomKFilter(maxNumValues);
-      BloomKFilter combinedValuesFilter = new BloomKFilter(maxNumValues);
+      filter1 = new BloomKFilter(MAX_NUM_VALUES);
+      filter2 = new BloomKFilter(MAX_NUM_VALUES);
+      BloomKFilter combinedValuesFilter = new BloomKFilter(MAX_NUM_VALUES);
 
-      createStringFilter(values1, filter1, combinedValuesFilter);
-      createStringFilter(values2, filter2, combinedValuesFilter);
+      createStringFilter(VALUES1, filter1, combinedValuesFilter);
+      createStringFilter(VALUES2, filter2, combinedValuesFilter);
 
       serializedFilter1 = filterToString(filter1);
       serializedFilter2 = filterToString(filter2);
       serializedCombinedFilter = filterToString(combinedValuesFilter);
 
-      BloomKFilter longFilter = new BloomKFilter(maxNumValues);
-      for (long val : longValues1) {
+      BloomKFilter longFilter = new BloomKFilter(MAX_NUM_VALUES);
+      for (long val : LONG_VALUES1) {
        longFilter.addLong(val);
      }
      serializedLongFilter = filterToString(longFilter);
 
-      BloomKFilter floatFilter = new BloomKFilter(maxNumValues);
-      for (float val : floatValues1) {
+      BloomKFilter floatFilter = new BloomKFilter(MAX_NUM_VALUES);
+      for (float val : FLOAT_VALUES1) {
        floatFilter.addFloat(val);
      }
      serializedFloatFilter = filterToString(floatFilter);
 
-      BloomKFilter doubleFilter = new BloomKFilter(maxNumValues);
-      for (double val : doubleValues1) {
+      BloomKFilter doubleFilter = new BloomKFilter(MAX_NUM_VALUES);
+      for (double val : DOUBLE_VALUES1) {
        doubleFilter.addDouble(val);
      }
      serializedDoubleFilter = filterToString(doubleFilter);
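These hunks also show which fields a static-final naming check actually touches: MAX_NUM_VALUES and the VALUES arrays are both static and final, while filter1/filter2 and the serialized strings are assigned inside the try/catch of the static block (so they cannot be declared final) and keep their camelCase names. A minimal sketch of that scope, under the assumption that the check is limited to static final fields:

    public class NamingScopeSketch
    {
      private static final int MAX_NUM_VALUES = 15;  // static + final: renamed to UPPER_SNAKE_CASE

      private static String serializedFilter1;       // static but not final: name left alone

      private final int instanceField = 1;           // final but not static: name left alone

      static {
        try {
          serializedFilter1 = String.valueOf(MAX_NUM_VALUES);
        }
        catch (RuntimeException e) {
          // a catch path that may skip the assignment is why such a field
          // cannot be declared final
        }
      }

      public static void main(String[] args)
      {
        System.out.println(serializedFilter1 + " / " + new NamingScopeSketch().instanceField);
      }
    }
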
@@ -232,7 +232,7 @@ public class BloomFilterAggregatorTest
     valueAggregatorFactory = new BloomFilterAggregatorFactory(
         "billy",
         dimSpec,
-        maxNumValues
+        MAX_NUM_VALUES
     );
   }
 
@@ -240,10 +240,10 @@ public class BloomFilterAggregatorTest
   @Test
   public void testAggregateValues() throws IOException
   {
-    DimensionSelector dimSelector = new CardinalityAggregatorTest.TestDimensionSelector(values1, null);
-    StringBloomFilterAggregator agg = new StringBloomFilterAggregator(dimSelector, maxNumValues, true);
+    DimensionSelector dimSelector = new CardinalityAggregatorTest.TestDimensionSelector(VALUES1, null);
+    StringBloomFilterAggregator agg = new StringBloomFilterAggregator(dimSelector, MAX_NUM_VALUES, true);
 
-    for (int i = 0; i < values1.size(); ++i) {
+    for (int i = 0; i < VALUES1.size(); ++i) {
       aggregateDimension(Collections.singletonList(dimSelector), agg);
     }
 
@@ -257,10 +257,10 @@ public class BloomFilterAggregatorTest
   @Test
   public void testAggregateLongValues() throws IOException
   {
-    TestLongColumnSelector selector = new TestLongColumnSelector(Arrays.asList(longValues1));
-    LongBloomFilterAggregator agg = new LongBloomFilterAggregator(selector, maxNumValues, true);
+    TestLongColumnSelector selector = new TestLongColumnSelector(Arrays.asList(LONG_VALUES1));
+    LongBloomFilterAggregator agg = new LongBloomFilterAggregator(selector, MAX_NUM_VALUES, true);
 
-    for (Long ignored : longValues1) {
+    for (Long ignored : LONG_VALUES1) {
       aggregateColumn(Collections.singletonList(selector), agg);
     }
 
@@ -274,10 +274,10 @@ public class BloomFilterAggregatorTest
   @Test
   public void testAggregateFloatValues() throws IOException
   {
-    TestFloatColumnSelector selector = new TestFloatColumnSelector(Arrays.asList(floatValues1));
-    FloatBloomFilterAggregator agg = new FloatBloomFilterAggregator(selector, maxNumValues, true);
+    TestFloatColumnSelector selector = new TestFloatColumnSelector(Arrays.asList(FLOAT_VALUES1));
+    FloatBloomFilterAggregator agg = new FloatBloomFilterAggregator(selector, MAX_NUM_VALUES, true);
 
-    for (Float ignored : floatValues1) {
+    for (Float ignored : FLOAT_VALUES1) {
       aggregateColumn(Collections.singletonList(selector), agg);
     }
 
@@ -291,10 +291,10 @@ public class BloomFilterAggregatorTest
   @Test
   public void testAggregateDoubleValues() throws IOException
   {
-    TestDoubleColumnSelector selector = new TestDoubleColumnSelector(Arrays.asList(doubleValues1));
-    DoubleBloomFilterAggregator agg = new DoubleBloomFilterAggregator(selector, maxNumValues, true);
+    TestDoubleColumnSelector selector = new TestDoubleColumnSelector(Arrays.asList(DOUBLE_VALUES1));
+    DoubleBloomFilterAggregator agg = new DoubleBloomFilterAggregator(selector, MAX_NUM_VALUES, true);
 
-    for (Double ignored : doubleValues1) {
+    for (Double ignored : DOUBLE_VALUES1) {
       aggregateColumn(Collections.singletonList(selector), agg);
     }
 
@@ -308,8 +308,8 @@ public class BloomFilterAggregatorTest
   @Test
   public void testBufferAggregateStringValues() throws IOException
   {
-    DimensionSelector dimSelector = new CardinalityAggregatorTest.TestDimensionSelector(values2, null);
-    StringBloomFilterAggregator agg = new StringBloomFilterAggregator(dimSelector, maxNumValues, true);
+    DimensionSelector dimSelector = new CardinalityAggregatorTest.TestDimensionSelector(VALUES2, null);
+    StringBloomFilterAggregator agg = new StringBloomFilterAggregator(dimSelector, MAX_NUM_VALUES, true);
 
     int maxSize = valueAggregatorFactory.getMaxIntermediateSizeWithNulls();
     ByteBuffer buf = ByteBuffer.allocate(maxSize + 64);
@@ -318,7 +318,7 @@ public class BloomFilterAggregatorTest
 
     agg.init(buf, pos);
 
-    for (int i = 0; i < values2.size(); ++i) {
+    for (int i = 0; i < VALUES2.size(); ++i) {
       bufferAggregateDimension(Collections.singletonList(dimSelector), agg, buf, pos);
     }
     BloomKFilter bloomKFilter = BloomKFilter.deserialize(
@@ -331,8 +331,8 @@ public class BloomFilterAggregatorTest
   @Test
   public void testBufferAggregateLongValues() throws IOException
   {
-    TestLongColumnSelector selector = new TestLongColumnSelector(Arrays.asList(longValues1));
-    LongBloomFilterAggregator agg = new LongBloomFilterAggregator(selector, maxNumValues, true);
+    TestLongColumnSelector selector = new TestLongColumnSelector(Arrays.asList(LONG_VALUES1));
+    LongBloomFilterAggregator agg = new LongBloomFilterAggregator(selector, MAX_NUM_VALUES, true);
 
     int maxSize = valueAggregatorFactory.getMaxIntermediateSizeWithNulls();
     ByteBuffer buf = ByteBuffer.allocate(maxSize + 64);
@@ -341,7 +341,7 @@ public class BloomFilterAggregatorTest
 
     agg.init(buf, pos);
 
-    IntStream.range(0, longValues1.length)
+    IntStream.range(0, LONG_VALUES1.length)
              .forEach(i -> bufferAggregateColumn(Collections.singletonList(selector), agg, buf, pos));
     BloomKFilter bloomKFilter = BloomKFilter.deserialize(
         (ByteBuffer) valueAggregatorFactory.finalizeComputation(agg.get(buf, pos))
@@ -353,8 +353,8 @@ public class BloomFilterAggregatorTest
   @Test
   public void testBufferAggregateFloatValues() throws IOException
   {
-    TestFloatColumnSelector selector = new TestFloatColumnSelector(Arrays.asList(floatValues1));
-    FloatBloomFilterAggregator agg = new FloatBloomFilterAggregator(selector, maxNumValues, true);
+    TestFloatColumnSelector selector = new TestFloatColumnSelector(Arrays.asList(FLOAT_VALUES1));
+    FloatBloomFilterAggregator agg = new FloatBloomFilterAggregator(selector, MAX_NUM_VALUES, true);
 
     int maxSize = valueAggregatorFactory.getMaxIntermediateSizeWithNulls();
     ByteBuffer buf = ByteBuffer.allocate(maxSize + 64);
@@ -363,7 +363,7 @@ public class BloomFilterAggregatorTest
 
     agg.init(buf, pos);
 
-    IntStream.range(0, floatValues1.length)
+    IntStream.range(0, FLOAT_VALUES1.length)
              .forEach(i -> bufferAggregateColumn(Collections.singletonList(selector), agg, buf, pos));
     BloomKFilter bloomKFilter = BloomKFilter.deserialize(
         (ByteBuffer) valueAggregatorFactory.finalizeComputation(agg.get(buf, pos))
@@ -375,8 +375,8 @@ public class BloomFilterAggregatorTest
   @Test
   public void testBufferAggregateDoubleValues() throws IOException
   {
-    TestDoubleColumnSelector selector = new TestDoubleColumnSelector(Arrays.asList(doubleValues1));
-    DoubleBloomFilterAggregator agg = new DoubleBloomFilterAggregator(selector, maxNumValues, true);
+    TestDoubleColumnSelector selector = new TestDoubleColumnSelector(Arrays.asList(DOUBLE_VALUES1));
+    DoubleBloomFilterAggregator agg = new DoubleBloomFilterAggregator(selector, MAX_NUM_VALUES, true);
 
     int maxSize = valueAggregatorFactory.getMaxIntermediateSizeWithNulls();
     ByteBuffer buf = ByteBuffer.allocate(maxSize + 64);
@@ -385,7 +385,7 @@ public class BloomFilterAggregatorTest
 
     agg.init(buf, pos);
 
-    IntStream.range(0, doubleValues1.length)
+    IntStream.range(0, DOUBLE_VALUES1.length)
              .forEach(i -> bufferAggregateColumn(Collections.singletonList(selector), agg, buf, pos));
     BloomKFilter bloomKFilter = BloomKFilter.deserialize(
         (ByteBuffer) valueAggregatorFactory.finalizeComputation(agg.get(buf, pos))
@@ -397,16 +397,16 @@ public class BloomFilterAggregatorTest
   @Test
   public void testCombineValues() throws IOException
   {
-    DimensionSelector dimSelector1 = new CardinalityAggregatorTest.TestDimensionSelector(values1, null);
-    DimensionSelector dimSelector2 = new CardinalityAggregatorTest.TestDimensionSelector(values2, null);
+    DimensionSelector dimSelector1 = new CardinalityAggregatorTest.TestDimensionSelector(VALUES1, null);
+    DimensionSelector dimSelector2 = new CardinalityAggregatorTest.TestDimensionSelector(VALUES2, null);
 
-    StringBloomFilterAggregator agg1 = new StringBloomFilterAggregator(dimSelector1, maxNumValues, true);
-    StringBloomFilterAggregator agg2 = new StringBloomFilterAggregator(dimSelector2, maxNumValues, true);
+    StringBloomFilterAggregator agg1 = new StringBloomFilterAggregator(dimSelector1, MAX_NUM_VALUES, true);
+    StringBloomFilterAggregator agg2 = new StringBloomFilterAggregator(dimSelector2, MAX_NUM_VALUES, true);
 
-    for (int i = 0; i < values1.size(); ++i) {
+    for (int i = 0; i < VALUES1.size(); ++i) {
       aggregateDimension(Collections.singletonList(dimSelector1), agg1);
     }
-    for (int i = 0; i < values2.size(); ++i) {
+    for (int i = 0; i < VALUES2.size(); ++i) {
       aggregateDimension(Collections.singletonList(dimSelector2), agg2);
     }
 
@@ -435,7 +435,7 @@ public class BloomFilterAggregatorTest
     );
 
     BloomFilterMergeAggregator mergeAggregator =
-        new BloomFilterMergeAggregator(mergeDim, maxNumValues, true);
+        new BloomFilterMergeAggregator(mergeDim, MAX_NUM_VALUES, true);
 
     for (int i = 0; i < 2; ++i) {
       aggregateColumn(Collections.singletonList(mergeDim), mergeAggregator);
@@ -461,7 +461,7 @@ public class BloomFilterAggregatorTest
     );
 
     BloomFilterMergeAggregator mergeAggregator =
-        new BloomFilterMergeAggregator(mergeDim, maxNumValues, true);
+        new BloomFilterMergeAggregator(mergeDim, MAX_NUM_VALUES, true);
 
     for (int i = 0; i < 2; ++i) {
       aggregateColumn(Collections.singletonList(mergeDim), mergeAggregator);
@@ -486,7 +486,7 @@ public class BloomFilterAggregatorTest
         )
     );
 
-    BloomFilterMergeAggregator mergeAggregator = new BloomFilterMergeAggregator(mergeDim, maxNumValues, false);
+    BloomFilterMergeAggregator mergeAggregator = new BloomFilterMergeAggregator(mergeDim, MAX_NUM_VALUES, false);
 
     int maxSize = valueAggregatorFactory.getCombiningFactory().getMaxIntermediateSizeWithNulls();
     ByteBuffer buf = ByteBuffer.allocate(maxSize + 64);
@@ -513,7 +513,7 @@ public class BloomFilterAggregatorTest
     BloomFilterAggregatorFactory factory = new BloomFilterAggregatorFactory(
         "billy",
         new DefaultDimensionSpec("b", "b"),
-        maxNumValues
+        MAX_NUM_VALUES
     );
     ObjectMapper objectMapper = new DefaultObjectMapper();
     new BloomFilterExtensionModule().getJacksonModules().forEach(objectMapper::registerModule);
@@ -536,7 +536,7 @@ public class BloomFilterAggregatorTest
     BloomFilterAggregatorFactory factory2 = new BloomFilterAggregatorFactory(
         "billy",
         new ExtractionDimensionSpec("b", "b", new RegexDimExtractionFn(".*", false, null)),
-        maxNumValues
+        MAX_NUM_VALUES
     );
 
     Assert.assertEquals(
@@ -547,7 +547,7 @@ public class BloomFilterAggregatorTest
     BloomFilterAggregatorFactory factory3 = new BloomFilterAggregatorFactory(
         "billy",
         new RegexFilteredDimensionSpec(new DefaultDimensionSpec("a", "a"), ".*"),
-        maxNumValues
+        MAX_NUM_VALUES
     );
     Assert.assertEquals(
         factory3,
@@ -53,13 +53,13 @@ import java.util.List;
 @RunWith(Parameterized.class)
 public class BloomFilterGroupByQueryTest
 {
-  private static final BloomFilterExtensionModule module = new BloomFilterExtensionModule();
+  private static final BloomFilterExtensionModule MODULE = new BloomFilterExtensionModule();
 
   static {
     // throwaway, just using to properly initialize jackson modules
     Guice.createInjector(
         binder -> binder.bind(Key.get(ObjectMapper.class, Json.class)).toInstance(TestHelper.makeJsonMapper()),
-        module
+        MODULE
     );
   }
 
@@ -72,7 +72,7 @@ public class BloomFilterGroupByQueryTest
   public BloomFilterGroupByQueryTest(final GroupByQueryConfig config)
   {
     helper = AggregationTestHelper.createGroupByQueryAggregationTestHelper(
-        Lists.newArrayList(module.getJacksonModules()),
+        Lists.newArrayList(MODULE.getJacksonModules()),
         config,
         tempFolder
     );
@@ -96,7 +96,7 @@ public class BloomFilterSqlAggregatorTest
 {
   private static final int TEST_NUM_ENTRIES = 1000;
   private static AuthenticationResult authenticationResult = CalciteTests.REGULAR_USER_AUTH_RESULT;
-  private static final Injector injector = Guice.createInjector(
+  private static final Injector INJECTOR = Guice.createInjector(
       binder -> {
         binder.bind(Key.get(ObjectMapper.class, Json.class)).toInstance(TestHelper.makeJsonMapper());
         binder.bind(LookupExtractorFactoryContainerProvider.class).toInstance(
@@ -111,7 +111,7 @@ public class BloomFilterSqlAggregatorTest
   );
 
   private static ObjectMapper jsonMapper =
-      injector
+      INJECTOR
           .getInstance(Key.get(ObjectMapper.class, Json.class))
           .registerModules(Collections.singletonList(new BloomFilterSerializersModule()));
 
@@ -64,7 +64,7 @@ import java.util.Map;
 
 public class BloomDimFilterSqlTest extends BaseCalciteQueryTest
 {
-  private static final Injector injector = Guice.createInjector(
+  private static final Injector INJECTOR = Guice.createInjector(
      binder -> {
        binder.bind(Key.get(ObjectMapper.class, Json.class)).toInstance(TestHelper.makeJsonMapper());
        binder.bind(LookupExtractorFactoryContainerProvider.class).toInstance(
@@ -80,7 +80,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest
   );
 
   private static ObjectMapper jsonMapper =
-      injector
+      INJECTOR
          .getInstance(Key.get(ObjectMapper.class, Json.class))
          .registerModules(Collections.singletonList(new BloomFilterSerializersModule()));
 
@@ -88,10 +88,10 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest
   {
     final List<ExprMacroTable.ExprMacro> exprMacros = new ArrayList<>();
     for (Class<? extends ExprMacroTable.ExprMacro> clazz : ExpressionModule.EXPR_MACROS) {
-      exprMacros.add(injector.getInstance(clazz));
+      exprMacros.add(INJECTOR.getInstance(clazz));
     }
-    exprMacros.add(injector.getInstance(BloomFilterExprMacro.class));
-    exprMacros.add(injector.getInstance(LookupExprMacro.class));
+    exprMacros.add(INJECTOR.getInstance(BloomFilterExprMacro.class));
+    exprMacros.add(INJECTOR.getInstance(LookupExprMacro.class));
     return new ExprMacroTable(exprMacros);
   }
 
@@ -278,7 +278,7 @@ public class BloomDimFilterSqlTest extends BaseCalciteQueryTest
   {
     final DruidOperatorTable operatorTable = new DruidOperatorTable(
         ImmutableSet.of(),
-        ImmutableSet.of(injector.getInstance(BloomFilterOperatorConversion.class))
+        ImmutableSet.of(INJECTOR.getInstance(BloomFilterOperatorConversion.class))
     );
     return getResults(
         plannerConfig,
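The two Bloom filter SQL tests above build a static Guice injector the same way: bind an instance under a binding annotation via Key.get, then pull it back out with getInstance. A minimal sketch of that pattern; @Marked is a hypothetical stand-in for Druid's @Json annotation, the Guice calls themselves are real API:

    import com.google.inject.BindingAnnotation;
    import com.google.inject.Guice;
    import com.google.inject.Injector;
    import com.google.inject.Key;
    import java.lang.annotation.Retention;
    import java.lang.annotation.RetentionPolicy;

    public class InjectorSketch
    {
      @BindingAnnotation
      @Retention(RetentionPolicy.RUNTIME)
      @interface Marked
      {
      }

      // Constant-style name, as in the renamed INJECTOR fields above.
      private static final Injector INJECTOR = Guice.createInjector(
          binder -> binder.bind(Key.get(String.class, Marked.class)).toInstance("bound instance")
      );

      public static void main(String[] args)
      {
        // getInstance with the same Key returns the bound instance.
        System.out.println(INJECTOR.getInstance(Key.get(String.class, Marked.class)));
      }
    }
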
@ -52,7 +52,7 @@ import java.util.List;
|
||||||
@RunWith(Parameterized.class)
|
@RunWith(Parameterized.class)
|
||||||
public class ApproximateHistogramGroupByQueryTest
|
public class ApproximateHistogramGroupByQueryTest
|
||||||
{
|
{
|
||||||
private static final Closer resourceCloser = Closer.create();
|
private static final Closer RESOURCE_CLOSER = Closer.create();
|
||||||
|
|
||||||
private final QueryRunner<Row> runner;
|
private final QueryRunner<Row> runner;
|
||||||
private final GroupByQueryRunnerFactory factory;
|
private final GroupByQueryRunnerFactory factory;
|
||||||
|
@ -124,7 +124,7 @@ public class ApproximateHistogramGroupByQueryTest
|
||||||
config
|
config
|
||||||
);
|
);
|
||||||
final GroupByQueryRunnerFactory factory = factoryAndCloser.lhs;
|
final GroupByQueryRunnerFactory factory = factoryAndCloser.lhs;
|
||||||
resourceCloser.register(factoryAndCloser.rhs);
|
RESOURCE_CLOSER.register(factoryAndCloser.rhs);
|
||||||
for (QueryRunner<ResultRow> runner : QueryRunnerTestHelper.makeQueryRunners(factory)) {
|
for (QueryRunner<ResultRow> runner : QueryRunnerTestHelper.makeQueryRunners(factory)) {
final String testName = StringUtils.format(
final String testName = StringUtils.format(
"config=%s, runner=%s",
"config=%s, runner=%s",
@ -152,7 +152,7 @@ public class ApproximateHistogramGroupByQueryTest
@After
@After
public void teardown() throws IOException
public void teardown() throws IOException
{
{
resourceCloser.close();
RESOURCE_CLOSER.close();
}
}

@Test
@Test
@ -169,18 +169,18 @@ public class ApproximateHistogramGroupByQueryTest
);
);

GroupByQuery query = new GroupByQuery.Builder()
GroupByQuery query = new GroupByQuery.Builder()
.setDataSource(QueryRunnerTestHelper.dataSource)
.setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
.setGranularity(QueryRunnerTestHelper.allGran).setDimensions(new DefaultDimensionSpec(
.setGranularity(QueryRunnerTestHelper.ALL_GRAN).setDimensions(new DefaultDimensionSpec(
QueryRunnerTestHelper.marketDimension,
QueryRunnerTestHelper.MARKET_DIMENSION,
"marketalias"
"marketalias"
))
))
.setInterval(QueryRunnerTestHelper.fullOnIntervalSpec)
.setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
.setLimitSpec(
.setLimitSpec(
new DefaultLimitSpec(
new DefaultLimitSpec(
Collections.singletonList(new OrderByColumnSpec("marketalias", OrderByColumnSpec.Direction.DESCENDING)),
Collections.singletonList(new OrderByColumnSpec("marketalias", OrderByColumnSpec.Direction.DESCENDING)),
1
1
)
)
).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, aggFactory)
).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, aggFactory)
.setPostAggregatorSpecs(
.setPostAggregatorSpecs(
Collections.singletonList(
Collections.singletonList(
new QuantilePostAggregator("quantile", "apphisto", 0.5f)
new QuantilePostAggregator("quantile", "apphisto", 0.5f)
@ -230,18 +230,18 @@ public class ApproximateHistogramGroupByQueryTest
);
);

GroupByQuery query = new GroupByQuery.Builder()
GroupByQuery query = new GroupByQuery.Builder()
.setDataSource(QueryRunnerTestHelper.dataSource)
.setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
.setGranularity(QueryRunnerTestHelper.allGran).setDimensions(new DefaultDimensionSpec(
.setGranularity(QueryRunnerTestHelper.ALL_GRAN).setDimensions(new DefaultDimensionSpec(
QueryRunnerTestHelper.marketDimension,
QueryRunnerTestHelper.MARKET_DIMENSION,
"marketalias"
"marketalias"
))
))
.setInterval(QueryRunnerTestHelper.fullOnIntervalSpec)
.setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
.setLimitSpec(
.setLimitSpec(
new DefaultLimitSpec(
new DefaultLimitSpec(
Collections.singletonList(new OrderByColumnSpec("marketalias", OrderByColumnSpec.Direction.DESCENDING)),
Collections.singletonList(new OrderByColumnSpec("marketalias", OrderByColumnSpec.Direction.DESCENDING)),
1
1
)
)
).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, aggFactory)
).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, aggFactory)
.setPostAggregatorSpecs(
.setPostAggregatorSpecs(
Collections.singletonList(
Collections.singletonList(
new QuantilePostAggregator("quantile", "quantile", 0.5f)
new QuantilePostAggregator("quantile", "quantile", 0.5f)
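The pattern in every hunk above is the same mechanical rename: each `private static final` field gets a CONSTANT_CASE name (`resourceCloser` becomes `RESOURCE_CLOSER`, `dataSource` becomes `DATA_SOURCE`). As a rough illustration of what the new check enforces, here is a minimal Java sketch, assuming Checkstyle's documented default ConstantName pattern and modeling the evident exemption for fields like `log` (which stay unchanged later in this diff) as a simple allowlist; neither detail is copied from Druid's actual checkstyle.xml.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.regex.Pattern;

public class ConstantNameCheckSketch
{
  // Checkstyle's default ConstantName pattern; an assumption here, not Druid's actual config.
  private static final Pattern CONSTANT_CASE = Pattern.compile("^[A-Z][A-Z0-9]*(_[A-Z0-9]+)*$");

  // The diff leaves fields like "log" untouched, so some exemption must exist;
  // modeling it as an allowlist is purely for illustration.
  private static final Set<String> EXEMPT = new HashSet<>(Arrays.asList("log"));

  public static boolean isValidConstantName(String fieldName)
  {
    return EXEMPT.contains(fieldName) || CONSTANT_CASE.matcher(fieldName).matches();
  }

  public static void main(String[] args)
  {
    System.out.println(isValidConstantName("resourceCloser"));  // false -> renamed to RESOURCE_CLOSER
    System.out.println(isValidConstantName("RESOURCE_CLOSER")); // true
    System.out.println(isValidConstantName("log"));             // true, via the assumed exemption
  }
}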
@ -54,12 +54,12 @@ import java.util.Map;
@RunWith(Parameterized.class)
@RunWith(Parameterized.class)
public class ApproximateHistogramTopNQueryTest
public class ApproximateHistogramTopNQueryTest
{
{
private static final Closer resourceCloser = Closer.create();
private static final Closer RESOURCE_CLOSER = Closer.create();

@AfterClass
@AfterClass
public static void teardown() throws IOException
public static void teardown() throws IOException
{
{
resourceCloser.close();
RESOURCE_CLOSER.close();
}
}

@Parameterized.Parameters(name = "{0}")
@Parameterized.Parameters(name = "{0}")
@ -70,8 +70,8 @@ public class ApproximateHistogramTopNQueryTest
"TopNQueryRunnerFactory-bufferPool",
"TopNQueryRunnerFactory-bufferPool",
() -> ByteBuffer.allocate(2000)
() -> ByteBuffer.allocate(2000)
);
);
resourceCloser.register(defaultPool);
RESOURCE_CLOSER.register(defaultPool);
resourceCloser.register(customPool);
RESOURCE_CLOSER.register(customPool);

return QueryRunnerTestHelper.transformToConstructionFeeder(
return QueryRunnerTestHelper.transformToConstructionFeeder(
Iterables.concat(
Iterables.concat(
@ -122,16 +122,16 @@ public class ApproximateHistogramTopNQueryTest
);
);

TopNQuery query = new TopNQueryBuilder()
TopNQuery query = new TopNQueryBuilder()
.dataSource(QueryRunnerTestHelper.dataSource)
.dataSource(QueryRunnerTestHelper.DATA_SOURCE)
.granularity(QueryRunnerTestHelper.allGran)
.granularity(QueryRunnerTestHelper.ALL_GRAN)
.dimension(QueryRunnerTestHelper.marketDimension)
.dimension(QueryRunnerTestHelper.MARKET_DIMENSION)
.metric(QueryRunnerTestHelper.dependentPostAggMetric)
.metric(QueryRunnerTestHelper.dependentPostAggMetric)
.threshold(4)
.threshold(4)
.intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
.intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
.aggregators(
.aggregators(
Lists.newArrayList(
Lists.newArrayList(
Iterables.concat(
Iterables.concat(
QueryRunnerTestHelper.commonDoubleAggregators,
QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS,
Lists.newArrayList(
Lists.newArrayList(
new DoubleMaxAggregatorFactory("maxIndex", "index"),
new DoubleMaxAggregatorFactory("maxIndex", "index"),
new DoubleMinAggregatorFactory("minIndex", "index"),
new DoubleMinAggregatorFactory("minIndex", "index"),
@ -141,8 +141,8 @@ public class ApproximateHistogramTopNQueryTest
)
)
)
)
.postAggregators(
.postAggregators(
QueryRunnerTestHelper.addRowsIndexConstant,
QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT,
QueryRunnerTestHelper.dependentPostAgg,
QueryRunnerTestHelper.DEPENDENT_POST_AGG,
new QuantilePostAggregator("quantile", "apphisto", 0.5f)
new QuantilePostAggregator("quantile", "apphisto", 0.5f)
)
)
.build();
.build();
@ -153,7 +153,7 @@ public class ApproximateHistogramTopNQueryTest
new TopNResultValue(
new TopNResultValue(
Arrays.<Map<String, Object>>asList(
Arrays.<Map<String, Object>>asList(
ImmutableMap.<String, Object>builder()
ImmutableMap.<String, Object>builder()
.put(QueryRunnerTestHelper.marketDimension, "total_market")
.put(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market")
.put("rows", 186L)
.put("rows", 186L)
.put("index", 215679.82879638672D)
.put("index", 215679.82879638672D)
.put("addRowsIndexConstant", 215866.82879638672D)
.put("addRowsIndexConstant", 215866.82879638672D)
@ -184,7 +184,7 @@ public class ApproximateHistogramTopNQueryTest
)
)
.build(),
.build(),
ImmutableMap.<String, Object>builder()
ImmutableMap.<String, Object>builder()
.put(QueryRunnerTestHelper.marketDimension, "upfront")
.put(QueryRunnerTestHelper.MARKET_DIMENSION, "upfront")
.put("rows", 186L)
.put("rows", 186L)
.put("index", 192046.1060180664D)
.put("index", 192046.1060180664D)
.put("addRowsIndexConstant", 192233.1060180664D)
.put("addRowsIndexConstant", 192233.1060180664D)
@ -215,7 +215,7 @@ public class ApproximateHistogramTopNQueryTest
)
)
.build(),
.build(),
ImmutableMap.<String, Object>builder()
ImmutableMap.<String, Object>builder()
.put(QueryRunnerTestHelper.marketDimension, "spot")
.put(QueryRunnerTestHelper.MARKET_DIMENSION, "spot")
.put("rows", 837L)
.put("rows", 837L)
.put("index", 95606.57232284546D)
.put("index", 95606.57232284546D)
.put("addRowsIndexConstant", 96444.57232284546D)
.put("addRowsIndexConstant", 96444.57232284546D)
@ -52,7 +52,7 @@ import java.util.List;
@RunWith(Parameterized.class)
@RunWith(Parameterized.class)
public class FixedBucketsHistogramGroupByQueryTest
public class FixedBucketsHistogramGroupByQueryTest
{
{
private static final Closer resourceCloser = Closer.create();
private static final Closer RESOURCE_CLOSER = Closer.create();

private final QueryRunner<Row> runner;
private final QueryRunner<Row> runner;
private final GroupByQueryRunnerFactory factory;
private final GroupByQueryRunnerFactory factory;
@ -124,7 +124,7 @@ public class FixedBucketsHistogramGroupByQueryTest
config
config
);
);
final GroupByQueryRunnerFactory factory = factoryAndCloser.lhs;
final GroupByQueryRunnerFactory factory = factoryAndCloser.lhs;
resourceCloser.register(factoryAndCloser.rhs);
RESOURCE_CLOSER.register(factoryAndCloser.rhs);
for (QueryRunner<ResultRow> runner : QueryRunnerTestHelper.makeQueryRunners(factory)) {
for (QueryRunner<ResultRow> runner : QueryRunnerTestHelper.makeQueryRunners(factory)) {
final String testName = StringUtils.format(
final String testName = StringUtils.format(
"config=%s, runner=%s",
"config=%s, runner=%s",
@ -153,7 +153,7 @@ public class FixedBucketsHistogramGroupByQueryTest
@After
@After
public void teardown() throws IOException
public void teardown() throws IOException
{
{
resourceCloser.close();
RESOURCE_CLOSER.close();
}
}

@Test
@Test
@ -170,18 +170,18 @@ public class FixedBucketsHistogramGroupByQueryTest
);
);

GroupByQuery query = new GroupByQuery.Builder()
GroupByQuery query = new GroupByQuery.Builder()
.setDataSource(QueryRunnerTestHelper.dataSource)
.setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
.setGranularity(QueryRunnerTestHelper.allGran).setDimensions(new DefaultDimensionSpec(
.setGranularity(QueryRunnerTestHelper.ALL_GRAN).setDimensions(new DefaultDimensionSpec(
QueryRunnerTestHelper.marketDimension,
QueryRunnerTestHelper.MARKET_DIMENSION,
"marketalias"
"marketalias"
))
))
.setInterval(QueryRunnerTestHelper.fullOnInterval)
.setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL)
.setLimitSpec(
.setLimitSpec(
new DefaultLimitSpec(
new DefaultLimitSpec(
Collections.singletonList(new OrderByColumnSpec("marketalias", OrderByColumnSpec.Direction.DESCENDING)),
Collections.singletonList(new OrderByColumnSpec("marketalias", OrderByColumnSpec.Direction.DESCENDING)),
1
1
)
)
).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, aggFactory)
).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, aggFactory)
.setPostAggregatorSpecs(
.setPostAggregatorSpecs(
Collections.singletonList(
Collections.singletonList(
new QuantilePostAggregator("quantile", "histo", 0.5f)
new QuantilePostAggregator("quantile", "histo", 0.5f)
@ -231,18 +231,18 @@ public class FixedBucketsHistogramGroupByQueryTest
);
);

GroupByQuery query = new GroupByQuery.Builder()
GroupByQuery query = new GroupByQuery.Builder()
.setDataSource(QueryRunnerTestHelper.dataSource)
.setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
.setGranularity(QueryRunnerTestHelper.allGran).setDimensions(new DefaultDimensionSpec(
.setGranularity(QueryRunnerTestHelper.ALL_GRAN).setDimensions(new DefaultDimensionSpec(
QueryRunnerTestHelper.marketDimension,
QueryRunnerTestHelper.MARKET_DIMENSION,
"marketalias"
"marketalias"
))
))
.setInterval(QueryRunnerTestHelper.fullOnInterval)
.setInterval(QueryRunnerTestHelper.FULL_ON_INTERVAL)
.setLimitSpec(
.setLimitSpec(
new DefaultLimitSpec(
new DefaultLimitSpec(
Collections.singletonList(new OrderByColumnSpec("marketalias", OrderByColumnSpec.Direction.DESCENDING)),
Collections.singletonList(new OrderByColumnSpec("marketalias", OrderByColumnSpec.Direction.DESCENDING)),
1
1
)
)
).setAggregatorSpecs(QueryRunnerTestHelper.rowsCount, aggFactory)
).setAggregatorSpecs(QueryRunnerTestHelper.ROWS_COUNT, aggFactory)
.setPostAggregatorSpecs(
.setPostAggregatorSpecs(
Collections.singletonList(
Collections.singletonList(
new QuantilePostAggregator("quantile", "quantile", 0.5f)
new QuantilePostAggregator("quantile", "quantile", 0.5f)
@ -54,12 +54,12 @@ import java.util.Map;
@RunWith(Parameterized.class)
@RunWith(Parameterized.class)
public class FixedBucketsHistogramTopNQueryTest
public class FixedBucketsHistogramTopNQueryTest
{
{
private static final Closer resourceCloser = Closer.create();
private static final Closer RESOURCE_CLOSER = Closer.create();

@AfterClass
@AfterClass
public static void teardown() throws IOException
public static void teardown() throws IOException
{
{
resourceCloser.close();
RESOURCE_CLOSER.close();
}
}

@Parameterized.Parameters(name = "{0}")
@Parameterized.Parameters(name = "{0}")
@ -70,8 +70,8 @@ public class FixedBucketsHistogramTopNQueryTest
"TopNQueryRunnerFactory-bufferPool",
"TopNQueryRunnerFactory-bufferPool",
() -> ByteBuffer.allocate(2000)
() -> ByteBuffer.allocate(2000)
);
);
resourceCloser.register(defaultPool);
RESOURCE_CLOSER.register(defaultPool);
resourceCloser.register(customPool);
RESOURCE_CLOSER.register(customPool);

return QueryRunnerTestHelper.transformToConstructionFeeder(
return QueryRunnerTestHelper.transformToConstructionFeeder(
Iterables.concat(
Iterables.concat(
@ -122,16 +122,16 @@ public class FixedBucketsHistogramTopNQueryTest
);
);

TopNQuery query = new TopNQueryBuilder()
TopNQuery query = new TopNQueryBuilder()
.dataSource(QueryRunnerTestHelper.dataSource)
.dataSource(QueryRunnerTestHelper.DATA_SOURCE)
.granularity(QueryRunnerTestHelper.allGran)
.granularity(QueryRunnerTestHelper.ALL_GRAN)
.dimension(QueryRunnerTestHelper.marketDimension)
.dimension(QueryRunnerTestHelper.MARKET_DIMENSION)
.metric(QueryRunnerTestHelper.dependentPostAggMetric)
.metric(QueryRunnerTestHelper.dependentPostAggMetric)
.threshold(4)
.threshold(4)
.intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
.intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
.aggregators(
.aggregators(
Lists.newArrayList(
Lists.newArrayList(
Iterables.concat(
Iterables.concat(
QueryRunnerTestHelper.commonDoubleAggregators,
QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS,
Lists.newArrayList(
Lists.newArrayList(
new DoubleMaxAggregatorFactory("maxIndex", "index"),
new DoubleMaxAggregatorFactory("maxIndex", "index"),
new DoubleMinAggregatorFactory("minIndex", "index"),
new DoubleMinAggregatorFactory("minIndex", "index"),
@ -141,8 +141,8 @@ public class FixedBucketsHistogramTopNQueryTest
)
)
)
)
.postAggregators(
.postAggregators(
QueryRunnerTestHelper.addRowsIndexConstant,
QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT,
QueryRunnerTestHelper.dependentPostAgg,
QueryRunnerTestHelper.DEPENDENT_POST_AGG,
new QuantilePostAggregator("quantile", "histo", 0.5f)
new QuantilePostAggregator("quantile", "histo", 0.5f)
)
)
.build();
.build();
@ -153,7 +153,7 @@ public class FixedBucketsHistogramTopNQueryTest
new TopNResultValue(
new TopNResultValue(
Arrays.<Map<String, Object>>asList(
Arrays.<Map<String, Object>>asList(
ImmutableMap.<String, Object>builder()
ImmutableMap.<String, Object>builder()
.put(QueryRunnerTestHelper.marketDimension, "total_market")
.put(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market")
.put("rows", 186L)
.put("rows", 186L)
.put("index", 215679.82879638672D)
.put("index", 215679.82879638672D)
.put("addRowsIndexConstant", 215866.82879638672D)
.put("addRowsIndexConstant", 215866.82879638672D)
@ -180,7 +180,7 @@ public class FixedBucketsHistogramTopNQueryTest
)
)
.build(),
.build(),
ImmutableMap.<String, Object>builder()
ImmutableMap.<String, Object>builder()
.put(QueryRunnerTestHelper.marketDimension, "upfront")
.put(QueryRunnerTestHelper.MARKET_DIMENSION, "upfront")
.put("rows", 186L)
.put("rows", 186L)
.put("index", 192046.1060180664D)
.put("index", 192046.1060180664D)
.put("addRowsIndexConstant", 192233.1060180664D)
.put("addRowsIndexConstant", 192233.1060180664D)
@ -207,7 +207,7 @@ public class FixedBucketsHistogramTopNQueryTest
)
)
.build(),
.build(),
ImmutableMap.<String, Object>builder()
ImmutableMap.<String, Object>builder()
.put(QueryRunnerTestHelper.marketDimension, "spot")
.put(QueryRunnerTestHelper.MARKET_DIMENSION, "spot")
.put("rows", 837L)
.put("rows", 837L)
.put("index", 95606.57232284546D)
.put("index", 95606.57232284546D)
.put("addRowsIndexConstant", 96444.57232284546D)
.put("addRowsIndexConstant", 96444.57232284546D)
@ -64,8 +64,8 @@ import java.util.concurrent.ThreadLocalRandom;
public class TestKafkaExtractionCluster
public class TestKafkaExtractionCluster
{
{
private static final Logger log = new Logger(TestKafkaExtractionCluster.class);
private static final Logger log = new Logger(TestKafkaExtractionCluster.class);
private static final String topicName = "testTopic";
private static final String TOPIC_NAME = "testTopic";
private static final Map<String, String> kafkaProperties = new HashMap<>();
private static final Map<String, String> KAFKA_PROPERTIES = new HashMap<>();

@Rule
@Rule
public TemporaryFolder temporaryFolder = new TemporaryFolder();
public TemporaryFolder temporaryFolder = new TemporaryFolder();
@ -81,7 +81,7 @@ public class TestKafkaExtractionCluster
private static List<ProducerRecord<byte[], byte[]>> generateRecords()
private static List<ProducerRecord<byte[], byte[]>> generateRecords()
{
{
return ImmutableList.of(
return ImmutableList.of(
new ProducerRecord<>(topicName, 0,
new ProducerRecord<>(TOPIC_NAME, 0,
StringUtils.toUtf8("abcdefg"),
StringUtils.toUtf8("abcdefg"),
StringUtils.toUtf8("abcdefg")));
StringUtils.toUtf8("abcdefg")));
}
}
@ -131,7 +131,7 @@ public class TestKafkaExtractionCluster

final KafkaLookupExtractorFactory kafkaLookupExtractorFactory = new KafkaLookupExtractorFactory(
final KafkaLookupExtractorFactory kafkaLookupExtractorFactory = new KafkaLookupExtractorFactory(
null,
null,
topicName,
TOPIC_NAME,
consumerProperties
consumerProperties
);
);

@ -149,7 +149,7 @@ public class TestKafkaExtractionCluster
@Nonnull
@Nonnull
private Map<String, String> getConsumerProperties()
private Map<String, String> getConsumerProperties()
{
{
final Map<String, String> props = new HashMap<>(kafkaProperties);
final Map<String, String> props = new HashMap<>(KAFKA_PROPERTIES);
int port = kafkaServer.socketServer().config().port();
int port = kafkaServer.socketServer().config().port();
props.put("bootstrap.servers", StringUtils.format("127.0.0.1:%d", port));
props.put("bootstrap.servers", StringUtils.format("127.0.0.1:%d", port));
return props;
return props;
@ -168,7 +168,7 @@ public class TestKafkaExtractionCluster
private KafkaConfig getBrokerProperties() throws IOException
private KafkaConfig getBrokerProperties() throws IOException
{
{
final Properties serverProperties = new Properties();
final Properties serverProperties = new Properties();
serverProperties.putAll(kafkaProperties);
serverProperties.putAll(KAFKA_PROPERTIES);
serverProperties.put("broker.id", "0");
serverProperties.put("broker.id", "0");
serverProperties.put("zookeeper.connect", zkServer.getConnectString());
serverProperties.put("zookeeper.connect", zkServer.getConnectString());
serverProperties.put("port", String.valueOf(ThreadLocalRandom.current().nextInt(9999) + 10000));
serverProperties.put("port", String.valueOf(ThreadLocalRandom.current().nextInt(9999) + 10000));
@ -193,13 +193,13 @@ public class TestKafkaExtractionCluster
private Properties makeProducerProperties()
private Properties makeProducerProperties()
{
{
final Properties kafkaProducerProperties = new Properties();
final Properties kafkaProducerProperties = new Properties();
kafkaProducerProperties.putAll(kafkaProperties);
kafkaProducerProperties.putAll(KAFKA_PROPERTIES);
int port = kafkaServer.socketServer().config().port();
int port = kafkaServer.socketServer().config().port();
kafkaProducerProperties.put("bootstrap.servers", StringUtils.format("127.0.0.1:%d", port));
kafkaProducerProperties.put("bootstrap.servers", StringUtils.format("127.0.0.1:%d", port));
kafkaProducerProperties.put("key.serializer", ByteArraySerializer.class.getName());
kafkaProducerProperties.put("key.serializer", ByteArraySerializer.class.getName());
kafkaProducerProperties.put("value.serializer", ByteArraySerializer.class.getName());
kafkaProducerProperties.put("value.serializer", ByteArraySerializer.class.getName());
kafkaProducerProperties.put("acks", "all");
kafkaProducerProperties.put("acks", "all");
kafkaProperties.put("request.required.acks", "1");
KAFKA_PROPERTIES.put("request.required.acks", "1");
return kafkaProducerProperties;
return kafkaProducerProperties;
}
}

@ -222,7 +222,7 @@ public class TestKafkaExtractionCluster
long events = factory.getCompletedEventCount();
long events = factory.getCompletedEventCount();

log.info("------------------------- Sending foo bar -------------------------------");
log.info("------------------------- Sending foo bar -------------------------------");
producer.send(new ProducerRecord<>(topicName, StringUtils.toUtf8("foo"), StringUtils.toUtf8("bar")));
producer.send(new ProducerRecord<>(TOPIC_NAME, StringUtils.toUtf8("foo"), StringUtils.toUtf8("bar")));

long start = System.currentTimeMillis();
long start = System.currentTimeMillis();
while (events == factory.getCompletedEventCount()) {
while (events == factory.getCompletedEventCount()) {
@ -241,7 +241,7 @@ public class TestKafkaExtractionCluster
events = factory.getCompletedEventCount();
events = factory.getCompletedEventCount();

log.info("------------------------- Sending baz bat -------------------------------");
log.info("------------------------- Sending baz bat -------------------------------");
producer.send(new ProducerRecord<>(topicName, StringUtils.toUtf8("baz"), StringUtils.toUtf8("bat")));
producer.send(new ProducerRecord<>(TOPIC_NAME, StringUtils.toUtf8("baz"), StringUtils.toUtf8("bat")));
while (events == factory.getCompletedEventCount()) {
while (events == factory.getCompletedEventCount()) {
Thread.sleep(10);
Thread.sleep(10);
if (System.currentTimeMillis() > start + 60_000) {
if (System.currentTimeMillis() > start + 60_000) {
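One hunk above is worth pausing on: `makeProducerProperties()` still mutates the shared map via `KAFKA_PROPERTIES.put("request.required.acks", "1")`. The rename cannot change semantics here, because `static final` only fixes the reference, not the contents, so a CONSTANT_CASE name can suggest more immutability than the field actually has. A minimal sketch of the distinction, with class and field names invented for illustration:

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class StaticFinalIsNotImmutable
{
  // The reference is constant, but the map it points to is freely mutable.
  private static final Map<String, String> MUTABLE_PROPS = new HashMap<>();

  // Wrapping the map makes accidental writes fail fast instead of sharing state silently.
  private static final Map<String, String> FROZEN_PROPS =
      Collections.unmodifiableMap(new HashMap<>());

  public static void main(String[] args)
  {
    MUTABLE_PROPS.put("request.required.acks", "1"); // compiles and runs: only the reference is final
    try {
      FROZEN_PROPS.put("request.required.acks", "1");
    }
    catch (UnsupportedOperationException e) {
      System.out.println("frozen map rejected the write");
    }
  }
}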
@ -72,7 +72,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
@Rule
@Rule
public ExpectedException expectedException = ExpectedException.none();
public ExpectedException expectedException = ExpectedException.none();

private static final ObjectMapper objectMapper = new DefaultObjectMapper();
private static final ObjectMapper OBJECT_MAPPER = new DefaultObjectMapper();
private static final String TEST_ID = "test-id";
private static final String TEST_ID = "test-id";
private static final List<String> TEST_IDS = Arrays.asList("test-id1", "test-id2", "test-id3", "test-id4");
private static final List<String> TEST_IDS = Arrays.asList("test-id1", "test-id2", "test-id3", "test-id4");
private static final String TEST_HOST = "test-host";
private static final String TEST_HOST = "test-host";
@ -111,7 +111,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
response = createMock(HttpResponse.class);
response = createMock(HttpResponse.class);
headers = createMock(HttpHeaders.class);
headers = createMock(HttpHeaders.class);

client = new TestableKafkaIndexTaskClient(httpClient, objectMapper, taskInfoProvider);
client = new TestableKafkaIndexTaskClient(httpClient, OBJECT_MAPPER, taskInfoProvider);
EasyMock.expect(taskInfoProvider.getTaskLocation(TEST_ID))
EasyMock.expect(taskInfoProvider.getTaskLocation(TEST_ID))
.andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT))
.andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT))
.anyTimes();
.anyTimes();
@ -285,7 +285,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
@Test
@Test
public void testGetCurrentOffsetsWithRetry() throws Exception
public void testGetCurrentOffsetsWithRetry() throws Exception
{
{
client = new TestableKafkaIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 3);
client = new TestableKafkaIndexTaskClient(httpClient, OBJECT_MAPPER, taskInfoProvider, 3);

Capture<Request> captured = Capture.newInstance(CaptureType.ALL);
Capture<Request> captured = Capture.newInstance(CaptureType.ALL);
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(6)
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(6)
@ -330,7 +330,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
expectedException.expect(RuntimeException.class);
expectedException.expect(RuntimeException.class);
expectedException.expectMessage("org.apache.druid.java.util.common.IOE: Received status [404]");
expectedException.expectMessage("org.apache.druid.java.util.common.IOE: Received status [404]");

client = new TestableKafkaIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 2);
client = new TestableKafkaIndexTaskClient(httpClient, OBJECT_MAPPER, taskInfoProvider, 2);

EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).anyTimes();
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).anyTimes();
EasyMock.expect(responseHolder.getContent()).andReturn("").anyTimes();
EasyMock.expect(responseHolder.getContent()).andReturn("").anyTimes();
@ -385,7 +385,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
@Test
@Test
public void testGetStartTime() throws Exception
public void testGetStartTime() throws Exception
{
{
client = new TestableKafkaIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 2);
client = new TestableKafkaIndexTaskClient(httpClient, OBJECT_MAPPER, taskInfoProvider, 2);
DateTime now = DateTimes.nowUtc();
DateTime now = DateTimes.nowUtc();

Capture<Request> captured = Capture.newInstance();
Capture<Request> captured = Capture.newInstance();
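Promoting `objectMapper` to `OBJECT_MAPPER` and sharing a single instance across all test methods is safe here because a Jackson `ObjectMapper` is thread-safe once it is fully configured; reconfiguring it mid-test would not be. A minimal sketch of the sharing pattern, using a plain `ObjectMapper` rather than Druid's `DefaultObjectMapper` subclass:

import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Collections;

public class SharedMapperSketch
{
  // Configured once up front and then treated as read-only: safe to share across tests and threads.
  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

  public static void main(String[] args) throws Exception
  {
    String json = OBJECT_MAPPER.writeValueAsString(Collections.singletonMap("task", "test-id"));
    System.out.println(json); // {"task":"test-id"}
  }
}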
@ -54,7 +54,7 @@ public class KafkaRecordSupplierTest
private static long poll_timeout_millis = 1000;
private static long poll_timeout_millis = 1000;
private static int pollRetry = 5;
private static int pollRetry = 5;
private static int topicPosFix = 0;
private static int topicPosFix = 0;
private static final ObjectMapper objectMapper = TestHelper.makeJsonMapper();
private static final ObjectMapper OBJECT_MAPPER = TestHelper.makeJsonMapper();

private static TestingCluster zkServer;
private static TestingCluster zkServer;
private static TestBroker kafkaServer;
private static TestBroker kafkaServer;
@ -194,7 +194,7 @@ public class KafkaRecordSupplierTest
);
);

KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
kafkaServer.consumerProperties(), objectMapper);
kafkaServer.consumerProperties(), OBJECT_MAPPER);

Assert.assertTrue(recordSupplier.getAssignment().isEmpty());
Assert.assertTrue(recordSupplier.getAssignment().isEmpty());

@ -224,7 +224,7 @@ public class KafkaRecordSupplierTest

KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
properties,
properties,
objectMapper
OBJECT_MAPPER
);
);

Assert.assertTrue(recordSupplier.getAssignment().isEmpty());
Assert.assertTrue(recordSupplier.getAssignment().isEmpty());
@ -255,7 +255,7 @@ public class KafkaRecordSupplierTest

KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
properties,
properties,
objectMapper
OBJECT_MAPPER
);
);

recordSupplier.assign(partitions);
recordSupplier.assign(partitions);
@ -289,7 +289,7 @@ public class KafkaRecordSupplierTest
);
);

KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
kafkaServer.consumerProperties(), objectMapper);
kafkaServer.consumerProperties(), OBJECT_MAPPER);

recordSupplier.assign(partitions);
recordSupplier.assign(partitions);
recordSupplier.seekToEarliest(partitions);
recordSupplier.seekToEarliest(partitions);
@ -330,7 +330,7 @@ public class KafkaRecordSupplierTest

KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
kafkaServer.consumerProperties(), objectMapper);
kafkaServer.consumerProperties(), OBJECT_MAPPER);

recordSupplier.assign(partitions);
recordSupplier.assign(partitions);
recordSupplier.seekToEarliest(partitions);
recordSupplier.seekToEarliest(partitions);
@ -401,7 +401,7 @@ public class KafkaRecordSupplierTest
);
);

KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
kafkaServer.consumerProperties(), objectMapper);
kafkaServer.consumerProperties(), OBJECT_MAPPER);

recordSupplier.assign(partitions);
recordSupplier.assign(partitions);
recordSupplier.seekToEarliest(partitions);
recordSupplier.seekToEarliest(partitions);
@ -444,7 +444,7 @@ public class KafkaRecordSupplierTest
);
);

KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
kafkaServer.consumerProperties(), objectMapper);
kafkaServer.consumerProperties(), OBJECT_MAPPER);

recordSupplier.assign(partitions);
recordSupplier.assign(partitions);
recordSupplier.seekToEarliest(partitions);
recordSupplier.seekToEarliest(partitions);
@ -477,7 +477,7 @@ public class KafkaRecordSupplierTest
);
);

KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
kafkaServer.consumerProperties(), objectMapper);
kafkaServer.consumerProperties(), OBJECT_MAPPER);

recordSupplier.assign(partitions);
recordSupplier.assign(partitions);

@ -503,7 +503,7 @@ public class KafkaRecordSupplierTest
);
);

KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
KafkaRecordSupplier recordSupplier = new KafkaRecordSupplier(
kafkaServer.consumerProperties(), objectMapper);
kafkaServer.consumerProperties(), OBJECT_MAPPER);

recordSupplier.assign(partitions);
recordSupplier.assign(partitions);
recordSupplier.seekToEarliest(partitions);
recordSupplier.seekToEarliest(partitions);
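Note which fields the first hunk of this file leaves alone: `poll_timeout_millis`, `pollRetry`, and `topicPosFix` are `static` but not `final`, so a constant-name rule does not apply to them; only `objectMapper`, the lone `static final` field, is renamed. A sketch of that scoping, assuming (as the commit title suggests) the check keys on both modifiers; the names below are invented:

public class RuleScopeSketch
{
  private static final int MAX_RETRIES = 5; // static final: CONSTANT_CASE required
  private static int retryCount = 0;        // static but mutable: ordinary camelCase is fine
  private final int instanceLimit = 10;     // final but per-instance: also camelCase

  public static void main(String[] args)
  {
    retryCount++;
    System.out.println(retryCount + " of " + MAX_RETRIES + " retries used; per-instance limit "
        + new RuleScopeSketch().instanceLimit);
  }
}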
@ -62,11 +62,11 @@ import java.util.Map;

public class KafkaSamplerSpecTest
public class KafkaSamplerSpecTest
{
{
private static final ObjectMapper objectMapper = TestHelper.makeJsonMapper();
private static final ObjectMapper OBJECT_MAPPER = TestHelper.makeJsonMapper();
private static final String TOPIC = "sampling";
private static final String TOPIC = "sampling";
private static final DataSchema DATA_SCHEMA = new DataSchema(
private static final DataSchema DATA_SCHEMA = new DataSchema(
"test_ds",
"test_ds",
objectMapper.convertValue(
OBJECT_MAPPER.convertValue(
new StringInputRowParser(
new StringInputRowParser(
new JSONParseSpec(
new JSONParseSpec(
new TimestampSpec("timestamp", "iso", null),
new TimestampSpec("timestamp", "iso", null),
@ -94,7 +94,7 @@ public class KafkaSamplerSpecTest
},
},
new UniformGranularitySpec(Granularities.DAY, Granularities.NONE, null),
new UniformGranularitySpec(Granularities.DAY, Granularities.NONE, null),
null,
null,
objectMapper
OBJECT_MAPPER
);
);

private static TestingCluster zkServer;
private static TestingCluster zkServer;
@ -167,8 +167,8 @@ public class KafkaSamplerSpecTest
KafkaSamplerSpec samplerSpec = new KafkaSamplerSpec(
KafkaSamplerSpec samplerSpec = new KafkaSamplerSpec(
supervisorSpec,
supervisorSpec,
new SamplerConfig(5, null, null, null),
new SamplerConfig(5, null, null, null),
new FirehoseSampler(objectMapper, new SamplerCache(MapCache.create(100000))),
new FirehoseSampler(OBJECT_MAPPER, new SamplerCache(MapCache.create(100000))),
objectMapper
OBJECT_MAPPER
);
);

SamplerResponse response = samplerSpec.sample();
SamplerResponse response = samplerSpec.sample();
@ -122,7 +122,7 @@ import java.util.concurrent.Executor;
@RunWith(Parameterized.class)
@RunWith(Parameterized.class)
public class KafkaSupervisorTest extends EasyMockSupport
public class KafkaSupervisorTest extends EasyMockSupport
{
{
private static final ObjectMapper objectMapper = TestHelper.makeJsonMapper();
private static final ObjectMapper OBJECT_MAPPER = TestHelper.makeJsonMapper();
private static final String TOPIC_PREFIX = "testTopic";
private static final String TOPIC_PREFIX = "testTopic";
private static final String DATASOURCE = "testDS";
private static final String DATASOURCE = "testDS";
private static final int NUM_PARTITIONS = 3;
private static final int NUM_PARTITIONS = 3;
@ -237,7 +237,7 @@ public class KafkaSupervisorTest extends EasyMockSupport
final Map<String, Object> contexts = supervisor.createIndexTasks(
final Map<String, Object> contexts = supervisor.createIndexTasks(
1,
1,
"seq",
"seq",
objectMapper,
OBJECT_MAPPER,
new TreeMap<>(),
new TreeMap<>(),
new KafkaIndexTaskIOConfig(
new KafkaIndexTaskIOConfig(
0,
0,
@ -3393,7 +3393,7 @@ public class KafkaSupervisorTest extends EasyMockSupport
taskMaster,
taskMaster,
indexerMetadataStorageCoordinator,
indexerMetadataStorageCoordinator,
taskClientFactory,
taskClientFactory,
objectMapper,
OBJECT_MAPPER,
new KafkaSupervisorSpec(
new KafkaSupervisorSpec(
dataSchema,
dataSchema,
tuningConfig,
tuningConfig,
@ -3404,7 +3404,7 @@ public class KafkaSupervisorTest extends EasyMockSupport
taskMaster,
taskMaster,
indexerMetadataStorageCoordinator,
indexerMetadataStorageCoordinator,
taskClientFactory,
taskClientFactory,
objectMapper,
OBJECT_MAPPER,
new NoopServiceEmitter(),
new NoopServiceEmitter(),
new DruidMonitorSchedulerConfig(),
new DruidMonitorSchedulerConfig(),
rowIngestionMetersFactory,
rowIngestionMetersFactory,
@ -3500,7 +3500,7 @@ public class KafkaSupervisorTest extends EasyMockSupport
taskMaster,
taskMaster,
indexerMetadataStorageCoordinator,
indexerMetadataStorageCoordinator,
taskClientFactory,
taskClientFactory,
objectMapper,
OBJECT_MAPPER,
new KafkaSupervisorSpec(
new KafkaSupervisorSpec(
dataSchema,
dataSchema,
tuningConfig,
tuningConfig,
@ -3511,7 +3511,7 @@ public class KafkaSupervisorTest extends EasyMockSupport
taskMaster,
taskMaster,
indexerMetadataStorageCoordinator,
indexerMetadataStorageCoordinator,
taskClientFactory,
taskClientFactory,
objectMapper,
OBJECT_MAPPER,
new NoopServiceEmitter(),
new NoopServiceEmitter(),
new DruidMonitorSchedulerConfig(),
new DruidMonitorSchedulerConfig(),
rowIngestionMetersFactory,
rowIngestionMetersFactory,
@ -3584,7 +3584,7 @@ public class KafkaSupervisorTest extends EasyMockSupport
taskMaster,
taskMaster,
indexerMetadataStorageCoordinator,
indexerMetadataStorageCoordinator,
taskClientFactory,
taskClientFactory,
objectMapper,
OBJECT_MAPPER,
new KafkaSupervisorSpec(
new KafkaSupervisorSpec(
dataSchema,
dataSchema,
tuningConfig,
tuningConfig,
@ -3595,7 +3595,7 @@ public class KafkaSupervisorTest extends EasyMockSupport
taskMaster,
taskMaster,
indexerMetadataStorageCoordinator,
indexerMetadataStorageCoordinator,
taskClientFactory,
taskClientFactory,
objectMapper,
OBJECT_MAPPER,
new NoopServiceEmitter(),
new NoopServiceEmitter(),
new DruidMonitorSchedulerConfig(),
new DruidMonitorSchedulerConfig(),
rowIngestionMetersFactory,
rowIngestionMetersFactory,
@ -3613,7 +3613,7 @@ public class KafkaSupervisorTest extends EasyMockSupport

return new DataSchema(
return new DataSchema(
dataSource,
dataSource,
objectMapper.convertValue(
OBJECT_MAPPER.convertValue(
new StringInputRowParser(
new StringInputRowParser(
new JSONParseSpec(
new JSONParseSpec(
new TimestampSpec("timestamp", "iso", null),
new TimestampSpec("timestamp", "iso", null),
@ -3636,7 +3636,7 @@ public class KafkaSupervisorTest extends EasyMockSupport
ImmutableList.of()
ImmutableList.of()
),
),
null,
null,
objectMapper
OBJECT_MAPPER
);
);
}
}

@ -3717,7 +3717,7 @@ public class KafkaSupervisorTest extends EasyMockSupport
null,
null,
null,
null,
rowIngestionMetersFactory,
rowIngestionMetersFactory,
objectMapper,
OBJECT_MAPPER,
new DummyForInjectionAppenderatorsManager()
new DummyForInjectionAppenderatorsManager()
);
);
}
}
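Several of these hunks funnel a parser POJO through `OBJECT_MAPPER.convertValue(...)` to build a `DataSchema`. `convertValue` round-trips an object through Jackson's tree model instead of a JSON string, which is why the same shared mapper serves both serialization and schema construction. A standalone sketch of that call; the POJO below is a stand-in for illustration, not Druid's parser type:

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Map;

public class ConvertValueSketch
{
  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

  public static class TimestampSpecStandIn
  {
    public String column = "timestamp";
    public String format = "iso";
  }

  public static void main(String[] args)
  {
    // Object -> Map without ever producing a JSON string.
    Map<String, Object> asMap = OBJECT_MAPPER.convertValue(
        new TimestampSpecStandIn(),
        new TypeReference<Map<String, Object>>() {}
    );
    System.out.println(asMap); // e.g. {column=timestamp, format=iso}
  }
}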
@ -73,7 +73,7 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
@Rule
@Rule
public ExpectedException expectedException = ExpectedException.none();
public ExpectedException expectedException = ExpectedException.none();

private static final ObjectMapper objectMapper = new DefaultObjectMapper();
private static final ObjectMapper OBJECT_MAPPER = new DefaultObjectMapper();
private static final String TEST_ID = "test-id";
private static final String TEST_ID = "test-id";
private static final List<String> TEST_IDS = Arrays.asList("test-id1", "test-id2", "test-id3", "test-id4");
private static final List<String> TEST_IDS = Arrays.asList("test-id1", "test-id2", "test-id3", "test-id4");
private static final String TEST_HOST = "test-host";
private static final String TEST_HOST = "test-host";
@ -112,7 +112,7 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
response = createMock(HttpResponse.class);
response = createMock(HttpResponse.class);
headers = createMock(HttpHeaders.class);
headers = createMock(HttpHeaders.class);

client = new TestableKinesisIndexTaskClient(httpClient, objectMapper, taskInfoProvider);
client = new TestableKinesisIndexTaskClient(httpClient, OBJECT_MAPPER, taskInfoProvider);
EasyMock.expect(taskInfoProvider.getTaskLocation(TEST_ID))
EasyMock.expect(taskInfoProvider.getTaskLocation(TEST_ID))
.andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT))
.andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT))
.anyTimes();
.anyTimes();
@ -286,7 +286,7 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
@Test
@Test
public void testGetCurrentOffsetsWithRetry() throws Exception
public void testGetCurrentOffsetsWithRetry() throws Exception
{
{
client = new TestableKinesisIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 3);
client = new TestableKinesisIndexTaskClient(httpClient, OBJECT_MAPPER, taskInfoProvider, 3);

Capture<Request> captured = Capture.newInstance(CaptureType.ALL);
Capture<Request> captured = Capture.newInstance(CaptureType.ALL);
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(6)
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(6)
@ -331,7 +331,7 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
expectedException.expect(RuntimeException.class);
expectedException.expect(RuntimeException.class);
expectedException.expectMessage("org.apache.druid.java.util.common.IOE: Received status [404]");
expectedException.expectMessage("org.apache.druid.java.util.common.IOE: Received status [404]");

client = new TestableKinesisIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 2);
client = new TestableKinesisIndexTaskClient(httpClient, OBJECT_MAPPER, taskInfoProvider, 2);

EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).anyTimes();
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).anyTimes();
EasyMock.expect(responseHolder.getContent()).andReturn("").anyTimes();
EasyMock.expect(responseHolder.getContent()).andReturn("").anyTimes();
@ -386,7 +386,7 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
@Test
@Test
public void testGetStartTime() throws Exception
public void testGetStartTime() throws Exception
{
{
client = new TestableKinesisIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 2);
client = new TestableKinesisIndexTaskClient(httpClient, OBJECT_MAPPER, taskInfoProvider, 2);
DateTime now = DateTimes.nowUtc();
DateTime now = DateTimes.nowUtc();

Capture<Request> captured = Capture.newInstance();
Capture<Request> captured = Capture.newInstance();
@ -53,7 +53,7 @@ import java.util.stream.Collectors;

public class KinesisRecordSupplierTest extends EasyMockSupport
public class KinesisRecordSupplierTest extends EasyMockSupport
{
{
private static final String stream = "stream";
private static final String STREAM = "stream";
private static final long POLL_TIMEOUT_MILLIS = 2000;
private static final long POLL_TIMEOUT_MILLIS = 2000;
private static final String SHARD_ID1 = "1";
private static final String SHARD_ID1 = "1";
private static final String SHARD_ID0 = "0";
private static final String SHARD_ID0 = "0";
@ -78,7 +78,7 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
private static final List<Object> ALL_RECORDS = ImmutableList.builder()
private static final List<Object> ALL_RECORDS = ImmutableList.builder()
.addAll(SHARD0_RECORDS.stream()
.addAll(SHARD0_RECORDS.stream()
.map(x -> new OrderedPartitionableRecord<>(
.map(x -> new OrderedPartitionableRecord<>(
stream,
STREAM,
SHARD_ID0,
SHARD_ID0,
x.getSequenceNumber(),
x.getSequenceNumber(),
Collections
Collections
@ -91,7 +91,7 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
.toList()))
.toList()))
.addAll(SHARD1_RECORDS.stream()
.addAll(SHARD1_RECORDS.stream()
.map(x -> new OrderedPartitionableRecord<>(
.map(x -> new OrderedPartitionableRecord<>(
stream,
STREAM,
SHARD_ID1,
SHARD_ID1,
x.getSequenceNumber(),
x.getSequenceNumber(),
Collections
Collections
@ -182,8 +182,8 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
replayAll();
replayAll();

Set<StreamPartition<String>> partitions = ImmutableSet.of(
Set<StreamPartition<String>> partitions = ImmutableSet.of(
StreamPartition.of(stream, SHARD_ID0),
StreamPartition.of(STREAM, SHARD_ID0),
StreamPartition.of(stream, SHARD_ID1)
StreamPartition.of(STREAM, SHARD_ID1)
);
);

recordSupplier = new KinesisRecordSupplier(
recordSupplier = new KinesisRecordSupplier(
@ -204,13 +204,13 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
recordSupplier.assign(partitions);
recordSupplier.assign(partitions);

Assert.assertEquals(partitions, recordSupplier.getAssignment());
Assert.assertEquals(partitions, recordSupplier.getAssignment());
Assert.assertEquals(ImmutableSet.of(SHARD_ID1, SHARD_ID0), recordSupplier.getPartitionIds(stream));
Assert.assertEquals(ImmutableSet.of(SHARD_ID1, SHARD_ID0), recordSupplier.getPartitionIds(STREAM));
Assert.assertEquals(Collections.emptyList(), recordSupplier.poll(100));
Assert.assertEquals(Collections.emptyList(), recordSupplier.poll(100));

verifyAll();
verifyAll();

final DescribeStreamRequest expectedRequest = new DescribeStreamRequest();
final DescribeStreamRequest expectedRequest = new DescribeStreamRequest();
expectedRequest.setStreamName(stream);
expectedRequest.setStreamName(STREAM);
expectedRequest.setExclusiveStartShardId("0");
expectedRequest.setExclusiveStartShardId("0");
Assert.assertEquals(expectedRequest, capturedRequest.getValue());
Assert.assertEquals(expectedRequest, capturedRequest.getValue());
}
}
@ -266,8 +266,8 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
replayAll();
replayAll();

Set<StreamPartition<String>> partitions = ImmutableSet.of(
Set<StreamPartition<String>> partitions = ImmutableSet.of(
StreamPartition.of(stream, SHARD_ID0),
StreamPartition.of(STREAM, SHARD_ID0),
StreamPartition.of(stream, SHARD_ID1)
StreamPartition.of(STREAM, SHARD_ID1)
);
);

@ -338,8 +338,8 @@ public class KinesisRecordSupplierTest extends EasyMockSupport

replayAll();
replayAll();

StreamPartition<String> shard0Partition = StreamPartition.of(stream, SHARD_ID0);
StreamPartition<String> shard0Partition = StreamPartition.of(STREAM, SHARD_ID0);
StreamPartition<String> shard1Partition = StreamPartition.of(stream, SHARD_ID1);
StreamPartition<String> shard1Partition = StreamPartition.of(STREAM, SHARD_ID1);
Set<StreamPartition<String>> partitions = ImmutableSet.of(
Set<StreamPartition<String>> partitions = ImmutableSet.of(
shard0Partition,
shard0Partition,
shard1Partition
shard1Partition
@ -405,8 +405,8 @@ public class KinesisRecordSupplierTest extends EasyMockSupport

replayAll();
replayAll();

StreamPartition<String> shard0 = StreamPartition.of(stream, SHARD_ID0);
StreamPartition<String> shard0 = StreamPartition.of(STREAM, SHARD_ID0);
StreamPartition<String> shard1 = StreamPartition.of(stream, SHARD_ID1);
StreamPartition<String> shard1 = StreamPartition.of(STREAM, SHARD_ID1);
Set<StreamPartition<String>> partitions = ImmutableSet.of(
Set<StreamPartition<String>> partitions = ImmutableSet.of(
shard0,
shard0,
shard1
shard1
@ -440,8 +440,8 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
@Test(expected = ISE.class)
@Test(expected = ISE.class)
public void testSeekUnassigned() throws InterruptedException
public void testSeekUnassigned() throws InterruptedException
{
{
StreamPartition<String> shard0 = StreamPartition.of(stream, SHARD_ID0);
StreamPartition<String> shard0 = StreamPartition.of(STREAM, SHARD_ID0);
StreamPartition<String> shard1 = StreamPartition.of(stream, SHARD_ID1);
StreamPartition<String> shard1 = StreamPartition.of(STREAM, SHARD_ID1);
Set<StreamPartition<String>> partitions = ImmutableSet.of(
Set<StreamPartition<String>> partitions = ImmutableSet.of(
shard1
shard1
);
);
@ -503,7 +503,7 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
replayAll();
replayAll();

Set<StreamPartition<String>> partitions = ImmutableSet.of(
Set<StreamPartition<String>> partitions = ImmutableSet.of(
StreamPartition.of(stream, SHARD_ID1)
StreamPartition.of(STREAM, SHARD_ID1)
);
);

recordSupplier = new KinesisRecordSupplier(
recordSupplier = new KinesisRecordSupplier(
@ -520,7 +520,7 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
);
);

recordSupplier.assign(partitions);
recordSupplier.assign(partitions);
recordSupplier.seek(StreamPartition.of(stream, SHARD_ID1), "5");
recordSupplier.seek(StreamPartition.of(STREAM, SHARD_ID1), "5");
recordSupplier.start();
recordSupplier.start();

for (int i = 0; i < 10 && recordSupplier.bufferSize() < 6; i++) {
for (int i = 0; i < 10 && recordSupplier.bufferSize() < 6; i++) {
@ -534,7 +534,7 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
firstRecord
firstRecord
);
);

recordSupplier.seek(StreamPartition.of(stream, SHARD_ID1), "7");
recordSupplier.seek(StreamPartition.of(STREAM, SHARD_ID1), "7");
recordSupplier.start();
recordSupplier.start();

while (recordSupplier.bufferSize() < 4) {
while (recordSupplier.bufferSize() < 4) {
@ -585,8 +585,8 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
replayAll();
replayAll();

Set<StreamPartition<String>> partitions = ImmutableSet.of(
Set<StreamPartition<String>> partitions = ImmutableSet.of(
StreamPartition.of(stream, SHARD_ID0),
StreamPartition.of(STREAM, SHARD_ID0),
StreamPartition.of(stream, SHARD_ID1)
StreamPartition.of(STREAM, SHARD_ID1)
);
);
@ -66,12 +66,12 @@ import java.util.Map;

public class KinesisSamplerSpecTest extends EasyMockSupport
public class KinesisSamplerSpecTest extends EasyMockSupport
{
{
private static final ObjectMapper objectMapper = TestHelper.makeJsonMapper();
private static final ObjectMapper OBJECT_MAPPER = TestHelper.makeJsonMapper();
private static final String STREAM = "sampling";
private static final String STREAM = "sampling";
private static final String SHARD_ID = "1";
private static final String SHARD_ID = "1";
private static final DataSchema DATA_SCHEMA = new DataSchema(
private static final DataSchema DATA_SCHEMA = new DataSchema(
"test_ds",
"test_ds",
objectMapper.convertValue(
OBJECT_MAPPER.convertValue(
new StringInputRowParser(
new StringInputRowParser(
new JSONParseSpec(
new JSONParseSpec(
new TimestampSpec("timestamp", "iso", null),
new TimestampSpec("timestamp", "iso", null),
@ -99,7 +99,7 @@ public class KinesisSamplerSpecTest extends EasyMockSupport
},
},
new UniformGranularitySpec(Granularities.DAY, Granularities.NONE, null),
new UniformGranularitySpec(Granularities.DAY, Granularities.NONE, null),
null,
null,
objectMapper
OBJECT_MAPPER
);
);

private final KinesisRecordSupplier recordSupplier = mock(KinesisRecordSupplier.class);
private final KinesisRecordSupplier recordSupplier = mock(KinesisRecordSupplier.class);
@ -183,7 +183,7 @@ public class KinesisSamplerSpecTest extends EasyMockSupport
KinesisSamplerSpec samplerSpec = new TestableKinesisSamplerSpec(
KinesisSamplerSpec samplerSpec = new TestableKinesisSamplerSpec(
supervisorSpec,
supervisorSpec,
new SamplerConfig(5, null, null, null),
new SamplerConfig(5, null, null, null),
new FirehoseSampler(objectMapper, new SamplerCache(MapCache.create(100000))),
new FirehoseSampler(OBJECT_MAPPER, new SamplerCache(MapCache.create(100000))),
null
null
);
);
File diff suppressed because it is too large
@ -51,7 +51,7 @@ import java.util.Map;
*/
*/
public class NamespacedExtractorModuleTest
public class NamespacedExtractorModuleTest
{
{
private static final ObjectMapper mapper = UriExtractionNamespaceTest.registerTypes(new DefaultObjectMapper());
private static final ObjectMapper MAPPER = UriExtractionNamespaceTest.registerTypes(new DefaultObjectMapper());
private CacheScheduler scheduler;
private CacheScheduler scheduler;
private Lifecycle lifecycle;
private Lifecycle lifecycle;

@ -93,7 +93,7 @@ public class NamespacedExtractorModuleTest
{
{
final File tmpFile = temporaryFolder.newFile();
final File tmpFile = temporaryFolder.newFile();
try (Writer out = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
try (Writer out = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
out.write(mapper.writeValueAsString(ImmutableMap.of("foo", "bar")));
out.write(MAPPER.writeValueAsString(ImmutableMap.of("foo", "bar")));
}
}
final UriCacheGenerator factory = new UriCacheGenerator(
final UriCacheGenerator factory = new UriCacheGenerator(
ImmutableMap.of("file", new LocalFileTimestampVersionFinder())
ImmutableMap.of("file", new LocalFileTimestampVersionFinder())
@ -119,7 +119,7 @@ public class NamespacedExtractorModuleTest
{
{
final File tmpFile = temporaryFolder.newFile();
final File tmpFile = temporaryFolder.newFile();
try (Writer out = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
try (Writer out = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
out.write(mapper.writeValueAsString(ImmutableMap.of("foo", "bar")));
out.write(MAPPER.writeValueAsString(ImmutableMap.of("foo", "bar")));
}
}
final UriExtractionNamespace namespace = new UriExtractionNamespace(
final UriExtractionNamespace namespace = new UriExtractionNamespace(
tmpFile.toURI(),
tmpFile.toURI(),
@ -140,7 +140,7 @@ public class NamespacedExtractorModuleTest
{
{
final File tmpFile = temporaryFolder.newFile();
final File tmpFile = temporaryFolder.newFile();
try (Writer out = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
try (Writer out = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
out.write(mapper.writeValueAsString(ImmutableMap.of("foo", "bar")));
out.write(MAPPER.writeValueAsString(ImmutableMap.of("foo", "bar")));
}
}
final UriExtractionNamespace namespace = new UriExtractionNamespace(
final UriExtractionNamespace namespace = new UriExtractionNamespace(
tmpFile.toURI(),
tmpFile.toURI(),
@ -161,7 +161,7 @@ public class NamespacedExtractorModuleTest
{
{
final File tmpFile = temporaryFolder.newFile();
final File tmpFile = temporaryFolder.newFile();
try (Writer out = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
try (Writer out = Files.newWriter(tmpFile, StandardCharsets.UTF_8)) {
out.write(mapper.writeValueAsString(ImmutableMap.of("foo", "bar")));
out.write(MAPPER.writeValueAsString(ImmutableMap.of("foo", "bar")));
}
}
final UriExtractionNamespace namespace = new UriExtractionNamespace(
final UriExtractionNamespace namespace = new UriExtractionNamespace(
tmpFile.toURI(),
tmpFile.toURI(),
@ -70,12 +70,12 @@ public class JdbcExtractionNamespaceTest
|
||||||
@Rule
|
@Rule
|
||||||
public final TestDerbyConnector.DerbyConnectorRule derbyConnectorRule = new TestDerbyConnector.DerbyConnectorRule();
|
public final TestDerbyConnector.DerbyConnectorRule derbyConnectorRule = new TestDerbyConnector.DerbyConnectorRule();
|
||||||
private static final Logger log = new Logger(JdbcExtractionNamespaceTest.class);
|
private static final Logger log = new Logger(JdbcExtractionNamespaceTest.class);
|
||||||
private static final String tableName = "abstractDbRenameTest";
|
private static final String TABLE_NAME = "abstractDbRenameTest";
|
||||||
private static final String keyName = "keyName";
|
private static final String KEY_NAME = "keyName";
|
||||||
private static final String valName = "valName";
|
private static final String VAL_NAME = "valName";
|
||||||
private static final String tsColumn_ = "tsColumn";
|
private static final String TS_COLUMN = "tsColumn";
|
||||||
private static final String filterColumn = "filterColumn";
|
private static final String FILTER_COLUMN = "filterColumn";
|
||||||
private static final Map<String, String[]> renames = ImmutableMap.of(
|
private static final Map<String, String[]> RENAMES = ImmutableMap.of(
|
||||||
"foo", new String[]{"bar", "1"},
|
"foo", new String[]{"bar", "1"},
|
||||||
"bad", new String[]{"bar", "1"},
|
"bad", new String[]{"bar", "1"},
|
||||||
"how about that", new String[]{"foo", "0"},
|
"how about that", new String[]{"foo", "0"},
|
||||||
|
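The renames above (tableName → TABLE_NAME, keyName → KEY_NAME, and so on) are exactly what the new constant-name Checkstyle rule enforces for static final fields; note that the log field is left untouched, which suggests the configured format also tolerates logger names. A minimal sketch of such a check, assuming Checkstyle's default ConstantName pattern plus a log/logger exception — the project's actual configuration may differ:

import java.util.regex.Pattern;

class ConstantNameSketch
{
  // Checkstyle's default ConstantName format, extended with a hypothetical
  // log/logger exception inferred from the unrenamed fields in this diff.
  private static final Pattern CONSTANT_NAME =
      Pattern.compile("^(log(ger)?|[A-Z][A-Z0-9]*(_[A-Z0-9]+)*)$");

  public static void main(String[] args)
  {
    System.out.println(CONSTANT_NAME.matcher("TABLE_NAME").matches()); // true
    System.out.println(CONSTANT_NAME.matcher("tableName").matches());  // false
    System.out.println(CONSTANT_NAME.matcher("log").matches());        // true
  }
}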
@ -129,22 +129,22 @@ public class JdbcExtractionNamespaceTest
|
||||||
handle.createStatement(
|
handle.createStatement(
|
||||||
StringUtils.format(
|
StringUtils.format(
|
||||||
"CREATE TABLE %s (%s TIMESTAMP, %s VARCHAR(64), %s VARCHAR(64), %s VARCHAR(64))",
|
"CREATE TABLE %s (%s TIMESTAMP, %s VARCHAR(64), %s VARCHAR(64), %s VARCHAR(64))",
|
||||||
tableName,
|
TABLE_NAME,
|
||||||
tsColumn_,
|
TS_COLUMN,
|
||||||
filterColumn,
|
FILTER_COLUMN,
|
||||||
keyName,
|
KEY_NAME,
|
||||||
valName
|
VAL_NAME
|
||||||
)
|
)
|
||||||
).setQueryTimeout(1).execute()
|
).setQueryTimeout(1).execute()
|
||||||
);
|
);
|
||||||
handle.createStatement(StringUtils.format("TRUNCATE TABLE %s", tableName)).setQueryTimeout(1).execute();
|
handle.createStatement(StringUtils.format("TRUNCATE TABLE %s", TABLE_NAME)).setQueryTimeout(1).execute();
|
||||||
handle.commit();
|
handle.commit();
|
||||||
closer.register(new Closeable()
|
closer.register(new Closeable()
|
||||||
{
|
{
|
||||||
@Override
|
@Override
|
||||||
public void close() throws IOException
|
public void close() throws IOException
|
||||||
{
|
{
|
||||||
handle.createStatement("DROP TABLE " + tableName).setQueryTimeout(1).execute();
|
handle.createStatement("DROP TABLE " + TABLE_NAME).setQueryTimeout(1).execute();
|
||||||
final ListenableFuture future = setupTeardownService.submit(new Runnable()
|
final ListenableFuture future = setupTeardownService.submit(new Runnable()
|
||||||
{
|
{
|
||||||
@Override
|
@Override
|
||||||
|
@ -179,7 +179,7 @@ public class JdbcExtractionNamespaceTest
|
||||||
Assert.assertEquals(0, scheduler.getActiveEntries());
|
Assert.assertEquals(0, scheduler.getActiveEntries());
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
for (Map.Entry<String, String[]> entry : renames.entrySet()) {
|
for (Map.Entry<String, String[]> entry : RENAMES.entrySet()) {
|
||||||
try {
|
try {
|
||||||
String key = entry.getKey();
|
String key = entry.getKey();
|
||||||
String value = entry.getValue()[0];
|
String value = entry.getValue()[0];
|
||||||
|
@ -338,19 +338,19 @@ public class JdbcExtractionNamespaceTest
|
||||||
final String statementVal = val != null ? "'%s'" : "%s";
|
final String statementVal = val != null ? "'%s'" : "%s";
|
||||||
if (tsColumn == null) {
|
if (tsColumn == null) {
|
||||||
handle.createStatement(
|
handle.createStatement(
|
||||||
StringUtils.format("DELETE FROM %s WHERE %s='%s'", tableName, keyName, key)
|
StringUtils.format("DELETE FROM %s WHERE %s='%s'", TABLE_NAME, KEY_NAME, key)
|
||||||
).setQueryTimeout(1).execute();
|
).setQueryTimeout(1).execute();
|
||||||
query = StringUtils.format(
|
query = StringUtils.format(
|
||||||
"INSERT INTO %s (%s, %s, %s) VALUES ('%s', '%s', " + statementVal + ")",
|
"INSERT INTO %s (%s, %s, %s) VALUES ('%s', '%s', " + statementVal + ")",
|
||||||
tableName,
|
TABLE_NAME,
|
||||||
filterColumn, keyName, valName,
|
FILTER_COLUMN, KEY_NAME, VAL_NAME,
|
||||||
filter, key, val
|
filter, key, val
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
query = StringUtils.format(
|
query = StringUtils.format(
|
||||||
"INSERT INTO %s (%s, %s, %s, %s) VALUES ('%s', '%s', '%s', " + statementVal + ")",
|
"INSERT INTO %s (%s, %s, %s, %s) VALUES ('%s', '%s', '%s', " + statementVal + ")",
|
||||||
tableName,
|
TABLE_NAME,
|
||||||
tsColumn, filterColumn, keyName, valName,
|
tsColumn, FILTER_COLUMN, KEY_NAME, VAL_NAME,
|
||||||
updateTs, filter, key, val
|
updateTs, filter, key, val
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
@ -367,9 +367,9 @@ public class JdbcExtractionNamespaceTest
|
||||||
{
|
{
|
||||||
final JdbcExtractionNamespace extractionNamespace = new JdbcExtractionNamespace(
|
final JdbcExtractionNamespace extractionNamespace = new JdbcExtractionNamespace(
|
||||||
derbyConnectorRule.getMetadataConnectorConfig(),
|
derbyConnectorRule.getMetadataConnectorConfig(),
|
||||||
tableName,
|
TABLE_NAME,
|
||||||
keyName,
|
KEY_NAME,
|
||||||
valName,
|
VAL_NAME,
|
||||||
tsColumn,
|
tsColumn,
|
||||||
null,
|
null,
|
||||||
new Period(0)
|
new Period(0)
|
||||||
|
@ -378,7 +378,7 @@ public class JdbcExtractionNamespaceTest
|
||||||
CacheSchedulerTest.waitFor(entry);
|
CacheSchedulerTest.waitFor(entry);
|
||||||
final Map<String, String> map = entry.getCache();
|
final Map<String, String> map = entry.getCache();
|
||||||
|
|
||||||
for (Map.Entry<String, String[]> e : renames.entrySet()) {
|
for (Map.Entry<String, String[]> e : RENAMES.entrySet()) {
|
||||||
String key = e.getKey();
|
String key = e.getKey();
|
||||||
String[] val = e.getValue();
|
String[] val = e.getValue();
|
||||||
String field = val[0];
|
String field = val[0];
|
||||||
|
@ -398,18 +398,18 @@ public class JdbcExtractionNamespaceTest
|
||||||
{
|
{
|
||||||
final JdbcExtractionNamespace extractionNamespace = new JdbcExtractionNamespace(
|
final JdbcExtractionNamespace extractionNamespace = new JdbcExtractionNamespace(
|
||||||
derbyConnectorRule.getMetadataConnectorConfig(),
|
derbyConnectorRule.getMetadataConnectorConfig(),
|
||||||
tableName,
|
TABLE_NAME,
|
||||||
keyName,
|
KEY_NAME,
|
||||||
valName,
|
VAL_NAME,
|
||||||
tsColumn,
|
tsColumn,
|
||||||
filterColumn + "='1'",
|
FILTER_COLUMN + "='1'",
|
||||||
new Period(0)
|
new Period(0)
|
||||||
);
|
);
|
||||||
try (CacheScheduler.Entry entry = scheduler.schedule(extractionNamespace)) {
|
try (CacheScheduler.Entry entry = scheduler.schedule(extractionNamespace)) {
|
||||||
CacheSchedulerTest.waitFor(entry);
|
CacheSchedulerTest.waitFor(entry);
|
||||||
final Map<String, String> map = entry.getCache();
|
final Map<String, String> map = entry.getCache();
|
||||||
|
|
||||||
for (Map.Entry<String, String[]> e : renames.entrySet()) {
|
for (Map.Entry<String, String[]> e : RENAMES.entrySet()) {
|
||||||
String key = e.getKey();
|
String key = e.getKey();
|
||||||
String[] val = e.getValue();
|
String[] val = e.getValue();
|
||||||
String field = val[0];
|
String field = val[0];
|
||||||
|
@ -470,9 +470,9 @@ public class JdbcExtractionNamespaceTest
|
||||||
{
|
{
|
||||||
final JdbcExtractionNamespace extractionNamespace = new JdbcExtractionNamespace(
|
final JdbcExtractionNamespace extractionNamespace = new JdbcExtractionNamespace(
|
||||||
derbyConnectorRule.getMetadataConnectorConfig(),
|
derbyConnectorRule.getMetadataConnectorConfig(),
|
||||||
tableName,
|
TABLE_NAME,
|
||||||
keyName,
|
KEY_NAME,
|
||||||
valName,
|
VAL_NAME,
|
||||||
tsColumn,
|
tsColumn,
|
||||||
"some filter",
|
"some filter",
|
||||||
new Period(10)
|
new Period(10)
|
||||||
|
@ -491,9 +491,9 @@ public class JdbcExtractionNamespaceTest
|
||||||
{
|
{
|
||||||
final JdbcExtractionNamespace extractionNamespace = new JdbcExtractionNamespace(
|
final JdbcExtractionNamespace extractionNamespace = new JdbcExtractionNamespace(
|
||||||
derbyConnectorRule.getMetadataConnectorConfig(),
|
derbyConnectorRule.getMetadataConnectorConfig(),
|
||||||
tableName,
|
TABLE_NAME,
|
||||||
keyName,
|
KEY_NAME,
|
||||||
valName,
|
VAL_NAME,
|
||||||
tsColumn,
|
tsColumn,
|
||||||
null,
|
null,
|
||||||
new Period(10)
|
new Period(10)
|
||||||
|
|
|
@ -47,14 +47,14 @@ import java.util.Map;
|
||||||
@RunWith(Parameterized.class)
|
@RunWith(Parameterized.class)
|
||||||
public class PollingLookupTest
|
public class PollingLookupTest
|
||||||
{
|
{
|
||||||
private static final Map<String, String> firstLookupMap = ImmutableMap.of(
|
private static final Map<String, String> FIRST_LOOKUP_MAP = ImmutableMap.of(
|
||||||
"foo", "bar",
|
"foo", "bar",
|
||||||
"bad", "bar",
|
"bad", "bar",
|
||||||
"how about that", "foo",
|
"how about that", "foo",
|
||||||
"empty string", ""
|
"empty string", ""
|
||||||
);
|
);
|
||||||
|
|
||||||
private static final Map<String, String> secondLookupMap = ImmutableMap.of(
|
private static final Map<String, String> SECOND_LOOKUP_MAP = ImmutableMap.of(
|
||||||
"new-foo", "new-bar",
|
"new-foo", "new-bar",
|
||||||
"new-bad", "new-bar"
|
"new-bad", "new-bar"
|
||||||
);
|
);
|
||||||
|
@ -71,9 +71,9 @@ public class PollingLookupTest
|
||||||
{
|
{
|
||||||
if (callNumber == 0) {
|
if (callNumber == 0) {
|
||||||
callNumber++;
|
callNumber++;
|
||||||
return firstLookupMap.entrySet();
|
return FIRST_LOOKUP_MAP.entrySet();
|
||||||
}
|
}
|
||||||
return secondLookupMap.entrySet();
|
return SECOND_LOOKUP_MAP.entrySet();
|
||||||
}
|
}
|
||||||
|
|
||||||
@Nullable
|
@Nullable
|
||||||
|
@ -145,15 +145,15 @@ public class PollingLookupTest
|
||||||
@Test
|
@Test
|
||||||
public void testApply()
|
public void testApply()
|
||||||
{
|
{
|
||||||
assertMapLookup(firstLookupMap, pollingLookup);
|
assertMapLookup(FIRST_LOOKUP_MAP, pollingLookup);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test(timeout = POLL_PERIOD * 3)
|
@Test(timeout = POLL_PERIOD * 3)
|
||||||
public void testApplyAfterDataChange() throws InterruptedException
|
public void testApplyAfterDataChange() throws InterruptedException
|
||||||
{
|
{
|
||||||
assertMapLookup(firstLookupMap, pollingLookup);
|
assertMapLookup(FIRST_LOOKUP_MAP, pollingLookup);
|
||||||
Thread.sleep(POLL_PERIOD * 2);
|
Thread.sleep(POLL_PERIOD * 2);
|
||||||
assertMapLookup(secondLookupMap, pollingLookup);
|
assertMapLookup(SECOND_LOOKUP_MAP, pollingLookup);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
@ -184,8 +184,8 @@ public class PollingLookupTest
|
||||||
@Test
|
@Test
|
||||||
public void testBulkApply()
|
public void testBulkApply()
|
||||||
{
|
{
|
||||||
Map<String, String> map = pollingLookup.applyAll(firstLookupMap.keySet());
|
Map<String, String> map = pollingLookup.applyAll(FIRST_LOOKUP_MAP.keySet());
|
||||||
Assert.assertEquals(firstLookupMap, Maps.transformValues(map, new Function<String, String>()
|
Assert.assertEquals(FIRST_LOOKUP_MAP, Maps.transformValues(map, new Function<String, String>()
|
||||||
{
|
{
|
||||||
@Override
|
@Override
|
||||||
public String apply(String input)
|
public String apply(String input)
|
||||||
|
|
|
@ -50,7 +50,7 @@ public class JdbcDataFetcherTest
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
private static final Map<String, String> lookupMap = ImmutableMap.of(
|
private static final Map<String, String> LOOKUP_MAP = ImmutableMap.of(
|
||||||
"foo", "bar",
|
"foo", "bar",
|
||||||
"bad", "bar",
|
"bad", "bar",
|
||||||
"how about that", "foo",
|
"how about that", "foo",
|
||||||
|
@ -77,7 +77,7 @@ public class JdbcDataFetcherTest
|
||||||
);
|
);
|
||||||
handle.createStatement(StringUtils.format("TRUNCATE TABLE %s", tableName)).setQueryTimeout(1).execute();
|
handle.createStatement(StringUtils.format("TRUNCATE TABLE %s", tableName)).setQueryTimeout(1).execute();
|
||||||
|
|
||||||
for (Map.Entry<String, String> entry : lookupMap.entrySet()) {
|
for (Map.Entry<String, String> entry : LOOKUP_MAP.entrySet()) {
|
||||||
insertValues(entry.getKey(), entry.getValue(), handle);
|
insertValues(entry.getKey(), entry.getValue(), handle);
|
||||||
}
|
}
|
||||||
handle.commit();
|
handle.commit();
|
||||||
|
@ -94,7 +94,7 @@ public class JdbcDataFetcherTest
|
||||||
public void testFetch()
|
public void testFetch()
|
||||||
{
|
{
|
||||||
Assert.assertEquals("null check", null, jdbcDataFetcher.fetch("baz"));
|
Assert.assertEquals("null check", null, jdbcDataFetcher.fetch("baz"));
|
||||||
assertMapLookup(lookupMap, jdbcDataFetcher);
|
assertMapLookup(LOOKUP_MAP, jdbcDataFetcher);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
@ -102,15 +102,15 @@ public class JdbcDataFetcherTest
|
||||||
{
|
{
|
||||||
ImmutableMap.Builder<String, String> mapBuilder = ImmutableMap.builder();
|
ImmutableMap.Builder<String, String> mapBuilder = ImmutableMap.builder();
|
||||||
jdbcDataFetcher.fetchAll().forEach(mapBuilder::put);
|
jdbcDataFetcher.fetchAll().forEach(mapBuilder::put);
|
||||||
Assert.assertEquals("maps should match", lookupMap, mapBuilder.build());
|
Assert.assertEquals("maps should match", LOOKUP_MAP, mapBuilder.build());
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testFetchKeys()
|
public void testFetchKeys()
|
||||||
{
|
{
|
||||||
ImmutableMap.Builder<String, String> mapBuilder = ImmutableMap.builder();
|
ImmutableMap.Builder<String, String> mapBuilder = ImmutableMap.builder();
|
||||||
jdbcDataFetcher.fetch(lookupMap.keySet()).forEach(mapBuilder::put);
|
jdbcDataFetcher.fetch(LOOKUP_MAP.keySet()).forEach(mapBuilder::put);
|
||||||
Assert.assertEquals(lookupMap, mapBuilder.build());
|
Assert.assertEquals(LOOKUP_MAP, mapBuilder.build());
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
|
|
@ -57,7 +57,7 @@ public class S3DataSegmentPuller implements URIDataPuller
|
||||||
{
|
{
|
||||||
public static final int DEFAULT_RETRY_COUNT = 3;
|
public static final int DEFAULT_RETRY_COUNT = 3;
|
||||||
|
|
||||||
public static final String scheme = S3StorageDruidModule.SCHEME;
|
public static final String SCHEME = S3StorageDruidModule.SCHEME;
|
||||||
|
|
||||||
private static final Logger log = new Logger(S3DataSegmentPuller.class);
|
private static final Logger log = new Logger(S3DataSegmentPuller.class);
|
||||||
|
|
||||||
|
@ -141,8 +141,8 @@ public class S3DataSegmentPuller implements URIDataPuller
|
||||||
|
|
||||||
public static URI checkURI(URI uri)
|
public static URI checkURI(URI uri)
|
||||||
{
|
{
|
||||||
if (uri.getScheme().equalsIgnoreCase(scheme)) {
|
if (uri.getScheme().equalsIgnoreCase(SCHEME)) {
|
||||||
uri = URI.create("s3" + uri.toString().substring(scheme.length()));
|
uri = URI.create("s3" + uri.toString().substring(SCHEME.length()));
|
||||||
} else if (!"s3".equalsIgnoreCase(uri.getScheme())) {
|
} else if (!"s3".equalsIgnoreCase(uri.getScheme())) {
|
||||||
throw new IAE("Don't know how to load scheme for URI [%s]", uri.toString());
|
throw new IAE("Don't know how to load scheme for URI [%s]", uri.toString());
|
||||||
}
|
}
|
||||||
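checkURI above normalizes a Druid-specific scheme back to a plain s3 URI by swapping the scheme prefix. A hedged sketch, assuming SCHEME resolves to "s3_zip" (the real value comes from S3StorageDruidModule and may differ):

import java.net.URI;

class SchemeRewriteSketch
{
  public static void main(String[] args)
  {
    String scheme = "s3_zip"; // assumed value of S3StorageDruidModule.SCHEME
    URI uri = URI.create("s3_zip://bucket/path/segment.zip");
    if (uri.getScheme().equalsIgnoreCase(scheme)) {
      // "s3" + "://bucket/path/segment.zip"
      uri = URI.create("s3" + uri.toString().substring(scheme.length()));
    }
    System.out.println(uri); // s3://bucket/path/segment.zip
  }
}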
|
|
|
@ -57,9 +57,9 @@ import java.util.stream.Collectors;
|
||||||
*/
|
*/
|
||||||
public class StaticS3FirehoseFactoryTest
|
public class StaticS3FirehoseFactoryTest
|
||||||
{
|
{
|
||||||
private static final AmazonS3Client S3_ClIENT = EasyMock.createNiceMock(AmazonS3Client.class);
|
private static final AmazonS3Client S3_CLIENT = EasyMock.createNiceMock(AmazonS3Client.class);
|
||||||
private static final ServerSideEncryptingAmazonS3 SERVICE = new ServerSideEncryptingAmazonS3(
|
private static final ServerSideEncryptingAmazonS3 SERVICE = new ServerSideEncryptingAmazonS3(
|
||||||
S3_ClIENT,
|
S3_CLIENT,
|
||||||
new NoopServerSideEncryption()
|
new NoopServerSideEncryption()
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -102,7 +102,7 @@ public class StaticS3FirehoseFactoryTest
|
||||||
uris.sort(Comparator.comparing(URI::toString));
|
uris.sort(Comparator.comparing(URI::toString));
|
||||||
|
|
||||||
uris.forEach(StaticS3FirehoseFactoryTest::addExpectedObjject);
|
uris.forEach(StaticS3FirehoseFactoryTest::addExpectedObjject);
|
||||||
EasyMock.replay(S3_ClIENT);
|
EasyMock.replay(S3_CLIENT);
|
||||||
|
|
||||||
final StaticS3FirehoseFactory factory = new StaticS3FirehoseFactory(
|
final StaticS3FirehoseFactory factory = new StaticS3FirehoseFactory(
|
||||||
SERVICE,
|
SERVICE,
|
||||||
|
|
|
@ -51,7 +51,7 @@ import java.util.Set;
|
||||||
|
|
||||||
public class S3DataSegmentMoverTest
|
public class S3DataSegmentMoverTest
|
||||||
{
|
{
|
||||||
private static final DataSegment sourceSegment = new DataSegment(
|
private static final DataSegment SOURCE_SEGMENT = new DataSegment(
|
||||||
"test",
|
"test",
|
||||||
Intervals.of("2013-01-01/2013-01-02"),
|
Intervals.of("2013-01-01/2013-01-02"),
|
||||||
"1",
|
"1",
|
||||||
|
@ -80,7 +80,7 @@ public class S3DataSegmentMoverTest
|
||||||
);
|
);
|
||||||
|
|
||||||
DataSegment movedSegment = mover.move(
|
DataSegment movedSegment = mover.move(
|
||||||
sourceSegment,
|
SOURCE_SEGMENT,
|
||||||
ImmutableMap.of("baseKey", "targetBaseKey", "bucket", "archive")
|
ImmutableMap.of("baseKey", "targetBaseKey", "bucket", "archive")
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -102,7 +102,7 @@ public class S3DataSegmentMoverTest
|
||||||
);
|
);
|
||||||
|
|
||||||
DataSegment movedSegment = mover.move(
|
DataSegment movedSegment = mover.move(
|
||||||
sourceSegment,
|
SOURCE_SEGMENT,
|
||||||
ImmutableMap.of("baseKey", "targetBaseKey", "bucket", "archive")
|
ImmutableMap.of("baseKey", "targetBaseKey", "bucket", "archive")
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -120,7 +120,7 @@ public class S3DataSegmentMoverTest
|
||||||
S3DataSegmentMover mover = new S3DataSegmentMover(mockS3Client, new S3DataSegmentPusherConfig());
|
S3DataSegmentMover mover = new S3DataSegmentMover(mockS3Client, new S3DataSegmentPusherConfig());
|
||||||
|
|
||||||
mover.move(
|
mover.move(
|
||||||
sourceSegment,
|
SOURCE_SEGMENT,
|
||||||
ImmutableMap.of("baseKey", "targetBaseKey", "bucket", "archive")
|
ImmutableMap.of("baseKey", "targetBaseKey", "bucket", "archive")
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
|
@ -33,7 +33,7 @@ import java.util.Set;
|
||||||
|
|
||||||
public class S3DataSegmentPusherConfigTest
|
public class S3DataSegmentPusherConfigTest
|
||||||
{
|
{
|
||||||
private static final ObjectMapper jsonMapper = new DefaultObjectMapper();
|
private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper();
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testSerialization() throws IOException
|
public void testSerialization() throws IOException
|
||||||
|
@ -41,8 +41,8 @@ public class S3DataSegmentPusherConfigTest
|
||||||
String jsonConfig = "{\"bucket\":\"bucket1\",\"baseKey\":\"dataSource1\","
|
String jsonConfig = "{\"bucket\":\"bucket1\",\"baseKey\":\"dataSource1\","
|
||||||
+ "\"disableAcl\":false,\"maxListingLength\":2000,\"useS3aSchema\":false}";
|
+ "\"disableAcl\":false,\"maxListingLength\":2000,\"useS3aSchema\":false}";
|
||||||
|
|
||||||
S3DataSegmentPusherConfig config = jsonMapper.readValue(jsonConfig, S3DataSegmentPusherConfig.class);
|
S3DataSegmentPusherConfig config = JSON_MAPPER.readValue(jsonConfig, S3DataSegmentPusherConfig.class);
|
||||||
Assert.assertEquals(jsonConfig, jsonMapper.writeValueAsString(config));
|
Assert.assertEquals(jsonConfig, JSON_MAPPER.writeValueAsString(config));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
@ -52,8 +52,8 @@ public class S3DataSegmentPusherConfigTest
|
||||||
String expectedJsonConfig = "{\"bucket\":\"bucket1\",\"baseKey\":\"dataSource1\","
|
String expectedJsonConfig = "{\"bucket\":\"bucket1\",\"baseKey\":\"dataSource1\","
|
||||||
+ "\"disableAcl\":false,\"maxListingLength\":1000,\"useS3aSchema\":false}";
|
+ "\"disableAcl\":false,\"maxListingLength\":1000,\"useS3aSchema\":false}";
|
||||||
|
|
||||||
S3DataSegmentPusherConfig config = jsonMapper.readValue(jsonConfig, S3DataSegmentPusherConfig.class);
|
S3DataSegmentPusherConfig config = JSON_MAPPER.readValue(jsonConfig, S3DataSegmentPusherConfig.class);
|
||||||
Assert.assertEquals(expectedJsonConfig, jsonMapper.writeValueAsString(config));
|
Assert.assertEquals(expectedJsonConfig, JSON_MAPPER.writeValueAsString(config));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
@ -63,7 +63,7 @@ public class S3DataSegmentPusherConfigTest
|
||||||
+ "\"disableAcl\":false,\"maxListingLength\":-1}";
|
+ "\"disableAcl\":false,\"maxListingLength\":-1}";
|
||||||
Validator validator = Validation.buildDefaultValidatorFactory().getValidator();
|
Validator validator = Validation.buildDefaultValidatorFactory().getValidator();
|
||||||
|
|
||||||
S3DataSegmentPusherConfig config = jsonMapper.readValue(jsonConfig, S3DataSegmentPusherConfig.class);
|
S3DataSegmentPusherConfig config = JSON_MAPPER.readValue(jsonConfig, S3DataSegmentPusherConfig.class);
|
||||||
Set<ConstraintViolation<S3DataSegmentPusherConfig>> violations = validator.validate(config);
|
Set<ConstraintViolation<S3DataSegmentPusherConfig>> violations = validator.validate(config);
|
||||||
Assert.assertEquals(1, violations.size());
|
Assert.assertEquals(1, violations.size());
|
||||||
ConstraintViolation violation = Iterators.getOnlyElement(violations.iterator());
|
ConstraintViolation violation = Iterators.getOnlyElement(violations.iterator());
|
||||||
|
|
|
@ -38,7 +38,7 @@ import java.util.List;
|
||||||
*/
|
*/
|
||||||
public class VarianceSerde extends ComplexMetricSerde
|
public class VarianceSerde extends ComplexMetricSerde
|
||||||
{
|
{
|
||||||
private static final Ordering<VarianceAggregatorCollector> comparator =
|
private static final Ordering<VarianceAggregatorCollector> COMPARATOR =
|
||||||
Ordering.from(VarianceAggregatorCollector.COMPARATOR).nullsFirst();
|
Ordering.from(VarianceAggregatorCollector.COMPARATOR).nullsFirst();
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
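The COMPARATOR above uses Guava's Ordering to wrap an existing comparator so that null collectors sort before every non-null one. A small illustrative sketch of the idiom:

import com.google.common.collect.Ordering;

class NullsFirstSketch
{
  public static void main(String[] args)
  {
    Ordering<Integer> ordering = Ordering.from(Integer::compare).nullsFirst();
    System.out.println(ordering.compare(null, 5)); // negative: null sorts first
    System.out.println(ordering.compare(3, 5));    // negative, as usual
  }
}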
|
@ -114,7 +114,7 @@ public class VarianceSerde extends ComplexMetricSerde
|
||||||
@Override
|
@Override
|
||||||
public int compare(VarianceAggregatorCollector o1, VarianceAggregatorCollector o2)
|
public int compare(VarianceAggregatorCollector o1, VarianceAggregatorCollector o2)
|
||||||
{
|
{
|
||||||
return comparator.compare(o1, o2);
|
return COMPARATOR.compare(o1, o2);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
|
@ -34,7 +34,7 @@ import java.util.concurrent.ThreadLocalRandom;
|
||||||
|
|
||||||
public class VarianceAggregatorCollectorTest
|
public class VarianceAggregatorCollectorTest
|
||||||
{
|
{
|
||||||
private static final float[] market_upfront = new float[]{
|
private static final float[] MARKET_UPFRONT = new float[]{
|
||||||
800.0f, 800.0f, 826.0602f, 1564.6177f, 1006.4021f, 869.64374f, 809.04175f, 1458.4027f, 852.4375f, 879.9881f,
|
800.0f, 800.0f, 826.0602f, 1564.6177f, 1006.4021f, 869.64374f, 809.04175f, 1458.4027f, 852.4375f, 879.9881f,
|
||||||
950.1468f, 712.7746f, 846.2675f, 682.8855f, 1109.875f, 594.3817f, 870.1159f, 677.511f, 1410.2781f, 1219.4321f,
|
950.1468f, 712.7746f, 846.2675f, 682.8855f, 1109.875f, 594.3817f, 870.1159f, 677.511f, 1410.2781f, 1219.4321f,
|
||||||
979.306f, 1224.5016f, 1215.5898f, 716.6092f, 1301.0233f, 786.3633f, 989.9315f, 1609.0967f, 1023.2952f, 1367.6381f,
|
979.306f, 1224.5016f, 1215.5898f, 716.6092f, 1301.0233f, 786.3633f, 989.9315f, 1609.0967f, 1023.2952f, 1367.6381f,
|
||||||
|
@ -57,7 +57,7 @@ public class VarianceAggregatorCollectorTest
|
||||||
989.0328f, 744.7446f, 1166.4012f, 753.105f, 962.7312f, 780.272f
|
989.0328f, 744.7446f, 1166.4012f, 753.105f, 962.7312f, 780.272f
|
||||||
};
|
};
|
||||||
|
|
||||||
private static final float[] market_total_market = new float[]{
|
private static final float[] MARKET_TOTAL_MARKET = new float[]{
|
||||||
1000.0f, 1000.0f, 1040.9456f, 1689.0128f, 1049.142f, 1073.4766f, 1007.36554f, 1545.7089f, 1016.9652f, 1077.6127f,
|
1000.0f, 1000.0f, 1040.9456f, 1689.0128f, 1049.142f, 1073.4766f, 1007.36554f, 1545.7089f, 1016.9652f, 1077.6127f,
|
||||||
1075.0896f, 953.9954f, 1022.7833f, 937.06195f, 1156.7448f, 849.8775f, 1066.208f, 904.34064f, 1240.5255f,
|
1075.0896f, 953.9954f, 1022.7833f, 937.06195f, 1156.7448f, 849.8775f, 1066.208f, 904.34064f, 1240.5255f,
|
||||||
1343.2325f, 1088.9431f, 1349.2544f, 1102.8667f, 939.2441f, 1109.8754f, 997.99457f, 1037.4495f, 1686.4197f,
|
1343.2325f, 1088.9431f, 1349.2544f, 1102.8667f, 939.2441f, 1109.8754f, 997.99457f, 1037.4495f, 1686.4197f,
|
||||||
|
@ -85,7 +85,7 @@ public class VarianceAggregatorCollectorTest
|
||||||
public void testVariance()
|
public void testVariance()
|
||||||
{
|
{
|
||||||
Random random = ThreadLocalRandom.current();
|
Random random = ThreadLocalRandom.current();
|
||||||
for (float[] values : Arrays.asList(market_upfront, market_total_market)) {
|
for (float[] values : Arrays.asList(MARKET_UPFRONT, MARKET_TOTAL_MARKET)) {
|
||||||
double sum = 0;
|
double sum = 0;
|
||||||
for (float f : values) {
|
for (float f : values) {
|
||||||
sum += f;
|
sum += f;
|
||||||
|
|
|
@ -96,12 +96,12 @@ public class VarianceGroupByQueryTest
|
||||||
{
|
{
|
||||||
GroupByQuery query = GroupByQuery
|
GroupByQuery query = GroupByQuery
|
||||||
.builder()
|
.builder()
|
||||||
.setDataSource(QueryRunnerTestHelper.dataSource)
|
.setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
|
||||||
.setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
|
.setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
|
||||||
.setDimensions(new DefaultDimensionSpec("quality", "alias"))
|
.setDimensions(new DefaultDimensionSpec("quality", "alias"))
|
||||||
.setAggregatorSpecs(VarianceTestHelper.indexVarianceAggr)
|
.setAggregatorSpecs(VarianceTestHelper.INDEX_VARIANCE_AGGR)
|
||||||
.setPostAggregatorSpecs(Collections.singletonList(VarianceTestHelper.stddevOfIndexPostAggr))
|
.setPostAggregatorSpecs(Collections.singletonList(VarianceTestHelper.STD_DEV_OF_INDEX_POST_AGGR))
|
||||||
.setGranularity(QueryRunnerTestHelper.dayGran)
|
.setGranularity(QueryRunnerTestHelper.DAY_GRAN)
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
VarianceTestHelper.RowBuilder builder =
|
VarianceTestHelper.RowBuilder builder =
|
||||||
|
@ -138,16 +138,16 @@ public class VarianceGroupByQueryTest
|
||||||
{
|
{
|
||||||
GroupByQuery query = GroupByQuery
|
GroupByQuery query = GroupByQuery
|
||||||
.builder()
|
.builder()
|
||||||
.setDataSource(QueryRunnerTestHelper.dataSource)
|
.setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
|
||||||
.setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
|
.setQuerySegmentSpec(QueryRunnerTestHelper.FIRST_TO_THIRD)
|
||||||
.setDimensions(new DefaultDimensionSpec("quality", "alias"))
|
.setDimensions(new DefaultDimensionSpec("quality", "alias"))
|
||||||
.setAggregatorSpecs(
|
.setAggregatorSpecs(
|
||||||
QueryRunnerTestHelper.rowsCount,
|
QueryRunnerTestHelper.ROWS_COUNT,
|
||||||
VarianceTestHelper.indexVarianceAggr,
|
VarianceTestHelper.INDEX_VARIANCE_AGGR,
|
||||||
new LongSumAggregatorFactory("idx", "index")
|
new LongSumAggregatorFactory("idx", "index")
|
||||||
)
|
)
|
||||||
.setPostAggregatorSpecs(Collections.singletonList(VarianceTestHelper.stddevOfIndexPostAggr))
|
.setPostAggregatorSpecs(Collections.singletonList(VarianceTestHelper.STD_DEV_OF_INDEX_POST_AGGR))
|
||||||
.setGranularity(QueryRunnerTestHelper.dayGran)
|
.setGranularity(QueryRunnerTestHelper.DAY_GRAN)
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
VarianceTestHelper.RowBuilder builder =
|
VarianceTestHelper.RowBuilder builder =
|
||||||
|
@ -188,20 +188,20 @@ public class VarianceGroupByQueryTest
|
||||||
|
|
||||||
GroupByQuery query = GroupByQuery
|
GroupByQuery query = GroupByQuery
|
||||||
.builder()
|
.builder()
|
||||||
.setDataSource(QueryRunnerTestHelper.dataSource)
|
.setDataSource(QueryRunnerTestHelper.DATA_SOURCE)
|
||||||
.setInterval("2011-04-02/2011-04-04")
|
.setInterval("2011-04-02/2011-04-04")
|
||||||
.setDimensions(new DefaultDimensionSpec("quality", "alias"))
|
.setDimensions(new DefaultDimensionSpec("quality", "alias"))
|
||||||
.setAggregatorSpecs(
|
.setAggregatorSpecs(
|
||||||
QueryRunnerTestHelper.rowsCount,
|
QueryRunnerTestHelper.ROWS_COUNT,
|
||||||
QueryRunnerTestHelper.indexLongSum,
|
QueryRunnerTestHelper.INDEX_LONG_SUM,
|
||||||
VarianceTestHelper.indexVarianceAggr
|
VarianceTestHelper.INDEX_VARIANCE_AGGR
|
||||||
)
|
)
|
||||||
.setPostAggregatorSpecs(ImmutableList.of(VarianceTestHelper.stddevOfIndexPostAggr))
|
.setPostAggregatorSpecs(ImmutableList.of(VarianceTestHelper.STD_DEV_OF_INDEX_POST_AGGR))
|
||||||
.setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
|
.setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
|
||||||
.setHavingSpec(
|
.setHavingSpec(
|
||||||
new OrHavingSpec(
|
new OrHavingSpec(
|
||||||
ImmutableList.of(
|
ImmutableList.of(
|
||||||
new GreaterThanHavingSpec(VarianceTestHelper.stddevOfIndexMetric, 15L) // 3 rows
|
new GreaterThanHavingSpec(VarianceTestHelper.STD_DEV_OF_INDEX_METRIC, 15L) // 3 rows
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
@ -220,7 +220,7 @@ public class VarianceGroupByQueryTest
|
||||||
new DefaultLimitSpec(
|
new DefaultLimitSpec(
|
||||||
Collections.singletonList(
|
Collections.singletonList(
|
||||||
OrderByColumnSpec.asc(
|
OrderByColumnSpec.asc(
|
||||||
VarianceTestHelper.stddevOfIndexMetric
|
VarianceTestHelper.STD_DEV_OF_INDEX_METRIC
|
||||||
)
|
)
|
||||||
), 2
|
), 2
|
||||||
)
|
)
|
||||||
|
|
|
@ -48,26 +48,26 @@ public class VarianceTestHelper extends QueryRunnerTestHelper
|
||||||
module.configure(null);
|
module.configure(null);
|
||||||
}
|
}
|
||||||
|
|
||||||
public static final String indexVarianceMetric = "index_var";
|
public static final String INDEX_VARIANCE_METRIC = "index_var";
|
||||||
|
|
||||||
public static final VarianceAggregatorFactory indexVarianceAggr = new VarianceAggregatorFactory(
|
public static final VarianceAggregatorFactory INDEX_VARIANCE_AGGR = new VarianceAggregatorFactory(
|
||||||
indexVarianceMetric,
|
INDEX_VARIANCE_METRIC,
|
||||||
indexMetric
|
INDEX_METRIC
|
||||||
);
|
);
|
||||||
|
|
||||||
public static final String stddevOfIndexMetric = "index_stddev";
|
public static final String STD_DEV_OF_INDEX_METRIC = "index_stddev";
|
||||||
|
|
||||||
public static final PostAggregator stddevOfIndexPostAggr = new StandardDeviationPostAggregator(
|
public static final PostAggregator STD_DEV_OF_INDEX_POST_AGGR = new StandardDeviationPostAggregator(
|
||||||
stddevOfIndexMetric,
|
STD_DEV_OF_INDEX_METRIC,
|
||||||
indexVarianceMetric,
|
INDEX_VARIANCE_METRIC,
|
||||||
null
|
null
|
||||||
);
|
);
|
||||||
|
|
||||||
public static final List<AggregatorFactory> commonPlusVarAggregators = Arrays.asList(
|
public static final List<AggregatorFactory> COMMON_PLUS_VAR_AGGREGATORS = Arrays.asList(
|
||||||
rowsCount,
|
ROWS_COUNT,
|
||||||
indexDoubleSum,
|
INDEX_DOUBLE_SUM,
|
||||||
qualityUniques,
|
QUALITY_UNIQUES,
|
||||||
indexVarianceAggr
|
INDEX_VARIANCE_AGGR
|
||||||
);
|
);
|
||||||
|
|
||||||
public static class RowBuilder
|
public static class RowBuilder
|
||||||
|
|
|
@ -70,14 +70,14 @@ public class VarianceTimeseriesQueryTest
|
||||||
public void testTimeseriesWithNullFilterOnNonExistentDimension()
|
public void testTimeseriesWithNullFilterOnNonExistentDimension()
|
||||||
{
|
{
|
||||||
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
|
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
|
||||||
.dataSource(QueryRunnerTestHelper.dataSource)
|
.dataSource(QueryRunnerTestHelper.DATA_SOURCE)
|
||||||
.granularity(QueryRunnerTestHelper.dayGran)
|
.granularity(QueryRunnerTestHelper.DAY_GRAN)
|
||||||
.filters("bobby", null)
|
.filters("bobby", null)
|
||||||
.intervals(QueryRunnerTestHelper.firstToThird)
|
.intervals(QueryRunnerTestHelper.FIRST_TO_THIRD)
|
||||||
.aggregators(VarianceTestHelper.commonPlusVarAggregators)
|
.aggregators(VarianceTestHelper.COMMON_PLUS_VAR_AGGREGATORS)
|
||||||
.postAggregators(
|
.postAggregators(
|
||||||
QueryRunnerTestHelper.addRowsIndexConstant,
|
QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT,
|
||||||
VarianceTestHelper.stddevOfIndexPostAggr
|
VarianceTestHelper.STD_DEV_OF_INDEX_POST_AGGR
|
||||||
)
|
)
|
||||||
.descending(descending)
|
.descending(descending)
|
||||||
.build();
|
.build();
|
||||||
|
|
|
@ -68,16 +68,16 @@ public class VarianceTopNQueryTest
|
||||||
public void testFullOnTopNOverUniques()
|
public void testFullOnTopNOverUniques()
|
||||||
{
|
{
|
||||||
TopNQuery query = new TopNQueryBuilder()
|
TopNQuery query = new TopNQueryBuilder()
|
||||||
.dataSource(QueryRunnerTestHelper.dataSource)
|
.dataSource(QueryRunnerTestHelper.DATA_SOURCE)
|
||||||
.granularity(QueryRunnerTestHelper.allGran)
|
.granularity(QueryRunnerTestHelper.ALL_GRAN)
|
||||||
.dimension(QueryRunnerTestHelper.marketDimension)
|
.dimension(QueryRunnerTestHelper.MARKET_DIMENSION)
|
||||||
.metric(QueryRunnerTestHelper.uniqueMetric)
|
.metric(QueryRunnerTestHelper.UNIQUE_METRIC)
|
||||||
.threshold(3)
|
.threshold(3)
|
||||||
.intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
|
.intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
|
||||||
.aggregators(
|
.aggregators(
|
||||||
Lists.newArrayList(
|
Lists.newArrayList(
|
||||||
Iterables.concat(
|
Iterables.concat(
|
||||||
VarianceTestHelper.commonPlusVarAggregators,
|
VarianceTestHelper.COMMON_PLUS_VAR_AGGREGATORS,
|
||||||
Lists.newArrayList(
|
Lists.newArrayList(
|
||||||
new DoubleMaxAggregatorFactory("maxIndex", "index"),
|
new DoubleMaxAggregatorFactory("maxIndex", "index"),
|
||||||
new DoubleMinAggregatorFactory("minIndex", "index")
|
new DoubleMinAggregatorFactory("minIndex", "index")
|
||||||
|
@ -85,7 +85,7 @@ public class VarianceTopNQueryTest
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.postAggregators(QueryRunnerTestHelper.addRowsIndexConstant)
|
.postAggregators(QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT)
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
List<Result<TopNResultValue>> expectedResults = Collections.singletonList(
|
List<Result<TopNResultValue>> expectedResults = Collections.singletonList(
|
||||||
|
|
|
@ -23,266 +23,266 @@ package org.apache.druid.hll;
|
||||||
*/
|
*/
|
||||||
public class ByteBitLookup
|
public class ByteBitLookup
|
||||||
{
|
{
|
||||||
public static final byte[] lookup;
|
public static final byte[] LOOKUP;
|
||||||
|
|
||||||
static {
|
static {
|
||||||
lookup = new byte[256];
|
LOOKUP = new byte[256];
|
||||||
|
|
||||||
lookup[0] = 0;
|
LOOKUP[0] = 0;
|
||||||
lookup[1] = 1;
|
LOOKUP[1] = 1;
|
||||||
lookup[2] = 2;
|
LOOKUP[2] = 2;
|
||||||
lookup[3] = 1;
|
LOOKUP[3] = 1;
|
||||||
lookup[4] = 3;
|
LOOKUP[4] = 3;
|
||||||
lookup[5] = 1;
|
LOOKUP[5] = 1;
|
||||||
lookup[6] = 2;
|
LOOKUP[6] = 2;
|
||||||
lookup[7] = 1;
|
LOOKUP[7] = 1;
|
||||||
lookup[8] = 4;
|
LOOKUP[8] = 4;
|
||||||
lookup[9] = 1;
|
LOOKUP[9] = 1;
|
||||||
lookup[10] = 2;
|
LOOKUP[10] = 2;
|
||||||
lookup[11] = 1;
|
LOOKUP[11] = 1;
|
||||||
lookup[12] = 3;
|
LOOKUP[12] = 3;
|
||||||
lookup[13] = 1;
|
LOOKUP[13] = 1;
|
||||||
lookup[14] = 2;
|
LOOKUP[14] = 2;
|
||||||
lookup[15] = 1;
|
LOOKUP[15] = 1;
|
||||||
lookup[16] = 5;
|
LOOKUP[16] = 5;
|
||||||
lookup[17] = 1;
|
LOOKUP[17] = 1;
|
||||||
lookup[18] = 2;
|
LOOKUP[18] = 2;
|
||||||
lookup[19] = 1;
|
LOOKUP[19] = 1;
|
||||||
lookup[20] = 3;
|
LOOKUP[20] = 3;
|
||||||
lookup[21] = 1;
|
LOOKUP[21] = 1;
|
||||||
lookup[22] = 2;
|
LOOKUP[22] = 2;
|
||||||
lookup[23] = 1;
|
LOOKUP[23] = 1;
|
||||||
lookup[24] = 4;
|
LOOKUP[24] = 4;
|
||||||
lookup[25] = 1;
|
LOOKUP[25] = 1;
|
||||||
lookup[26] = 2;
|
LOOKUP[26] = 2;
|
||||||
lookup[27] = 1;
|
LOOKUP[27] = 1;
|
||||||
lookup[28] = 3;
|
LOOKUP[28] = 3;
|
||||||
lookup[29] = 1;
|
LOOKUP[29] = 1;
|
||||||
lookup[30] = 2;
|
LOOKUP[30] = 2;
|
||||||
lookup[31] = 1;
|
LOOKUP[31] = 1;
|
||||||
lookup[32] = 6;
|
LOOKUP[32] = 6;
|
||||||
lookup[33] = 1;
|
LOOKUP[33] = 1;
|
||||||
lookup[34] = 2;
|
LOOKUP[34] = 2;
|
||||||
lookup[35] = 1;
|
LOOKUP[35] = 1;
|
||||||
lookup[36] = 3;
|
LOOKUP[36] = 3;
|
||||||
lookup[37] = 1;
|
LOOKUP[37] = 1;
|
||||||
lookup[38] = 2;
|
LOOKUP[38] = 2;
|
||||||
lookup[39] = 1;
|
LOOKUP[39] = 1;
|
||||||
lookup[40] = 4;
|
LOOKUP[40] = 4;
|
||||||
lookup[41] = 1;
|
LOOKUP[41] = 1;
|
||||||
lookup[42] = 2;
|
LOOKUP[42] = 2;
|
||||||
lookup[43] = 1;
|
LOOKUP[43] = 1;
|
||||||
lookup[44] = 3;
|
LOOKUP[44] = 3;
|
||||||
lookup[45] = 1;
|
LOOKUP[45] = 1;
|
||||||
lookup[46] = 2;
|
LOOKUP[46] = 2;
|
||||||
lookup[47] = 1;
|
LOOKUP[47] = 1;
|
||||||
lookup[48] = 5;
|
LOOKUP[48] = 5;
|
||||||
lookup[49] = 1;
|
LOOKUP[49] = 1;
|
||||||
lookup[50] = 2;
|
LOOKUP[50] = 2;
|
||||||
lookup[51] = 1;
|
LOOKUP[51] = 1;
|
||||||
lookup[52] = 3;
|
LOOKUP[52] = 3;
|
||||||
lookup[53] = 1;
|
LOOKUP[53] = 1;
|
||||||
lookup[54] = 2;
|
LOOKUP[54] = 2;
|
||||||
lookup[55] = 1;
|
LOOKUP[55] = 1;
|
||||||
lookup[56] = 4;
|
LOOKUP[56] = 4;
|
||||||
lookup[57] = 1;
|
LOOKUP[57] = 1;
|
||||||
lookup[58] = 2;
|
LOOKUP[58] = 2;
|
||||||
lookup[59] = 1;
|
LOOKUP[59] = 1;
|
||||||
lookup[60] = 3;
|
LOOKUP[60] = 3;
|
||||||
lookup[61] = 1;
|
LOOKUP[61] = 1;
|
||||||
lookup[62] = 2;
|
LOOKUP[62] = 2;
|
||||||
lookup[63] = 1;
|
LOOKUP[63] = 1;
|
||||||
lookup[64] = 7;
|
LOOKUP[64] = 7;
|
||||||
lookup[65] = 1;
|
LOOKUP[65] = 1;
|
||||||
lookup[66] = 2;
|
LOOKUP[66] = 2;
|
||||||
lookup[67] = 1;
|
LOOKUP[67] = 1;
|
||||||
lookup[68] = 3;
|
LOOKUP[68] = 3;
|
||||||
lookup[69] = 1;
|
LOOKUP[69] = 1;
|
||||||
lookup[70] = 2;
|
LOOKUP[70] = 2;
|
||||||
lookup[71] = 1;
|
LOOKUP[71] = 1;
|
||||||
lookup[72] = 4;
|
LOOKUP[72] = 4;
|
||||||
lookup[73] = 1;
|
LOOKUP[73] = 1;
|
||||||
lookup[74] = 2;
|
LOOKUP[74] = 2;
|
||||||
lookup[75] = 1;
|
LOOKUP[75] = 1;
|
||||||
lookup[76] = 3;
|
LOOKUP[76] = 3;
|
||||||
lookup[77] = 1;
|
LOOKUP[77] = 1;
|
||||||
lookup[78] = 2;
|
LOOKUP[78] = 2;
|
||||||
lookup[79] = 1;
|
LOOKUP[79] = 1;
|
||||||
lookup[80] = 5;
|
LOOKUP[80] = 5;
|
||||||
lookup[81] = 1;
|
LOOKUP[81] = 1;
|
||||||
lookup[82] = 2;
|
LOOKUP[82] = 2;
|
||||||
lookup[83] = 1;
|
LOOKUP[83] = 1;
|
||||||
lookup[84] = 3;
|
LOOKUP[84] = 3;
|
||||||
lookup[85] = 1;
|
LOOKUP[85] = 1;
|
||||||
lookup[86] = 2;
|
LOOKUP[86] = 2;
|
||||||
lookup[87] = 1;
|
LOOKUP[87] = 1;
|
||||||
lookup[88] = 4;
|
LOOKUP[88] = 4;
|
||||||
lookup[89] = 1;
|
LOOKUP[89] = 1;
|
||||||
lookup[90] = 2;
|
LOOKUP[90] = 2;
|
||||||
lookup[91] = 1;
|
LOOKUP[91] = 1;
|
||||||
lookup[92] = 3;
|
LOOKUP[92] = 3;
|
||||||
lookup[93] = 1;
|
LOOKUP[93] = 1;
|
||||||
lookup[94] = 2;
|
LOOKUP[94] = 2;
|
||||||
lookup[95] = 1;
|
LOOKUP[95] = 1;
|
||||||
lookup[96] = 6;
|
LOOKUP[96] = 6;
|
||||||
lookup[97] = 1;
|
LOOKUP[97] = 1;
|
||||||
lookup[98] = 2;
|
LOOKUP[98] = 2;
|
||||||
lookup[99] = 1;
|
LOOKUP[99] = 1;
|
||||||
lookup[100] = 3;
|
LOOKUP[100] = 3;
|
||||||
lookup[101] = 1;
|
LOOKUP[101] = 1;
|
||||||
lookup[102] = 2;
|
LOOKUP[102] = 2;
|
||||||
lookup[103] = 1;
|
LOOKUP[103] = 1;
|
||||||
lookup[104] = 4;
|
LOOKUP[104] = 4;
|
||||||
lookup[105] = 1;
|
LOOKUP[105] = 1;
|
||||||
lookup[106] = 2;
|
LOOKUP[106] = 2;
|
||||||
lookup[107] = 1;
|
LOOKUP[107] = 1;
|
||||||
lookup[108] = 3;
|
LOOKUP[108] = 3;
|
||||||
lookup[109] = 1;
|
LOOKUP[109] = 1;
|
||||||
lookup[110] = 2;
|
LOOKUP[110] = 2;
|
||||||
lookup[111] = 1;
|
LOOKUP[111] = 1;
|
||||||
lookup[112] = 5;
|
LOOKUP[112] = 5;
|
||||||
lookup[113] = 1;
|
LOOKUP[113] = 1;
|
||||||
lookup[114] = 2;
|
LOOKUP[114] = 2;
|
||||||
lookup[115] = 1;
|
LOOKUP[115] = 1;
|
||||||
lookup[116] = 3;
|
LOOKUP[116] = 3;
|
||||||
lookup[117] = 1;
|
LOOKUP[117] = 1;
|
||||||
lookup[118] = 2;
|
LOOKUP[118] = 2;
|
||||||
lookup[119] = 1;
|
LOOKUP[119] = 1;
|
||||||
lookup[120] = 4;
|
LOOKUP[120] = 4;
|
||||||
lookup[121] = 1;
|
LOOKUP[121] = 1;
|
||||||
lookup[122] = 2;
|
LOOKUP[122] = 2;
|
||||||
lookup[123] = 1;
|
LOOKUP[123] = 1;
|
||||||
lookup[124] = 3;
|
LOOKUP[124] = 3;
|
||||||
lookup[125] = 1;
|
LOOKUP[125] = 1;
|
||||||
lookup[126] = 2;
|
LOOKUP[126] = 2;
|
||||||
lookup[127] = 1;
|
LOOKUP[127] = 1;
|
||||||
lookup[128] = 8;
|
LOOKUP[128] = 8;
|
||||||
lookup[129] = 1;
|
LOOKUP[129] = 1;
|
||||||
lookup[130] = 2;
|
LOOKUP[130] = 2;
|
||||||
lookup[131] = 1;
|
LOOKUP[131] = 1;
|
||||||
lookup[132] = 3;
|
LOOKUP[132] = 3;
|
||||||
lookup[133] = 1;
|
LOOKUP[133] = 1;
|
||||||
lookup[134] = 2;
|
LOOKUP[134] = 2;
|
||||||
lookup[135] = 1;
|
LOOKUP[135] = 1;
|
||||||
lookup[136] = 4;
|
LOOKUP[136] = 4;
|
||||||
lookup[137] = 1;
|
LOOKUP[137] = 1;
|
||||||
lookup[138] = 2;
|
LOOKUP[138] = 2;
|
||||||
lookup[139] = 1;
|
LOOKUP[139] = 1;
|
||||||
lookup[140] = 3;
|
LOOKUP[140] = 3;
|
||||||
lookup[141] = 1;
|
LOOKUP[141] = 1;
|
||||||
lookup[142] = 2;
|
LOOKUP[142] = 2;
|
||||||
lookup[143] = 1;
|
LOOKUP[143] = 1;
|
||||||
lookup[144] = 5;
|
LOOKUP[144] = 5;
|
||||||
lookup[145] = 1;
|
LOOKUP[145] = 1;
|
||||||
lookup[146] = 2;
|
LOOKUP[146] = 2;
|
||||||
lookup[147] = 1;
|
LOOKUP[147] = 1;
|
||||||
lookup[148] = 3;
|
LOOKUP[148] = 3;
|
||||||
lookup[149] = 1;
|
LOOKUP[149] = 1;
|
||||||
lookup[150] = 2;
|
LOOKUP[150] = 2;
|
||||||
lookup[151] = 1;
|
LOOKUP[151] = 1;
|
||||||
lookup[152] = 4;
|
LOOKUP[152] = 4;
|
||||||
lookup[153] = 1;
|
LOOKUP[153] = 1;
|
||||||
lookup[154] = 2;
|
LOOKUP[154] = 2;
|
||||||
lookup[155] = 1;
|
LOOKUP[155] = 1;
|
||||||
lookup[156] = 3;
|
LOOKUP[156] = 3;
|
||||||
lookup[157] = 1;
|
LOOKUP[157] = 1;
|
||||||
lookup[158] = 2;
|
LOOKUP[158] = 2;
|
||||||
lookup[159] = 1;
|
LOOKUP[159] = 1;
|
||||||
lookup[160] = 6;
|
LOOKUP[160] = 6;
|
||||||
lookup[161] = 1;
|
LOOKUP[161] = 1;
|
||||||
lookup[162] = 2;
|
LOOKUP[162] = 2;
|
||||||
lookup[163] = 1;
|
LOOKUP[163] = 1;
|
||||||
lookup[164] = 3;
|
LOOKUP[164] = 3;
|
||||||
lookup[165] = 1;
|
LOOKUP[165] = 1;
|
||||||
lookup[166] = 2;
|
LOOKUP[166] = 2;
|
||||||
lookup[167] = 1;
|
LOOKUP[167] = 1;
|
||||||
lookup[168] = 4;
|
LOOKUP[168] = 4;
|
||||||
lookup[169] = 1;
|
LOOKUP[169] = 1;
|
||||||
lookup[170] = 2;
|
LOOKUP[170] = 2;
|
||||||
lookup[171] = 1;
|
LOOKUP[171] = 1;
|
||||||
lookup[172] = 3;
|
LOOKUP[172] = 3;
|
||||||
lookup[173] = 1;
|
LOOKUP[173] = 1;
|
||||||
lookup[174] = 2;
|
LOOKUP[174] = 2;
|
||||||
lookup[175] = 1;
|
LOOKUP[175] = 1;
|
||||||
lookup[176] = 5;
|
LOOKUP[176] = 5;
|
||||||
lookup[177] = 1;
|
LOOKUP[177] = 1;
|
||||||
lookup[178] = 2;
|
LOOKUP[178] = 2;
|
||||||
lookup[179] = 1;
|
LOOKUP[179] = 1;
|
||||||
lookup[180] = 3;
|
LOOKUP[180] = 3;
|
||||||
lookup[181] = 1;
|
LOOKUP[181] = 1;
|
||||||
lookup[182] = 2;
|
LOOKUP[182] = 2;
|
||||||
lookup[183] = 1;
|
LOOKUP[183] = 1;
|
||||||
lookup[184] = 4;
|
LOOKUP[184] = 4;
|
||||||
lookup[185] = 1;
|
LOOKUP[185] = 1;
|
||||||
lookup[186] = 2;
|
LOOKUP[186] = 2;
|
||||||
lookup[187] = 1;
|
LOOKUP[187] = 1;
|
||||||
lookup[188] = 3;
|
LOOKUP[188] = 3;
|
||||||
lookup[189] = 1;
|
LOOKUP[189] = 1;
|
||||||
lookup[190] = 2;
|
LOOKUP[190] = 2;
|
||||||
lookup[191] = 1;
|
LOOKUP[191] = 1;
|
||||||
lookup[192] = 7;
|
LOOKUP[192] = 7;
|
||||||
lookup[193] = 1;
|
LOOKUP[193] = 1;
|
||||||
lookup[194] = 2;
|
LOOKUP[194] = 2;
|
||||||
lookup[195] = 1;
|
LOOKUP[195] = 1;
|
||||||
lookup[196] = 3;
|
LOOKUP[196] = 3;
|
||||||
lookup[197] = 1;
|
LOOKUP[197] = 1;
|
||||||
lookup[198] = 2;
|
LOOKUP[198] = 2;
|
||||||
lookup[199] = 1;
|
LOOKUP[199] = 1;
|
||||||
lookup[200] = 4;
|
LOOKUP[200] = 4;
|
||||||
lookup[201] = 1;
|
LOOKUP[201] = 1;
|
||||||
lookup[202] = 2;
|
LOOKUP[202] = 2;
|
||||||
lookup[203] = 1;
|
LOOKUP[203] = 1;
|
||||||
lookup[204] = 3;
|
LOOKUP[204] = 3;
|
||||||
lookup[205] = 1;
|
LOOKUP[205] = 1;
|
||||||
lookup[206] = 2;
|
LOOKUP[206] = 2;
|
||||||
lookup[207] = 1;
|
LOOKUP[207] = 1;
|
||||||
lookup[208] = 5;
|
LOOKUP[208] = 5;
|
||||||
lookup[209] = 1;
|
LOOKUP[209] = 1;
|
||||||
lookup[210] = 2;
|
LOOKUP[210] = 2;
|
||||||
lookup[211] = 1;
|
LOOKUP[211] = 1;
|
||||||
lookup[212] = 3;
|
LOOKUP[212] = 3;
|
||||||
lookup[213] = 1;
|
LOOKUP[213] = 1;
|
||||||
lookup[214] = 2;
|
LOOKUP[214] = 2;
|
||||||
lookup[215] = 1;
|
LOOKUP[215] = 1;
|
||||||
lookup[216] = 4;
|
LOOKUP[216] = 4;
|
||||||
lookup[217] = 1;
|
LOOKUP[217] = 1;
|
||||||
lookup[218] = 2;
|
LOOKUP[218] = 2;
|
||||||
lookup[219] = 1;
|
LOOKUP[219] = 1;
|
||||||
lookup[220] = 3;
|
LOOKUP[220] = 3;
|
||||||
lookup[221] = 1;
|
LOOKUP[221] = 1;
|
||||||
lookup[222] = 2;
|
LOOKUP[222] = 2;
|
||||||
lookup[223] = 1;
|
LOOKUP[223] = 1;
|
||||||
lookup[224] = 6;
|
LOOKUP[224] = 6;
|
||||||
lookup[225] = 1;
|
LOOKUP[225] = 1;
|
||||||
lookup[226] = 2;
|
LOOKUP[226] = 2;
|
||||||
lookup[227] = 1;
|
LOOKUP[227] = 1;
|
||||||
lookup[228] = 3;
|
LOOKUP[228] = 3;
|
||||||
lookup[229] = 1;
|
LOOKUP[229] = 1;
|
||||||
lookup[230] = 2;
|
LOOKUP[230] = 2;
|
||||||
lookup[231] = 1;
|
LOOKUP[231] = 1;
|
||||||
lookup[232] = 4;
|
LOOKUP[232] = 4;
|
||||||
lookup[233] = 1;
|
LOOKUP[233] = 1;
|
||||||
lookup[234] = 2;
|
LOOKUP[234] = 2;
|
||||||
lookup[235] = 1;
|
LOOKUP[235] = 1;
|
||||||
lookup[236] = 3;
|
LOOKUP[236] = 3;
|
||||||
lookup[237] = 1;
|
LOOKUP[237] = 1;
|
||||||
lookup[238] = 2;
|
LOOKUP[238] = 2;
|
||||||
lookup[239] = 1;
|
LOOKUP[239] = 1;
|
||||||
lookup[240] = 5;
|
LOOKUP[240] = 5;
|
||||||
lookup[241] = 1;
|
LOOKUP[241] = 1;
|
||||||
lookup[242] = 2;
|
LOOKUP[242] = 2;
|
||||||
lookup[243] = 1;
|
LOOKUP[243] = 1;
|
||||||
lookup[244] = 3;
|
LOOKUP[244] = 3;
|
||||||
lookup[245] = 1;
|
LOOKUP[245] = 1;
|
||||||
lookup[246] = 2;
|
LOOKUP[246] = 2;
|
||||||
lookup[247] = 1;
|
LOOKUP[247] = 1;
|
||||||
lookup[248] = 4;
|
LOOKUP[248] = 4;
|
||||||
lookup[249] = 1;
|
LOOKUP[249] = 1;
|
||||||
lookup[250] = 2;
|
LOOKUP[250] = 2;
|
||||||
lookup[251] = 1;
|
LOOKUP[251] = 1;
|
||||||
lookup[252] = 3;
|
LOOKUP[252] = 3;
|
||||||
lookup[253] = 1;
|
LOOKUP[253] = 1;
|
||||||
lookup[254] = 2;
|
LOOKUP[254] = 2;
|
||||||
lookup[255] = 1;
|
LOOKUP[255] = 1;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
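The table above encodes, for every unsigned byte value, the 1-based position of its lowest set bit (0 for the byte 0). Rather than reading all 256 assignments, the content can be summarized by a generator sketch like this:

class ByteBitLookupSketch
{
  public static void main(String[] args)
  {
    byte[] lookup = new byte[256]; // lookup[0] stays 0
    for (int i = 1; i < 256; i++) {
      lookup[i] = (byte) (Integer.numberOfTrailingZeros(i) + 1);
    }
    System.out.println(lookup[40]);  // 4, matching LOOKUP[40] above
    System.out.println(lookup[192]); // 7, matching LOOKUP[192] above
  }
}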
|
|
|
@ -64,29 +64,29 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
|
||||||
public static final double HIGH_CORRECTION_THRESHOLD = TWO_TO_THE_SIXTY_FOUR / 30.0d;
|
public static final double HIGH_CORRECTION_THRESHOLD = TWO_TO_THE_SIXTY_FOUR / 30.0d;
|
||||||
public static final double CORRECTION_PARAMETER = ALPHA * NUM_BUCKETS * NUM_BUCKETS;
|
public static final double CORRECTION_PARAMETER = ALPHA * NUM_BUCKETS * NUM_BUCKETS;
|
||||||
|
|
||||||
private static final int bucketMask = 0x7ff;
|
private static final int BUCKET_MASK = 0x7ff;
|
||||||
private static final int minBytesRequired = 10;
|
private static final int MIN_BYTES_REQUIRED = 10;
|
||||||
private static final int bitsPerBucket = 4;
|
private static final int BITS_PER_BUCKET = 4;
|
||||||
private static final int range = (int) Math.pow(2, bitsPerBucket) - 1;
|
private static final int RANGE = (int) Math.pow(2, BITS_PER_BUCKET) - 1;
|
||||||
|
|
||||||
private static final double[][] minNumRegisterLookup = new double[64][256];
|
private static final double[][] MIN_NUM_REGISTER_LOOKUP = new double[64][256];
|
||||||
|
|
||||||
static {
|
static {
|
||||||
for (int registerOffset = 0; registerOffset < 64; ++registerOffset) {
|
for (int registerOffset = 0; registerOffset < 64; ++registerOffset) {
|
||||||
for (int register = 0; register < 256; ++register) {
|
for (int register = 0; register < 256; ++register) {
|
||||||
final int upper = ((register & 0xf0) >> 4) + registerOffset;
|
final int upper = ((register & 0xf0) >> 4) + registerOffset;
|
||||||
final int lower = (register & 0x0f) + registerOffset;
|
final int lower = (register & 0x0f) + registerOffset;
|
||||||
minNumRegisterLookup[registerOffset][register] = 1.0d / Math.pow(2, upper) + 1.0d / Math.pow(2, lower);
|
MIN_NUM_REGISTER_LOOKUP[registerOffset][register] = 1.0d / Math.pow(2, upper) + 1.0d / Math.pow(2, lower);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// we have to keep track of the number of zeroes in each of the two halves of the byte register (0, 1, or 2)
|
// we have to keep track of the number of zeroes in each of the two halves of the byte register (0, 1, or 2)
|
||||||
private static final int[] numZeroLookup = new int[256];
|
private static final int[] NUM_ZERO_LOOKUP = new int[256];
|
||||||
|
|
||||||
static {
|
static {
|
||||||
for (int i = 0; i < numZeroLookup.length; ++i) {
|
for (int i = 0; i < NUM_ZERO_LOOKUP.length; ++i) {
|
||||||
numZeroLookup[i] = (((i & 0xf0) == 0) ? 1 : 0) + (((i & 0x0f) == 0) ? 1 : 0);
|
NUM_ZERO_LOOKUP[i] = (((i & 0xf0) == 0) ? 1 : 0) + (((i & 0x0f) == 0) ? 1 : 0);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
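A short worked example of the two tables just defined, taking registerOffset 0 and the register byte 0x21 (upper nibble 2, lower nibble 1):

// MIN_NUM_REGISTER_LOOKUP[0][0x21] = 1/2^2 + 1/2^1 = 0.25 + 0.5 = 0.75
// NUM_ZERO_LOOKUP[0x21] = 0, since neither nibble is zero, while
// NUM_ZERO_LOOKUP[0x20] = 1, since only the lower nibble is zero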
|
@ -181,7 +181,7 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
|
||||||
short position = copy.getShort();
|
short position = copy.getShort();
|
||||||
final int register = (int) copy.get() & 0xff;
|
final int register = (int) copy.get() & 0xff;
|
||||||
if (overflowValue != 0 && position == overflowPosition) {
|
if (overflowValue != 0 && position == overflowPosition) {
|
||||||
int upperNibble = ((register & 0xf0) >>> bitsPerBucket) + minNum;
|
int upperNibble = ((register & 0xf0) >>> BITS_PER_BUCKET) + minNum;
|
||||||
int lowerNibble = (register & 0x0f) + minNum;
|
int lowerNibble = (register & 0x0f) + minNum;
|
||||||
if (isUpperNibble) {
|
if (isUpperNibble) {
|
||||||
upperNibble = Math.max(upperNibble, overflowValue);
|
upperNibble = Math.max(upperNibble, overflowValue);
|
||||||
|
@ -191,8 +191,8 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
|
||||||
e += 1.0d / Math.pow(2, upperNibble) + 1.0d / Math.pow(2, lowerNibble);
|
e += 1.0d / Math.pow(2, upperNibble) + 1.0d / Math.pow(2, lowerNibble);
|
||||||
zeroCount += (((upperNibble & 0xf0) == 0) ? 1 : 0) + (((lowerNibble & 0x0f) == 0) ? 1 : 0);
|
zeroCount += (((upperNibble & 0xf0) == 0) ? 1 : 0) + (((lowerNibble & 0x0f) == 0) ? 1 : 0);
|
||||||
} else {
|
} else {
|
||||||
e += minNumRegisterLookup[minNum][register];
|
e += MIN_NUM_REGISTER_LOOKUP[minNum][register];
|
||||||
zeroCount += numZeroLookup[register];
|
zeroCount += NUM_ZERO_LOOKUP[register];
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -215,7 +215,7 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
|
||||||
while (copy.hasRemaining()) {
|
while (copy.hasRemaining()) {
|
||||||
final int register = (int) copy.get() & 0xff;
|
final int register = (int) copy.get() & 0xff;
|
||||||
if (overflowValue != 0 && position == overflowPosition) {
|
if (overflowValue != 0 && position == overflowPosition) {
|
||||||
int upperNibble = ((register & 0xf0) >>> bitsPerBucket) + minNum;
|
int upperNibble = ((register & 0xf0) >>> BITS_PER_BUCKET) + minNum;
|
||||||
int lowerNibble = (register & 0x0f) + minNum;
|
int lowerNibble = (register & 0x0f) + minNum;
|
||||||
if (isUpperNibble) {
|
if (isUpperNibble) {
|
||||||
upperNibble = Math.max(upperNibble, overflowValue);
|
upperNibble = Math.max(upperNibble, overflowValue);
|
||||||
|
@ -225,8 +225,8 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
|
||||||
e += 1.0d / Math.pow(2, upperNibble) + 1.0d / Math.pow(2, lowerNibble);
|
e += 1.0d / Math.pow(2, upperNibble) + 1.0d / Math.pow(2, lowerNibble);
|
||||||
zeroCount += (((upperNibble & 0xf0) == 0) ? 1 : 0) + (((lowerNibble & 0x0f) == 0) ? 1 : 0);
|
zeroCount += (((upperNibble & 0xf0) == 0) ? 1 : 0) + (((lowerNibble & 0x0f) == 0) ? 1 : 0);
|
||||||
} else {
|
} else {
|
||||||
e += minNumRegisterLookup[minNum][register];
|
e += MIN_NUM_REGISTER_LOOKUP[minNum][register];
|
||||||
zeroCount += numZeroLookup[register];
|
zeroCount += NUM_ZERO_LOOKUP[register];
|
||||||
}
|
}
|
||||||
position++;
|
position++;
|
||||||
}
|
}
|
||||||
|
@ -302,20 +302,20 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
|
||||||
|
|
||||||
public void add(byte[] hashedValue)
|
public void add(byte[] hashedValue)
|
||||||
{
|
{
|
||||||
if (hashedValue.length < minBytesRequired) {
|
if (hashedValue.length < MIN_BYTES_REQUIRED) {
|
||||||
throw new IAE("Insufficient bytes, need[%d] got [%d]", minBytesRequired, hashedValue.length);
|
throw new IAE("Insufficient bytes, need[%d] got [%d]", MIN_BYTES_REQUIRED, hashedValue.length);
|
||||||
}
|
}
|
||||||
|
|
||||||
estimatedCardinality = null;
|
estimatedCardinality = null;
|
||||||
|
|
||||||
final ByteBuffer buffer = ByteBuffer.wrap(hashedValue);
|
final ByteBuffer buffer = ByteBuffer.wrap(hashedValue);
|
||||||
|
|
||||||
short bucket = (short) (buffer.getShort(hashedValue.length - 2) & bucketMask);
|
short bucket = (short) (buffer.getShort(hashedValue.length - 2) & BUCKET_MASK);
|
||||||
|
|
||||||
byte positionOf1 = 0;
|
byte positionOf1 = 0;
|
||||||
|
|
||||||
for (int i = 0; i < 8; ++i) {
|
for (int i = 0; i < 8; ++i) {
|
||||||
byte lookupVal = ByteBitLookup.lookup[UnsignedBytes.toInt(hashedValue[i])];
|
byte lookupVal = ByteBitLookup.LOOKUP[UnsignedBytes.toInt(hashedValue[i])];
|
||||||
switch (lookupVal) {
|
switch (lookupVal) {
|
||||||
case 0:
|
case 0:
|
||||||
positionOf1 += (byte) 8;
|
positionOf1 += (byte) 8;
|
||||||
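The bucket selection above takes the last two bytes of the hash and masks them with BUCKET_MASK (0x7ff), keeping 11 bits and therefore addressing 2048 buckets. A minimal sketch with a stand-in hash value:

import java.nio.ByteBuffer;

class BucketSelectionSketch
{
  public static void main(String[] args)
  {
    byte[] hashedValue = new byte[10]; // stand-in for a >= 10-byte murmur3 hash
    hashedValue[8] = (byte) 0x12;
    hashedValue[9] = (byte) 0x34;
    ByteBuffer buffer = ByteBuffer.wrap(hashedValue);
    short bucket = (short) (buffer.getShort(hashedValue.length - 2) & 0x7ff);
    System.out.println(bucket); // 0x1234 & 0x7ff == 0x234 == 564
  }
}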
|
@ -341,10 +341,10 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
|
||||||
// discard everything outside of the range we care about
|
// discard everything outside of the range we care about
|
||||||
if (positionOf1 <= registerOffset) {
|
if (positionOf1 <= registerOffset) {
|
||||||
return;
|
return;
|
||||||
} else if (positionOf1 > (registerOffset + range)) {
|
} else if (positionOf1 > (registerOffset + RANGE)) {
|
||||||
final byte currMax = getMaxOverflowValue();
|
final byte currMax = getMaxOverflowValue();
|
||||||
if (positionOf1 > currMax) {
|
if (positionOf1 > currMax) {
|
||||||
if (currMax <= (registerOffset + range)) {
|
if (currMax <= (registerOffset + RANGE)) {
|
||||||
// this could be optimized by having an add without sanity checks
|
// this could be optimized by having an add without sanity checks
|
||||||
add(getMaxOverflowRegister(), currMax);
|
add(getMaxOverflowRegister(), currMax);
|
||||||
}
|
}
|
||||||
|
@ -665,7 +665,7 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
|
||||||
final int position = getPayloadBytePosition() + (short) (bucket >> 1);
|
final int position = getPayloadBytePosition() + (short) (bucket >> 1);
|
||||||
final boolean isUpperNibble = ((bucket & 0x1) == 0);
|
final boolean isUpperNibble = ((bucket & 0x1) == 0);
|
||||||
|
|
||||||
final byte shiftedPositionOf1 = (isUpperNibble) ? (byte) (positionOf1 << bitsPerBucket) : positionOf1;
|
final byte shiftedPositionOf1 = (isUpperNibble) ? (byte) (positionOf1 << BITS_PER_BUCKET) : positionOf1;
|
||||||
|
|
||||||
if (storageBuffer.remaining() != getNumBytesForDenseStorage()) {
|
if (storageBuffer.remaining() != getNumBytesForDenseStorage()) {
|
||||||
convertToDenseStorage();
|
convertToDenseStorage();
|
||||||
|
@ -712,7 +712,7 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
|
||||||
final int lowerNibble = currVal & 0x0f;
|
final int lowerNibble = currVal & 0x0f;
|
||||||
|
|
||||||
// subtract the differences so that the nibbles align
|
// subtract the differences so that the nibbles align
|
||||||
final int otherUpper = (byteToAdd & 0xf0) - (offsetDiff << bitsPerBucket);
|
final int otherUpper = (byteToAdd & 0xf0) - (offsetDiff << BITS_PER_BUCKET);
|
||||||
final int otherLower = (byteToAdd & 0x0f) - offsetDiff;
|
final int otherLower = (byteToAdd & 0x0f) - offsetDiff;
|
||||||
|
|
||||||
final int newUpper = Math.max(upperNibble, otherUpper);
|
final int newUpper = Math.max(upperNibble, otherUpper);
|
||||||
|
|
|
@ -41,12 +41,12 @@ public class VersionOneHyperLogLogCollector extends HyperLogLogCollector
|
||||||
public static final int HEADER_NUM_BYTES = 7;
|
public static final int HEADER_NUM_BYTES = 7;
|
||||||
public static final int NUM_BYTES_FOR_DENSE_STORAGE = NUM_BYTES_FOR_BUCKETS + HEADER_NUM_BYTES;
|
public static final int NUM_BYTES_FOR_DENSE_STORAGE = NUM_BYTES_FOR_BUCKETS + HEADER_NUM_BYTES;
|
||||||
|
|
||||||
private static final ByteBuffer defaultStorageBuffer = ByteBuffer.wrap(new byte[]{VERSION, 0, 0, 0, 0, 0, 0})
|
private static final ByteBuffer DEFAULT_STORAGE_BUFFER = ByteBuffer.wrap(new byte[]{VERSION, 0, 0, 0, 0, 0, 0})
|
||||||
.asReadOnlyBuffer();
|
.asReadOnlyBuffer();
|
||||||
|
|
||||||
VersionOneHyperLogLogCollector()
|
VersionOneHyperLogLogCollector()
|
||||||
{
|
{
|
||||||
super(defaultStorageBuffer.duplicate());
|
super(DEFAULT_STORAGE_BUFFER.duplicate());
|
||||||
}
|
}
|
||||||
|
|
||||||
VersionOneHyperLogLogCollector(ByteBuffer buffer)
|
VersionOneHyperLogLogCollector(ByteBuffer buffer)
|
||||||
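Sharing one read-only template buffer and handing each collector a duplicate() is what keeps the header template safe here: the duplicate has its own position and limit but the same immutable backing bytes. A minimal sketch of the semantics:

import java.nio.ByteBuffer;

class DuplicateBufferSketch
{
  public static void main(String[] args)
  {
    ByteBuffer template = ByteBuffer.wrap(new byte[]{1, 0, 0, 0, 0, 0, 0}).asReadOnlyBuffer();
    ByteBuffer a = template.duplicate();
    ByteBuffer b = template.duplicate();
    a.get();                          // advances only a's position
    System.out.println(a.position()); // 1
    System.out.println(b.position()); // 0
  }
}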
|
|
|
@ -55,7 +55,7 @@ public class HyperLogLogSerdeBenchmarkTest extends AbstractBenchmark
|
||||||
this.NUM_HASHES = num_hashes;
|
this.NUM_HASHES = num_hashes;
|
||||||
}
|
}
|
||||||
|
|
||||||
private static final HashFunction hashFunction = Hashing.murmur3_128();
|
private static final HashFunction HASH_FUNCTION = Hashing.murmur3_128();
|
||||||
|
|
||||||
@Parameterized.Parameters
|
@Parameterized.Parameters
|
||||||
public static Collection<Object[]> getParameters()
|
public static Collection<Object[]> getParameters()
|
||||||
|
@ -216,13 +216,13 @@ public class HyperLogLogSerdeBenchmarkTest extends AbstractBenchmark
|
||||||
{
|
{
|
||||||
Random rand = new Random(758190);
|
Random rand = new Random(758190);
|
||||||
for (long i = 0; i < NUM_HASHES; ++i) {
|
for (long i = 0; i < NUM_HASHES; ++i) {
|
||||||
collector.add(hashFunction.hashLong(rand.nextLong()).asBytes());
|
collector.add(HASH_FUNCTION.hashLong(rand.nextLong()).asBytes());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private static HashCode getHash(final ByteBuffer byteBuffer)
|
private static HashCode getHash(final ByteBuffer byteBuffer)
|
||||||
{
|
{
|
||||||
Hasher hasher = hashFunction.newHasher();
|
Hasher hasher = HASH_FUNCTION.newHasher();
|
||||||
while (byteBuffer.position() < byteBuffer.limit()) {
|
while (byteBuffer.position() < byteBuffer.limit()) {
|
||||||
hasher.putByte(byteBuffer.get());
|
hasher.putByte(byteBuffer.get());
|
||||||
}
|
}
|
||||||
|
|
|
@@ -83,7 +83,7 @@ import java.util.SortedSet;
  */
 public class HadoopDruidIndexerConfig
 {
-  private static final Injector injector;
+  private static final Injector INJECTOR;

   public static final String CONFIG_PROPERTY = "druid.indexer.config";
   public static final Charset JAVA_NATIVE_CHARSET = Charset.forName("Unicode");

@@ -99,7 +99,7 @@ public class HadoopDruidIndexerConfig

   static {
-    injector = Initialization.makeInjectorWithModules(
+    INJECTOR = Initialization.makeInjectorWithModules(
         GuiceInjectors.makeStartupInjector(),
         ImmutableList.of(
             new Module()

@@ -118,11 +118,11 @@ public class HadoopDruidIndexerConfig
             new IndexingHadoopModule()
         )
     );
-    JSON_MAPPER = injector.getInstance(ObjectMapper.class);
-    INDEX_IO = injector.getInstance(IndexIO.class);
-    INDEX_MERGER_V9 = injector.getInstance(IndexMergerV9.class);
-    HADOOP_KERBEROS_CONFIG = injector.getInstance(HadoopKerberosConfig.class);
-    DATA_SEGMENT_PUSHER = injector.getInstance(DataSegmentPusher.class);
+    JSON_MAPPER = INJECTOR.getInstance(ObjectMapper.class);
+    INDEX_IO = INJECTOR.getInstance(IndexIO.class);
+    INDEX_MERGER_V9 = INJECTOR.getInstance(IndexMergerV9.class);
+    HADOOP_KERBEROS_CONFIG = INJECTOR.getInstance(HadoopKerberosConfig.class);
+    DATA_SEGMENT_PUSHER = INJECTOR.getInstance(DataSegmentPusher.class);
   }

   public enum IndexJobCounters
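The static block above is why INJECTOR qualifies for the constant-name rule: it is static final and assigned exactly once in a static initializer. A minimal Guice sketch of that pattern (plain Guice.createInjector here, not Druid's Initialization helper):

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;

public class StaticInjectorDemo
{
  // static final + single assignment in the static initializer => constant naming applies
  private static final Injector INJECTOR;
  private static final String GREETING;

  static {
    INJECTOR = Guice.createInjector(new AbstractModule()
    {
      @Override
      protected void configure()
      {
        bind(String.class).toInstance("hello from the injector");
      }
    });
    GREETING = INJECTOR.getInstance(String.class);
  }

  public static void main(String[] args)
  {
    System.out.println(GREETING);
  }
}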
@@ -317,7 +317,7 @@ public class IndexGeneratorJob implements Jobby

 public static class IndexGeneratorMapper extends HadoopDruidIndexerMapper<BytesWritable, BytesWritable>
 {
-  private static final HashFunction hashFunction = Hashing.murmur3_128();
+  private static final HashFunction HASH_FUNCTION = Hashing.murmur3_128();

   private AggregatorFactory[] aggregators;

@@ -364,7 +364,7 @@ public class IndexGeneratorJob implements Jobby
 final long truncatedTimestamp = granularitySpec.getQueryGranularity()
                                                .bucketStart(inputRow.getTimestamp())
                                                .getMillis();
-final byte[] hashedDimensions = hashFunction.hashBytes(
+final byte[] hashedDimensions = HASH_FUNCTION.hashBytes(
     HadoopDruidIndexerConfig.JSON_MAPPER.writeValueAsBytes(
         Rows.toGroupKey(
             truncatedTimestamp,
@@ -53,7 +53,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
 public class Utils
 {
   private static final Logger log = new Logger(Utils.class);
-  private static final ObjectMapper jsonMapper = new DefaultObjectMapper();
+  private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper();

   public static OutputStream makePathAndOutputStream(JobContext job, Path outputPath, boolean deleteExisting)
       throws IOException

@@ -120,7 +120,7 @@ public class Utils
 {
   FileSystem fs = statsPath.getFileSystem(job.getConfiguration());

-  return jsonMapper.readValue(
+  return JSON_MAPPER.readValue(
       fs.open(statsPath),
       JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
   );

@@ -128,7 +128,7 @@ public class Utils

 public static void storeStats(JobContext job, Path path, Map<String, Object> stats) throws IOException
 {
-  jsonMapper.writeValue(makePathAndOutputStream(job, path, true), stats);
+  JSON_MAPPER.writeValue(makePathAndOutputStream(job, path, true), stats);
 }

 public static String getFailureMessage(Job failedJob, ObjectMapper jsonMapper)
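Utils now follows the same convention: a single static final ObjectMapper shared by all callers. A minimal sketch of that shared-mapper round trip (plain ObjectMapper here rather than Druid's DefaultObjectMapper):

import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.Map;

public class SharedMapperDemo
{
  // one configured mapper, reused everywhere; ObjectMapper is thread-safe once configured
  private static final ObjectMapper JSON_MAPPER = new ObjectMapper();

  public static void main(String[] args) throws Exception
  {
    String json = JSON_MAPPER.writeValueAsString(Map.of("rows", 100));
    Map<?, ?> stats = JSON_MAPPER.readValue(json, Map.class);
    System.out.println(stats); // {rows=100}
  }
}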
@@ -45,11 +45,11 @@ import java.util.List;
  */
 public class HadoopDruidIndexerConfigTest
 {
-  private static final ObjectMapper jsonMapper;
+  private static final ObjectMapper JSON_MAPPER;

   static {
-    jsonMapper = new DefaultObjectMapper();
-    jsonMapper.setInjectableValues(new InjectableValues.Std().addValue(ObjectMapper.class, jsonMapper));
+    JSON_MAPPER = new DefaultObjectMapper();
+    JSON_MAPPER.setInjectableValues(new InjectableValues.Std().addValue(ObjectMapper.class, JSON_MAPPER));
   }

   @Test

@@ -75,7 +75,7 @@ public class HadoopDruidIndexerConfigTest
         ImmutableList.of(Intervals.of("2010-01-01/P1D"))
     ),
     null,
-    jsonMapper
+    JSON_MAPPER
 ),
 new HadoopIOConfig(ImmutableMap.of("paths", "bar", "type", "static"), null, null),
 new HadoopTuningConfig(

@@ -144,7 +144,7 @@ public class HadoopDruidIndexerConfigTest
         ImmutableList.of(Intervals.of("2010-01-01/P1D"))
     ),
     null,
-    jsonMapper
+    JSON_MAPPER
 ),
 new HadoopIOConfig(ImmutableMap.of("paths", "bar", "type", "static"), null, null),
 new HadoopTuningConfig(
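The test's static block registers the mapper with itself so that @JacksonInject parameters of type ObjectMapper resolve during deserialization. A minimal sketch of that Jackson mechanism (the Holder class and its JSON are invented for illustration):

import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.ObjectMapper;

public class InjectableValuesDemo
{
  private static final ObjectMapper JSON_MAPPER = new ObjectMapper();

  static {
    // register the mapper under the ObjectMapper.class key for @JacksonInject lookups
    JSON_MAPPER.setInjectableValues(new InjectableValues.Std().addValue(ObjectMapper.class, JSON_MAPPER));
  }

  static class Holder
  {
    final String name;
    final ObjectMapper injected;

    @JsonCreator
    Holder(@JsonProperty("name") String name, @JacksonInject ObjectMapper injected)
    {
      this.name = name;
      this.injected = injected;
    }
  }

  public static void main(String[] args) throws Exception
  {
    Holder h = JSON_MAPPER.readValue("{\"name\":\"x\"}", Holder.class);
    System.out.println(h.name + ", same mapper: " + (h.injected == JSON_MAPPER)); // x, same mapper: true
  }
}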
@@ -51,11 +51,11 @@ import java.util.Map;
  */
 public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
 {
-  private static final String testDatasource = "test";
-  private static final String testDatasource2 = "test2";
-  private static final Interval testDatasourceInterval = Intervals.of("1970/3000");
-  private static final Interval testDatasourceInterval2 = Intervals.of("2000/2001");
-  private static final Interval testDatasourceIntervalPartial = Intervals.of("2050/3000");
+  private static final String TEST_DATA_SOURCE = "test";
+  private static final String TEST_DATA_SOURCE2 = "test2";
+  private static final Interval TEST_DATA_SOURCE_INTERVAL = Intervals.of("1970/3000");
+  private static final Interval TEST_DATA_SOURCE_INTERVAL2 = Intervals.of("2000/2001");
+  private static final Interval TEST_DATA_SOURCE_INTERVAL_PARTIAL = Intervals.of("2050/3000");

   private final ObjectMapper jsonMapper;

@@ -70,7 +70,7 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
 }

 private static final DataSegment SEGMENT = new DataSegment(
-    testDatasource,
+    TEST_DATA_SOURCE,
     Intervals.of("2000/3000"),
     "ver",
     ImmutableMap.of(

@@ -85,7 +85,7 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
 );

 private static final DataSegment SEGMENT2 = new DataSegment(
-    testDatasource2,
+    TEST_DATA_SOURCE2,
     Intervals.of("2000/3000"),
     "ver2",
     ImmutableMap.of(

@@ -112,13 +112,13 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
 {
   PathSpec pathSpec = new DatasourcePathSpec(
       null,
-      new DatasourceIngestionSpec(testDatasource, testDatasourceInterval, null, null, null, null, null, false, null),
+      new DatasourceIngestionSpec(TEST_DATA_SOURCE, TEST_DATA_SOURCE_INTERVAL, null, null, null, null, null, false, null),
       null,
       false
   );
   HadoopDruidIndexerConfig config = testRunUpdateSegmentListIfDatasourcePathSpecIsUsed(
       pathSpec,
-      testDatasourceInterval
+      TEST_DATA_SOURCE_INTERVAL
   );
   Assert.assertEquals(
       ImmutableList.of(WindowedDataSegment.of(SEGMENT)),

@@ -132,8 +132,8 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
   PathSpec pathSpec = new DatasourcePathSpec(
       null,
       new DatasourceIngestionSpec(
-          testDatasource,
-          testDatasourceInterval,
+          TEST_DATA_SOURCE,
+          TEST_DATA_SOURCE_INTERVAL,
           null,
           ImmutableList.of(SEGMENT),
           null,

@@ -147,7 +147,7 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
   );
   HadoopDruidIndexerConfig config = testRunUpdateSegmentListIfDatasourcePathSpecIsUsed(
       pathSpec,
-      testDatasourceInterval
+      TEST_DATA_SOURCE_INTERVAL
   );
   Assert.assertEquals(
       ImmutableList.of(WindowedDataSegment.of(SEGMENT)),

@@ -161,8 +161,8 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
   PathSpec pathSpec = new DatasourcePathSpec(
       null,
       new DatasourceIngestionSpec(
-          testDatasource,
-          testDatasourceInterval,
+          TEST_DATA_SOURCE,
+          TEST_DATA_SOURCE_INTERVAL,
           null,
           ImmutableList.of(SEGMENT.withVersion("v2")),
           null,

@@ -176,7 +176,7 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
   );
   testRunUpdateSegmentListIfDatasourcePathSpecIsUsed(
       pathSpec,
-      testDatasourceInterval
+      TEST_DATA_SOURCE_INTERVAL
   );
 }

@@ -187,8 +187,8 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
   PathSpec pathSpec = new DatasourcePathSpec(
       null,
       new DatasourceIngestionSpec(
-          testDatasource,
-          testDatasourceIntervalPartial,
+          TEST_DATA_SOURCE,
+          TEST_DATA_SOURCE_INTERVAL_PARTIAL,
           null,
           null,
           null,

@@ -202,10 +202,10 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
   );
   HadoopDruidIndexerConfig config = testRunUpdateSegmentListIfDatasourcePathSpecIsUsed(
       pathSpec,
-      testDatasourceIntervalPartial
+      TEST_DATA_SOURCE_INTERVAL_PARTIAL
   );
   Assert.assertEquals(
-      ImmutableList.of(new WindowedDataSegment(SEGMENT, testDatasourceIntervalPartial)),
+      ImmutableList.of(new WindowedDataSegment(SEGMENT, TEST_DATA_SOURCE_INTERVAL_PARTIAL)),
       ((DatasourcePathSpec) config.getPathSpec()).getSegments()
   );
 }

@@ -219,8 +219,8 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
 new DatasourcePathSpec(
     null,
     new DatasourceIngestionSpec(
-        testDatasource,
-        testDatasourceInterval,
+        TEST_DATA_SOURCE,
+        TEST_DATA_SOURCE_INTERVAL,
         null,
         null,
         null,

@@ -235,8 +235,8 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
 new DatasourcePathSpec(
     null,
     new DatasourceIngestionSpec(
-        testDatasource2,
-        testDatasourceInterval2,
+        TEST_DATA_SOURCE2,
+        TEST_DATA_SOURCE_INTERVAL2,
         null,
         null,
         null,

@@ -252,14 +252,14 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
 );
 HadoopDruidIndexerConfig config = testRunUpdateSegmentListIfDatasourcePathSpecIsUsed(
     pathSpec,
-    testDatasourceInterval
+    TEST_DATA_SOURCE_INTERVAL
 );
 Assert.assertEquals(
     ImmutableList.of(WindowedDataSegment.of(SEGMENT)),
     ((DatasourcePathSpec) ((MultiplePathSpec) config.getPathSpec()).getChildren().get(1)).getSegments()
 );
 Assert.assertEquals(
-    ImmutableList.of(new WindowedDataSegment(SEGMENT2, testDatasourceInterval2)),
+    ImmutableList.of(new WindowedDataSegment(SEGMENT2, TEST_DATA_SOURCE_INTERVAL2)),
     ((DatasourcePathSpec) ((MultiplePathSpec) config.getPathSpec()).getChildren().get(2)).getSegments()
 );
 }

@@ -300,15 +300,15 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest

 EasyMock.expect(
     segmentLister.getUsedSegmentsForIntervals(
-        testDatasource,
-        Collections.singletonList(jobInterval != null ? jobInterval.overlap(testDatasourceInterval) : null)
+        TEST_DATA_SOURCE,
+        Collections.singletonList(jobInterval != null ? jobInterval.overlap(TEST_DATA_SOURCE_INTERVAL) : null)
     )
 ).andReturn(ImmutableList.of(SEGMENT));

 EasyMock.expect(
     segmentLister.getUsedSegmentsForIntervals(
-        testDatasource2,
-        Collections.singletonList(jobInterval != null ? jobInterval.overlap(testDatasourceInterval2) : null)
+        TEST_DATA_SOURCE2,
+        Collections.singletonList(jobInterval != null ? jobInterval.overlap(TEST_DATA_SOURCE_INTERVAL2) : null)
     )
 ).andReturn(ImmutableList.of(SEGMENT2));
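The stubbing in the last hunk follows EasyMock's record/replay/verify cycle. A minimal sketch of that cycle against a hypothetical lister interface (the interface and the canned value are invented for illustration):

import org.easymock.EasyMock;

import java.util.List;

public class EasyMockCycleDemo
{
  interface SegmentLister
  {
    List<String> getUsedSegments(String dataSource);
  }

  public static void main(String[] args)
  {
    SegmentLister lister = EasyMock.createMock(SegmentLister.class);
    // record phase: declare the expected call and its canned return value
    EasyMock.expect(lister.getUsedSegments("test")).andReturn(List.of("segment-1"));
    EasyMock.replay(lister); // switch from record to replay

    System.out.println(lister.getUsedSegments("test")); // [segment-1]
    EasyMock.verify(lister); // fails if any recorded expectation went unmet
  }
}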
@@ -32,7 +32,7 @@ import java.util.List;
  */
 public class HadoopTuningConfigTest
 {
-  private static final ObjectMapper jsonMapper = new DefaultObjectMapper();
+  private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper();

   @Test
   public void testSerde() throws Exception

@@ -64,7 +64,7 @@ public class HadoopTuningConfigTest
     null
 );

-HadoopTuningConfig actual = jsonReadWriteRead(jsonMapper.writeValueAsString(expected), HadoopTuningConfig.class);
+HadoopTuningConfig actual = jsonReadWriteRead(JSON_MAPPER.writeValueAsString(expected), HadoopTuningConfig.class);

 Assert.assertEquals("/tmp/workingpath", actual.getWorkingPath());
 Assert.assertEquals("version", actual.getVersion());

@@ -88,7 +88,7 @@ public class HadoopTuningConfigTest
 public static <T> T jsonReadWriteRead(String s, Class<T> klass)
 {
   try {
-    return jsonMapper.readValue(jsonMapper.writeValueAsBytes(jsonMapper.readValue(s, klass)), klass);
+    return JSON_MAPPER.readValue(JSON_MAPPER.writeValueAsBytes(JSON_MAPPER.readValue(s, klass)), klass);
   }
   catch (Exception e) {
     throw new RuntimeException(e);
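The jsonReadWriteRead helper is a serde round trip: parse, re-serialize, parse again, so any asymmetry between readValue and writeValueAsBytes surfaces as a test failure. A runnable reduction of that helper (plain ObjectMapper instead of Druid's DefaultObjectMapper):

import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.Map;

public class RoundTripDemo
{
  private static final ObjectMapper JSON_MAPPER = new ObjectMapper();

  public static <T> T jsonReadWriteRead(String s, Class<T> klass)
  {
    try {
      // read -> write -> read: a value must survive its own serialization
      return JSON_MAPPER.readValue(JSON_MAPPER.writeValueAsBytes(JSON_MAPPER.readValue(s, klass)), klass);
    }
    catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  public static void main(String[] args)
  {
    System.out.println(jsonReadWriteRead("{\"workingPath\":\"/tmp/workingpath\"}", Map.class));
  }
}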
@@ -84,12 +84,12 @@ import java.util.TreeMap;
 @RunWith(Parameterized.class)
 public class IndexGeneratorJobTest
 {
-  private static final AggregatorFactory[] aggs1 = {
+  private static final AggregatorFactory[] AGGS1 = {
       new LongSumAggregatorFactory("visited_num", "visited_num"),
       new HyperUniquesAggregatorFactory("unique_hosts", "host")
   };

-  private static final AggregatorFactory[] aggs2 = {
+  private static final AggregatorFactory[] AGGS2 = {
       new CountAggregatorFactory("count")
   };

@@ -156,7 +156,7 @@ public class IndexGeneratorJobTest
     ),
     null,
     null,
-    aggs1,
+    AGGS1,
     "website"
 },
 {

@@ -204,7 +204,7 @@ public class IndexGeneratorJobTest
     ),
     null,
     null,
-    aggs1,
+    AGGS1,
     "website"
 },
 {

@@ -253,7 +253,7 @@ public class IndexGeneratorJobTest
     ),
     null,
     null,
-    aggs1,
+    AGGS1,
     "website"
 },
 {

@@ -311,7 +311,7 @@ public class IndexGeneratorJobTest
     ),
     null,
     null,
-    aggs1,
+    AGGS1,
     "website"
 },
 {

@@ -344,7 +344,7 @@ public class IndexGeneratorJobTest
     ),
     1, // force 1 row max per index for easier testing
     null,
-    aggs2,
+    AGGS2,
     "inherit_dims"
 },
 {

@@ -377,7 +377,7 @@ public class IndexGeneratorJobTest
     ),
     1, // force 1 row max per index for easier testing
     null,
-    aggs2,
+    AGGS2,
     "inherit_dims2"
 }
 }
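AGGS1/AGGS2 feed a JUnit 4 parameterized runner: getParameters returns one Object[] per test-case row, and the runner instantiates the class once per row. A minimal sketch of that wiring (the squares example is invented):

import java.util.Arrays;
import java.util.Collection;

import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

@RunWith(Parameterized.class)
public class ParameterizedDemo
{
  @Parameterized.Parameters
  public static Collection<Object[]> getParameters()
  {
    // one Object[] per run; elements map onto the constructor arguments
    return Arrays.asList(new Object[][]{{1, 1}, {2, 4}, {3, 9}});
  }

  private final int input;
  private final int expected;

  public ParameterizedDemo(int input, int expected)
  {
    this.input = input;
    this.expected = expected;
  }

  @Test
  public void squares()
  {
    Assert.assertEquals(expected, input * input);
  }
}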
@@ -25,7 +25,7 @@ import org.junit.Test;

 public class MetadataStorageUpdaterJobSpecTest
 {
-  private static final ObjectMapper jsonMapper = new ObjectMapper();
+  private static final ObjectMapper JSON_MAPPER = new ObjectMapper();

   @Test
   public void testMetadaStorageConnectionConfigSimplePassword() throws Exception

@@ -62,7 +62,7 @@ public class MetadataStorageUpdaterJobSpecTest
     String pwd
 ) throws Exception
 {
-  MetadataStorageUpdaterJobSpec spec = jsonMapper.readValue(
+  MetadataStorageUpdaterJobSpec spec = JSON_MAPPER.readValue(
       "{" +
       "\"type\": \"" + type + "\",\n" +
       "\"connectURI\": \"" + connectURI + "\",\n" +
@@ -39,14 +39,14 @@ import java.io.File;
 @JsonTypeName("realtime_appenderator")
 public class RealtimeAppenderatorTuningConfig implements TuningConfig, AppenderatorConfig
 {
-  private static final int defaultMaxRowsInMemory = TuningConfig.DEFAULT_MAX_ROWS_IN_MEMORY;
-  private static final Period defaultIntermediatePersistPeriod = new Period("PT10M");
-  private static final int defaultMaxPendingPersists = 0;
-  private static final ShardSpec defaultShardSpec = new NumberedShardSpec(0, 1);
-  private static final IndexSpec defaultIndexSpec = new IndexSpec();
-  private static final Boolean defaultReportParseExceptions = Boolean.FALSE;
-  private static final long defaultPublishAndHandoffTimeout = 0;
-  private static final long defaultAlertTimeout = 0;
+  private static final int DEFAULT_MAX_ROWS_IN_MEMORY = TuningConfig.DEFAULT_MAX_ROWS_IN_MEMORY;
+  private static final Period DEFAULT_INTERMEDIATE_PERSIST_PERIOD = new Period("PT10M");
+  private static final int DEFAULT_MAX_PENDING_PERSISTS = 0;
+  private static final ShardSpec DEFAULT_SHARD_SPEC = new NumberedShardSpec(0, 1);
+  private static final IndexSpec DEFAULT_INDEX_SPEC = new IndexSpec();
+  private static final Boolean DEFAULT_REPORT_PARSE_EXCEPTIONS = Boolean.FALSE;
+  private static final long DEFAULT_HANDOFF_CONDITION_TIMEOUT = 0;
+  private static final long DEFAULT_ALERT_TIMEOUT = 0;

   private static File createNewBasePersistDirectory()
   {

@@ -93,29 +93,29 @@ public class RealtimeAppenderatorTuningConfig implements TuningConfig, AppenderatorConfig
     @JsonProperty("maxSavedParseExceptions") @Nullable Integer maxSavedParseExceptions
 )
 {
-  this.maxRowsInMemory = maxRowsInMemory == null ? defaultMaxRowsInMemory : maxRowsInMemory;
+  this.maxRowsInMemory = maxRowsInMemory == null ? DEFAULT_MAX_ROWS_IN_MEMORY : maxRowsInMemory;
   // initializing this to 0, it will be lazily intialized to a value
   // @see server.src.main.java.org.apache.druid.segment.indexing.TuningConfigs#getMaxBytesInMemoryOrDefault(long)
   this.maxBytesInMemory = maxBytesInMemory == null ? 0 : maxBytesInMemory;
   this.partitionsSpec = new DynamicPartitionsSpec(maxRowsPerSegment, maxTotalRows);
   this.intermediatePersistPeriod = intermediatePersistPeriod == null
-                                   ? defaultIntermediatePersistPeriod
+                                   ? DEFAULT_INTERMEDIATE_PERSIST_PERIOD
                                    : intermediatePersistPeriod;
   this.basePersistDirectory = basePersistDirectory == null ? createNewBasePersistDirectory() : basePersistDirectory;
-  this.maxPendingPersists = maxPendingPersists == null ? defaultMaxPendingPersists : maxPendingPersists;
-  this.shardSpec = shardSpec == null ? defaultShardSpec : shardSpec;
-  this.indexSpec = indexSpec == null ? defaultIndexSpec : indexSpec;
+  this.maxPendingPersists = maxPendingPersists == null ? DEFAULT_MAX_PENDING_PERSISTS : maxPendingPersists;
+  this.shardSpec = shardSpec == null ? DEFAULT_SHARD_SPEC : shardSpec;
+  this.indexSpec = indexSpec == null ? DEFAULT_INDEX_SPEC : indexSpec;
   this.indexSpecForIntermediatePersists = indexSpecForIntermediatePersists == null ?
                                           this.indexSpec : indexSpecForIntermediatePersists;
   this.reportParseExceptions = reportParseExceptions == null
-                               ? defaultReportParseExceptions
+                               ? DEFAULT_REPORT_PARSE_EXCEPTIONS
                                : reportParseExceptions;
   this.publishAndHandoffTimeout = publishAndHandoffTimeout == null
-                                  ? defaultPublishAndHandoffTimeout
+                                  ? DEFAULT_HANDOFF_CONDITION_TIMEOUT
                                   : publishAndHandoffTimeout;
   Preconditions.checkArgument(this.publishAndHandoffTimeout >= 0, "publishAndHandoffTimeout must be >= 0");

-  this.alertTimeout = alertTimeout == null ? defaultAlertTimeout : alertTimeout;
+  this.alertTimeout = alertTimeout == null ? DEFAULT_ALERT_TIMEOUT : alertTimeout;
   Preconditions.checkArgument(this.alertTimeout >= 0, "alertTimeout must be >= 0");
   this.segmentWriteOutMediumFactory = segmentWriteOutMediumFactory;
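Each renamed DEFAULT_* constant backs the same null-coalescing idiom: a nullable @JsonProperty argument falls back to the named constant when the field was absent from the JSON. A reduced sketch of that constructor pattern (plain IllegalArgumentException instead of Guava's Preconditions):

public class TuningDefaultsDemo
{
  private static final long DEFAULT_ALERT_TIMEOUT = 0;

  private final long alertTimeout;

  TuningDefaultsDemo(Long alertTimeout) // nullable, as if bound via @JsonProperty
  {
    // absent property -> named default; present property wins
    this.alertTimeout = alertTimeout == null ? DEFAULT_ALERT_TIMEOUT : alertTimeout;
    if (this.alertTimeout < 0) {
      throw new IllegalArgumentException("alertTimeout must be >= 0");
    }
  }

  public static void main(String[] args)
  {
    System.out.println(new TuningDefaultsDemo(null).alertTimeout); // 0
    System.out.println(new TuningDefaultsDemo(5L).alertTimeout);   // 5
  }
}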
@@ -703,7 +703,7 @@ public class HadoopIndexTask extends HadoopTask implements ChatHandler
 // can be injected based on the configuration given in config.getSchema().getIOConfig().getMetadataUpdateSpec()
 final MetadataStorageUpdaterJobHandler maybeHandler;
 if (config.isUpdaterJobSpecSet()) {
-  maybeHandler = injector.getInstance(MetadataStorageUpdaterJobHandler.class);
+  maybeHandler = INJECTOR.getInstance(MetadataStorageUpdaterJobHandler.class);
 } else {
   maybeHandler = null;
 }
Some files were not shown because too many files have changed in this diff.