Renamed 'Generic Column' -> 'Numeric Column'; Fixed a few resource leaks in processing; misc refinements (#5957)

This PR accumulates many refactorings and small improvements that I made while preparing the next change set of https://github.com/druid-io/druid/projects/2. I decided to split them into a separate PR to keep the volume of the main PR down.

Some of the changes:
 - Renamed the confusing "Generic Column" term to "Numeric Column" (which is what it actually means) in many class names.
 - Generified `ComplexMetricExtractor` (a sketch of the idea follows below).
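
As an illustration of the second bullet, here is a rough sketch of what "generifying" the extractor means. The diff below does not show `ComplexMetricExtractor` itself, so the exact signatures here are assumptions; the point is that a type parameter replaces raw `Object` in the extractor's contract:

```java
import org.apache.druid.data.input.InputRow;

import javax.annotation.Nullable;

// Assumed shape of the generified interface; before the change the
// extractor would have dealt in raw Object values instead of T.
public interface ComplexMetricExtractor<T>
{
  Class<? extends T> extractedClass();

  @Nullable
  T extractValue(InputRow inputRow, String metricName);
}
```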
Roman Leventov 2018-10-02 14:50:22 -03:00 committed by GitHub
parent 244046fda5
commit 3ae563263a
818 changed files with 3881 additions and 3544 deletions

View File

@ -40,26 +40,35 @@
</component>
<component name="NullableNotNullManager">
<option name="myDefaultNullable" value="javax.annotation.Nullable" />
<option name="myDefaultNotNull" value="org.jetbrains.annotations.NotNull" />
<option name="myDefaultNotNull" value="javax.annotation.Nonnull" />
<option name="myNullables">
<value>
<list size="6">
<list size="10">
<item index="0" class="java.lang.String" itemvalue="org.jetbrains.annotations.Nullable" />
<item index="1" class="java.lang.String" itemvalue="javax.annotation.Nullable" />
<item index="2" class="java.lang.String" itemvalue="javax.annotation.CheckForNull" />
<item index="3" class="java.lang.String" itemvalue="org.springframework.lang.Nullable" />
<item index="4" class="java.lang.String" itemvalue="edu.umd.cs.findbugs.annotations.Nullable" />
<item index="5" class="java.lang.String" itemvalue="android.support.annotation.Nullable" />
<item index="6" class="java.lang.String" itemvalue="androidx.annotation.Nullable" />
<item index="7" class="java.lang.String" itemvalue="org.checkerframework.checker.nullness.qual.Nullable" />
<item index="8" class="java.lang.String" itemvalue="org.checkerframework.checker.nullness.compatqual.NullableDecl" />
<item index="9" class="java.lang.String" itemvalue="org.checkerframework.checker.nullness.compatqual.NullableType" />
</list>
</value>
</option>
<option name="myNotNulls">
<value>
<list size="4">
<list size="9">
<item index="0" class="java.lang.String" itemvalue="org.jetbrains.annotations.NotNull" />
<item index="1" class="java.lang.String" itemvalue="javax.annotation.Nonnull" />
<item index="2" class="java.lang.String" itemvalue="edu.umd.cs.findbugs.annotations.NonNull" />
<item index="3" class="java.lang.String" itemvalue="android.support.annotation.NonNull" />
<item index="4" class="java.lang.String" itemvalue="javax.validation.constraints.NotNull" />
<item index="5" class="java.lang.String" itemvalue="androidx.annotation.NonNull" />
<item index="6" class="java.lang.String" itemvalue="org.checkerframework.checker.nullness.qual.NonNull" />
<item index="7" class="java.lang.String" itemvalue="org.checkerframework.checker.nullness.compatqual.NonNullDecl" />
<item index="8" class="java.lang.String" itemvalue="org.checkerframework.checker.nullness.compatqual.NonNullType" />
</list>
</value>
</option>

View File

@ -24,6 +24,7 @@ import com.fasterxml.jackson.annotation.JsonTypeInfo;
import org.apache.druid.guice.annotations.PublicApi;
import org.joda.time.DateTime;
import javax.annotation.Nullable;
import java.util.List;
/**
@ -71,6 +72,7 @@ public interface Row extends Comparable<Row>
*
* @return the value of the provided column name
*/
@Nullable
Object getRaw(String dimension);
/**
@ -79,5 +81,6 @@ public interface Row extends Comparable<Row>
* 1. If the column is absent in the row, numeric zero is returned, rather than null.
* 2. If the column has a string value, an attempt is made to parse this value as a number.
*/
@Nullable
Number getMetric(String metric);
}

View File

@ -28,17 +28,16 @@ import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.parsers.ParseException;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
/**
*/
public class Rows
{
public static final Long LONG_ZERO = 0L;
/**
* @param timeStamp rolled-up timestamp to be used to create the group key
@ -55,10 +54,7 @@ public class Rows
dims.put(dim, dimValues);
}
}
return ImmutableList.of(
timeStamp,
dims
);
return ImmutableList.of(timeStamp, dims);
}
/**
@ -70,14 +66,7 @@ public class Rows
return Collections.emptyList();
} else if (inputValue instanceof List) {
// guava's toString function fails on null objects, so please do not use it
final List<Object> values = (List) inputValue;
final List<String> retVal = new ArrayList<>(values.size());
for (Object val : values) {
retVal.add(String.valueOf(val));
}
return retVal;
return ((List<?>) inputValue).stream().map(String::valueOf).collect(Collectors.toList());
} else {
return Collections.singletonList(String.valueOf(inputValue));
}
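
Aside: the new one-liner above relies on `String.valueOf(Object)` being null-tolerant, which is exactly what the retained comment about Guava's `toString` function warns about. A small self-contained demonstration (class name and sample values are made up):

```java
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class NullTolerantToString
{
  public static void main(String[] args)
  {
    List<Object> values = Arrays.asList("a", null, 1);
    // String.valueOf(Object) maps null to the string "null" instead of
    // throwing, so this pipeline never NPEs on null elements; Guava's
    // Functions.toStringFunction() would call null.toString() and fail.
    List<String> strings = values.stream().map(String::valueOf).collect(Collectors.toList());
    System.out.println(strings); // prints [a, null, 1]
  }
}
```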

View File

@ -22,12 +22,12 @@ package org.apache.druid.data.input.impl;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.LineIterator;
import org.apache.druid.data.input.FiniteFirehoseFactory;
import org.apache.druid.data.input.Firehose;
import org.apache.druid.data.input.InputSplit;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.LineIterator;
import java.io.File;
import java.io.IOException;

View File

@ -19,10 +19,10 @@
package org.apache.druid.data.input.impl;
import org.apache.commons.io.LineIterator;
import org.apache.druid.data.input.Firehose;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.utils.Runnables;
import org.apache.commons.io.LineIterator;
import javax.annotation.Nullable;
import java.io.Closeable;
@ -101,24 +101,9 @@ public class FileIteratingFirehose implements Firehose
@Override
public void close() throws IOException
{
try {
if (lineIterator != null) {
lineIterator.close();
}
}
catch (Throwable t) {
try {
if (closer != null) {
closer.close();
}
}
catch (Exception e) {
t.addSuppressed(e);
}
throw t;
}
if (closer != null) {
closer.close();
try (Closeable ignore = closer;
Closeable ignore2 = lineIterator != null ? lineIterator::close : null) {
// close both via try-with-resources
}
}
}
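
The rewritten `close()` above replaces the hand-written suppression logic with try-with-resources, which closes resources in reverse declaration order, skips null resources, and attaches a secondary close failure to the primary exception via `addSuppressed`. A standalone sketch of the idiom (names are illustrative, not the Druid classes):

```java
import java.io.Closeable;
import java.io.IOException;

public class CloseTwoResources implements Closeable
{
  private final Closeable closer;       // never null in this sketch
  private final Closeable lineIterator; // may be null

  public CloseTwoResources(Closeable closer, Closeable lineIterator)
  {
    this.closer = closer;
    this.lineIterator = lineIterator;
  }

  @Override
  public void close() throws IOException
  {
    // Both resources are closed even if one close() throws; the second
    // exception is recorded as suppressed on the first. A null resource
    // is silently skipped, so no explicit null check is needed.
    try (Closeable ignore = closer;
         Closeable ignore2 = lineIterator) {
      // nothing to do; the resource declarations do all the work
    }
  }
}
```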

View File

@ -28,6 +28,8 @@ import javax.annotation.Nullable;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
@ -62,7 +64,7 @@ public abstract class Fetcher<T> implements Iterator<OpenedObject<T>>
// This is updated when a file is successfully fetched, a fetched file is deleted, or a fetched file is
// cached.
private final AtomicLong fetchedBytes = new AtomicLong(0);
private Future<Void> fetchFuture;
private final Deque<Future<Void>> fetchFutures = new ArrayDeque<>();
private PrefetchConfig prefetchConfig;
// nextFetchIndex indicates which object should be downloaded when fetch is triggered.
@ -103,12 +105,13 @@ public abstract class Fetcher<T> implements Iterator<OpenedObject<T>>
*/
private void fetchIfNeeded(long remainingBytes)
{
if ((fetchFuture == null || fetchFuture.isDone())
if ((fetchFutures.isEmpty() || fetchFutures.peekLast().isDone())
&& remainingBytes <= prefetchConfig.getPrefetchTriggerBytes()) {
fetchFuture = fetchExecutor.submit(() -> {
Future<Void> fetchFuture = fetchExecutor.submit(() -> {
fetch();
return null;
});
fetchFutures.add(fetchFuture);
}
}
@ -180,12 +183,17 @@ public abstract class Fetcher<T> implements Iterator<OpenedObject<T>>
private void checkFetchException(boolean wait)
{
try {
if (wait) {
fetchFuture.get(prefetchConfig.getFetchTimeout(), TimeUnit.MILLISECONDS);
fetchFuture = null;
} else if (fetchFuture != null && fetchFuture.isDone()) {
fetchFuture.get();
fetchFuture = null;
for (Future<Void> fetchFuture; (fetchFuture = fetchFutures.poll()) != null; ) {
if (wait) {
fetchFuture.get(prefetchConfig.getFetchTimeout(), TimeUnit.MILLISECONDS);
} else {
if (fetchFuture.isDone()) {
fetchFuture.get();
} else {
fetchFutures.addFirst(fetchFuture);
break;
}
}
}
}
catch (InterruptedException | ExecutionException e) {
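
The `checkFetchException` rewrite above appears to close a gap where only the most recent `fetchFuture` was tracked, so a failed fetch could be replaced by a newer submission before its exception was ever observed. With the deque, every queued future is drained, and in the non-waiting case the first unfinished future is pushed back for the next check. A condensed standalone sketch of that loop (class and method names are made up):

```java
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

public class FetchFutureQueue
{
  private final Deque<Future<Void>> fetchFutures = new ArrayDeque<>();

  void check(boolean wait, long timeoutMillis)
      throws InterruptedException, ExecutionException, TimeoutException
  {
    for (Future<Void> f; (f = fetchFutures.poll()) != null; ) {
      if (wait) {
        f.get(timeoutMillis, TimeUnit.MILLISECONDS); // rethrows a failed fetch
      } else if (f.isDone()) {
        f.get(); // surfaces an ExecutionException from a completed fetch
      } else {
        fetchFutures.addFirst(f); // still running; check it again next time
        break;
      }
    }
  }
}
```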

View File

@ -20,10 +20,9 @@
package org.apache.druid.data.input.impl.prefetch;
import com.google.common.base.Predicate;
import org.apache.commons.io.IOUtils;
import org.apache.druid.java.util.common.RetryUtils;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.commons.io.IOUtils;
import javax.annotation.Nullable;
import java.io.Closeable;
@ -33,7 +32,6 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.List;
import java.util.concurrent.ExecutorService;
/**

View File

@ -21,9 +21,11 @@ package org.apache.druid.data.input.impl.prefetch;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import org.apache.commons.io.LineIterator;
import org.apache.druid.data.input.Firehose;
import org.apache.druid.data.input.FirehoseFactory;
import org.apache.druid.data.input.impl.InputRowParser;
@ -31,8 +33,6 @@ import org.apache.druid.data.input.impl.SqlFirehose;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.concurrent.Execs;
import org.apache.druid.java.util.common.logger.Logger;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.io.LineIterator;
import javax.annotation.Nullable;
import java.io.File;

View File

@ -24,6 +24,7 @@ import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import org.apache.commons.io.LineIterator;
import org.apache.druid.data.input.Firehose;
import org.apache.druid.data.input.impl.AbstractTextFilesFirehoseFactory;
import org.apache.druid.data.input.impl.FileIteratingFirehose;
@ -31,7 +32,6 @@ import org.apache.druid.data.input.impl.StringInputRowParser;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.concurrent.Execs;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.commons.io.LineIterator;
import javax.annotation.Nullable;
import java.io.Closeable;
@ -221,21 +221,24 @@ public abstract class PrefetchableTextFilesFirehoseFactory<T>
}
final OpenedObject<T> openedObject = fetcher.next();
final InputStream stream;
try {
stream = wrapObjectStream(
openedObject.getObject(),
openedObject.getObjectStream()
return new ResourceCloseableLineIterator(
new InputStreamReader(
wrapObjectStream(openedObject.getObject(), openedObject.getObjectStream()),
StandardCharsets.UTF_8
),
openedObject.getResourceCloser()
);
}
catch (IOException e) {
try {
openedObject.getResourceCloser().close();
}
catch (Throwable t) {
e.addSuppressed(t);
}
throw new RuntimeException(e);
}
return new ResourceCloseableLineIterator(
new InputStreamReader(stream, StandardCharsets.UTF_8),
openedObject.getResourceCloser()
);
}
},
firehoseParser,
@ -288,9 +291,8 @@ public abstract class PrefetchableTextFilesFirehoseFactory<T>
@Override
public void close()
{
super.close();
try {
resourceCloser.close();
try (Closeable ignore = this.resourceCloser) {
super.close();
}
catch (IOException e) {
throw new RuntimeException(e);

View File

@ -32,6 +32,7 @@ import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Interner;
import com.google.common.collect.Interners;
import com.google.inject.Inject;
import it.unimi.dsi.fastutil.objects.Object2ObjectArrayMap;
import org.apache.druid.guice.annotations.PublicApi;
import org.apache.druid.jackson.CommaListJoinDeserializer;
import org.apache.druid.jackson.CommaListJoinSerializer;
@ -39,7 +40,6 @@ import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.query.SegmentDescriptor;
import org.apache.druid.timeline.partition.NoneShardSpec;
import org.apache.druid.timeline.partition.ShardSpec;
import it.unimi.dsi.fastutil.objects.Object2ObjectArrayMap;
import org.joda.time.DateTime;
import org.joda.time.Interval;

View File

@ -22,8 +22,8 @@ package org.apache.druid.data.input.impl;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.apache.druid.data.input.InputRow;
import org.apache.commons.io.LineIterator;
import org.apache.druid.data.input.InputRow;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;

View File

@ -20,8 +20,8 @@
package org.apache.druid.data.input.impl;
import com.google.common.collect.Lists;
import org.apache.druid.java.util.common.parsers.Parser;
import junit.framework.Assert;
import org.apache.druid.java.util.common.parsers.Parser;
import org.junit.Test;
import java.util.Arrays;

View File

@ -26,9 +26,9 @@ import com.fasterxml.jackson.dataformat.smile.SmileFactory;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import org.apache.commons.io.FileUtils;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.impl.prefetch.JsonIterator;
import org.apache.commons.io.FileUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;

View File

@ -23,6 +23,8 @@ import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.Lists;
import com.google.common.io.CountingOutputStream;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.filefilter.TrueFileFilter;
import org.apache.druid.data.input.FiniteFirehoseFactory;
import org.apache.druid.data.input.Firehose;
import org.apache.druid.data.input.InputSplit;
@ -34,15 +36,14 @@ import org.apache.druid.data.input.impl.TimestampSpec;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.RetryUtils;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.filefilter.TrueFileFilter;
import org.hamcrest.CoreMatchers;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.rules.TemporaryFolder;
import java.io.BufferedWriter;
import java.io.File;
@ -62,8 +63,6 @@ import java.util.concurrent.TimeoutException;
public class PrefetchableTextFilesFirehoseFactoryTest
{
private static final List<File> FIREHOSE_TMP_DIRS = new ArrayList<>();
private static File TEST_DIR;
private static long FILE_SIZE = -1;
private static final StringInputRowParser parser = new StringInputRowParser(
@ -86,16 +85,17 @@ public class PrefetchableTextFilesFirehoseFactoryTest
StandardCharsets.UTF_8.name()
);
@ClassRule
public static TemporaryFolder tempDir = new TemporaryFolder();
private static File TEST_DIR;
@Rule
public ExpectedException expectedException = ExpectedException.none();
@BeforeClass
public static void setup() throws IOException
{
TEST_DIR = File.createTempFile(PrefetchableTextFilesFirehoseFactoryTest.class.getSimpleName(), "testDir");
FileUtils.forceDelete(TEST_DIR);
FileUtils.forceMkdir(TEST_DIR);
TEST_DIR = tempDir.newFolder();
for (int i = 0; i < 100; i++) {
try (
CountingOutputStream cos = new CountingOutputStream(
@ -118,16 +118,6 @@ public class PrefetchableTextFilesFirehoseFactoryTest
}
}
@AfterClass
public static void teardown() throws IOException
{
FileUtils.forceDelete(TEST_DIR);
for (File dir : FIREHOSE_TMP_DIRS) {
FileUtils.forceDelete(dir);
}
FIREHOSE_TMP_DIRS.clear(); // cleanup after ourselves (resolve issue with retries)
}
private static void assertResult(List<Row> rows)
{
Assert.assertEquals(10000, rows.size());
@ -161,16 +151,9 @@ public class PrefetchableTextFilesFirehoseFactoryTest
Assert.assertEquals(expectedNumFiles, files.length);
}
private static File createFirehoseTmpDir(String dirSuffix) throws IOException
private static File createFirehoseTmpDir(String dirPrefix) throws IOException
{
final File firehoseTempDir = File.createTempFile(
PrefetchableTextFilesFirehoseFactoryTest.class.getSimpleName(),
dirSuffix
);
FileUtils.forceDelete(firehoseTempDir);
FileUtils.forceMkdir(firehoseTempDir);
FIREHOSE_TMP_DIRS.add(firehoseTempDir);
return firehoseTempDir;
return Files.createTempDirectory(tempDir.getRoot().toPath(), dirPrefix).toFile();
}
@Test
@ -413,6 +396,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
1024,
cacheCapacity,
fetchCapacity,
60_000, // fetch timeout
3,
0,
0,
@ -521,7 +505,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
long prefetchTriggerThreshold,
long maxCacheCapacityBytes,
long maxFetchCapacityBytes,
long timeout,
long fetchTimeout,
int maxRetry,
int numOpenExceptions,
int maxConnectionResets,
@ -532,7 +516,7 @@ public class PrefetchableTextFilesFirehoseFactoryTest
maxCacheCapacityBytes,
maxFetchCapacityBytes,
prefetchTriggerThreshold,
timeout,
fetchTimeout,
maxRetry
);
this.numOpenExceptions = numOpenExceptions;

View File

@ -33,8 +33,8 @@ import org.apache.druid.query.filter.BoundDimFilter;
import org.apache.druid.query.ordering.StringComparators;
import org.apache.druid.segment.column.BitmapIndex;
import org.apache.druid.segment.data.BitmapSerdeFactory;
import org.apache.druid.segment.data.CloseableIndexed;
import org.apache.druid.segment.data.GenericIndexed;
import org.apache.druid.segment.data.Indexed;
import org.apache.druid.segment.data.RoaringBitmapSerdeFactory;
import org.apache.druid.segment.filter.BoundFilter;
import org.apache.druid.segment.serde.BitmapIndexColumnPartSupplier;
@ -158,17 +158,7 @@ public class BoundFilterBenchmark
final BitmapSerdeFactory serdeFactory = new RoaringBitmapSerdeFactory(null);
final List<Integer> ints = generateInts();
final GenericIndexed<String> dictionary = GenericIndexed.fromIterable(
FluentIterable.from(ints)
.transform(
new Function<Integer, String>()
{
@Override
public String apply(Integer i)
{
return i.toString();
}
}
),
FluentIterable.from(ints).transform(i -> i.toString()),
GenericIndexed.STRING_STRATEGY
);
final BitmapIndex bitmapIndex = new BitmapIndexColumnPartSupplier(
@ -194,7 +184,7 @@ public class BoundFilterBenchmark
selector = new BitmapIndexSelector()
{
@Override
public Indexed<String> getDimensionValues(String dimension)
public CloseableIndexed<String> getDimensionValues(String dimension)
{
return dictionary;
}

View File

@ -19,6 +19,7 @@
package org.apache.druid.benchmark;
import it.unimi.dsi.fastutil.ints.IntArrayList;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.segment.data.ColumnarInts;
import org.apache.druid.segment.data.CompressedVSizeColumnarIntsSupplier;
@ -26,7 +27,6 @@ import org.apache.druid.segment.data.CompressionStrategy;
import org.apache.druid.segment.data.IndexedInts;
import org.apache.druid.segment.data.VSizeColumnarInts;
import org.apache.druid.segment.data.WritableSupplier;
import it.unimi.dsi.fastutil.ints.IntArrayList;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Mode;

View File

@ -35,8 +35,8 @@ import org.apache.druid.query.filter.DruidLongPredicate;
import org.apache.druid.query.filter.DruidPredicateFactory;
import org.apache.druid.segment.column.BitmapIndex;
import org.apache.druid.segment.data.BitmapSerdeFactory;
import org.apache.druid.segment.data.CloseableIndexed;
import org.apache.druid.segment.data.GenericIndexed;
import org.apache.druid.segment.data.Indexed;
import org.apache.druid.segment.data.RoaringBitmapSerdeFactory;
import org.apache.druid.segment.filter.DimensionPredicateFilter;
import org.apache.druid.segment.serde.BitmapIndexColumnPartSupplier;
@ -155,7 +155,7 @@ public class DimensionPredicateFilterBenchmark
selector = new BitmapIndexSelector()
{
@Override
public Indexed<String> getDimensionValues(String dimension)
public CloseableIndexed<String> getDimensionValues(String dimension)
{
return dictionary;
}

View File

@ -37,7 +37,7 @@ import org.apache.druid.segment.DimensionSelector;
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.QueryableIndexStorageAdapter;
import org.apache.druid.segment.VirtualColumns;
import org.apache.druid.segment.column.Column;
import org.apache.druid.segment.column.ColumnHolder;
import org.apache.druid.segment.column.ValueType;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.timeline.DataSegment;
@ -175,7 +175,7 @@ public class ExpressionSelectorBenchmark
.getColumnSelectorFactory()
.makeDimensionSelector(
new ExtractionDimensionSpec(
Column.TIME_COLUMN_NAME,
ColumnHolder.TIME_COLUMN_NAME,
"v",
new TimeFormatExtractionFn(null, null, null, Granularities.HOUR, true)
)

View File

@ -62,8 +62,8 @@ import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.QueryableIndexStorageAdapter;
import org.apache.druid.segment.StorageAdapter;
import org.apache.druid.segment.VirtualColumns;
import org.apache.druid.segment.column.Column;
import org.apache.druid.segment.column.ColumnConfig;
import org.apache.druid.segment.column.ColumnHolder;
import org.apache.druid.segment.data.IndexedInts;
import org.apache.druid.segment.filter.AndFilter;
import org.apache.druid.segment.filter.BoundFilter;
@ -185,7 +185,7 @@ public class FilterPartitionBenchmark
Interval interval = schemaInfo.getDataInterval();
timeFilterNone = new BoundFilter(new BoundDimFilter(
Column.TIME_COLUMN_NAME,
ColumnHolder.TIME_COLUMN_NAME,
String.valueOf(Long.MAX_VALUE),
String.valueOf(Long.MAX_VALUE),
true,
@ -197,7 +197,7 @@ public class FilterPartitionBenchmark
long halfEnd = (interval.getEndMillis() + interval.getStartMillis()) / 2;
timeFilterHalf = new BoundFilter(new BoundDimFilter(
Column.TIME_COLUMN_NAME,
ColumnHolder.TIME_COLUMN_NAME,
String.valueOf(interval.getStartMillis()),
String.valueOf(halfEnd),
true,
@ -208,7 +208,7 @@ public class FilterPartitionBenchmark
));
timeFilterAll = new BoundFilter(new BoundDimFilter(
Column.TIME_COLUMN_NAME,
ColumnHolder.TIME_COLUMN_NAME,
String.valueOf(interval.getStartMillis()),
String.valueOf(interval.getEndMillis()),
true,

View File

@ -23,10 +23,10 @@ import com.google.common.io.Files;
import com.google.common.primitives.Ints;
import org.apache.druid.java.util.common.io.smoosh.FileSmoosher;
import org.apache.druid.java.util.common.io.smoosh.SmooshedFileMapper;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMedium;
import org.apache.druid.segment.data.GenericIndexed;
import org.apache.druid.segment.data.GenericIndexedWriter;
import org.apache.druid.segment.data.ObjectStrategy;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMedium;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
@ -65,7 +65,7 @@ public class GenericIndexedBenchmark
static final ObjectStrategy<byte[]> byteArrayStrategy = new ObjectStrategy<byte[]>()
{
@Override
public Class<? extends byte[]> getClazz()
public Class<byte[]> getClazz()
{
return byte[].class;
}

View File

@ -35,8 +35,8 @@ import org.apache.druid.query.filter.SelectorDimFilter;
import org.apache.druid.query.ordering.StringComparators;
import org.apache.druid.segment.column.BitmapIndex;
import org.apache.druid.segment.data.BitmapSerdeFactory;
import org.apache.druid.segment.data.CloseableIndexed;
import org.apache.druid.segment.data.GenericIndexed;
import org.apache.druid.segment.data.Indexed;
import org.apache.druid.segment.data.RoaringBitmapSerdeFactory;
import org.apache.druid.segment.serde.BitmapIndexColumnPartSupplier;
import org.openjdk.jmh.annotations.Benchmark;
@ -155,7 +155,7 @@ public class LikeFilterBenchmark
selector = new BitmapIndexSelector()
{
@Override
public Indexed<String> getDimensionValues(String dimension)
public CloseableIndexed<String> getDimensionValues(String dimension)
{
return dictionary;
}

View File

@ -22,12 +22,10 @@ package org.apache.druid.benchmark;
import com.google.common.collect.Lists;
import com.google.common.collect.Ordering;
import com.google.common.primitives.Ints;
import org.apache.druid.java.util.common.guava.Accumulator;
import org.apache.druid.java.util.common.guava.MergeSequence;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Sequences;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Mode;

View File

@ -37,7 +37,6 @@ import org.openjdk.jmh.infra.Blackhole;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;

View File

@ -19,9 +19,7 @@
package org.apache.druid.benchmark;
import com.google.common.base.Function;
import org.apache.druid.java.util.common.parsers.TimestampParser;
import org.joda.time.DateTime;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;

View File

@ -19,7 +19,6 @@
package org.apache.druid.benchmark.datagen;
import org.apache.druid.segment.column.ValueType;
import org.apache.commons.math3.distribution.AbstractIntegerDistribution;
import org.apache.commons.math3.distribution.AbstractRealDistribution;
import org.apache.commons.math3.distribution.EnumeratedDistribution;
@ -27,6 +26,7 @@ import org.apache.commons.math3.distribution.NormalDistribution;
import org.apache.commons.math3.distribution.UniformRealDistribution;
import org.apache.commons.math3.distribution.ZipfDistribution;
import org.apache.commons.math3.util.Pair;
import org.apache.druid.segment.column.ValueType;
import java.io.Serializable;
import java.util.ArrayList;

View File

@ -0,0 +1,23 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
@EverythingIsNonnullByDefault
package org.apache.druid.benchmark;
import org.apache.druid.annotations.EverythingIsNonnullByDefault;

View File

@ -21,6 +21,7 @@ package org.apache.druid.benchmark.query;
import com.google.common.collect.Maps;
import com.google.common.io.Files;
import org.apache.commons.io.FileUtils;
import org.apache.druid.benchmark.datagen.BenchmarkSchemaInfo;
import org.apache.druid.benchmark.datagen.BenchmarkSchemas;
import org.apache.druid.benchmark.datagen.SegmentGenerator;
@ -47,7 +48,6 @@ import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
import org.apache.commons.io.FileUtils;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;

View File

@ -68,8 +68,8 @@ import org.apache.druid.segment.IndexMergerV9;
import org.apache.druid.segment.IndexSpec;
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.QueryableIndexSegment;
import org.apache.druid.segment.column.Column;
import org.apache.druid.segment.column.ColumnConfig;
import org.apache.druid.segment.column.ColumnHolder;
import org.apache.druid.segment.incremental.IncrementalIndex;
import org.apache.druid.segment.serde.ComplexMetrics;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
@ -178,7 +178,7 @@ public class TimeseriesBenchmark
List<AggregatorFactory> queryAggs = new ArrayList<>();
LongSumAggregatorFactory lsaf = new LongSumAggregatorFactory("sumLongSequential", "sumLongSequential");
BoundDimFilter timeFilter = new BoundDimFilter(Column.TIME_COLUMN_NAME, "200000", "300000", false, false, null, null,
BoundDimFilter timeFilter = new BoundDimFilter(ColumnHolder.TIME_COLUMN_NAME, "200000", "300000", false, false, null, null,
StringComparators.NUMERIC);
queryAggs.add(new FilteredAggregatorFactory(lsaf, timeFilter));
@ -198,7 +198,7 @@ public class TimeseriesBenchmark
List<AggregatorFactory> queryAggs = new ArrayList<>();
LongSumAggregatorFactory lsaf = new LongSumAggregatorFactory("sumLongSequential", "sumLongSequential");
BoundDimFilter timeFilter = new BoundDimFilter(Column.TIME_COLUMN_NAME, "200000", "300000", false, false, null, null,
BoundDimFilter timeFilter = new BoundDimFilter(ColumnHolder.TIME_COLUMN_NAME, "200000", "300000", false, false, null, null,
StringComparators.ALPHANUMERIC);
queryAggs.add(new FilteredAggregatorFactory(lsaf, timeFilter));

View File

@ -71,8 +71,8 @@ import org.apache.druid.segment.IndexMergerV9;
import org.apache.druid.segment.IndexSpec;
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.QueryableIndexSegment;
import org.apache.druid.segment.column.Column;
import org.apache.druid.segment.column.ColumnConfig;
import org.apache.druid.segment.column.ColumnHolder;
import org.apache.druid.segment.incremental.IncrementalIndex;
import org.apache.druid.segment.serde.ComplexMetrics;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
@ -197,7 +197,7 @@ public class TimeCompareBenchmark
"sumLongSequential", "sumLongSequential"
),
new IntervalDimFilter(
Column.TIME_COLUMN_NAME,
ColumnHolder.TIME_COLUMN_NAME,
Collections.singletonList(recent),
null
)
@ -209,7 +209,7 @@ public class TimeCompareBenchmark
"_cmp_sumLongSequential", "sumLongSequential"
),
new IntervalDimFilter(
Column.TIME_COLUMN_NAME,
ColumnHolder.TIME_COLUMN_NAME,
Collections.singletonList(previous),
null
)
@ -247,7 +247,7 @@ public class TimeCompareBenchmark
"sumLongSequential", "sumLongSequential"
),
new IntervalDimFilter(
Column.TIME_COLUMN_NAME,
ColumnHolder.TIME_COLUMN_NAME,
Collections.singletonList(recent),
null
)
@ -259,7 +259,7 @@ public class TimeCompareBenchmark
"_cmp_sumLongSequential", "sumLongSequential"
),
new IntervalDimFilter(
Column.TIME_COLUMN_NAME,
ColumnHolder.TIME_COLUMN_NAME,
Collections.singletonList(previous),
null
)

View File

@ -19,11 +19,10 @@
package org.apache.druid.benchmark;
import org.apache.druid.java.util.common.parsers.Parser;
import org.junit.Assert;
import org.junit.Test;
import org.apache.druid.java.util.common.parsers.Parser;
import java.util.Map;
public class FlattenJSONBenchmarkUtilTest

View File

@ -21,7 +21,6 @@ package org.apache.druid.collections;
import com.google.common.collect.Iterators;
import com.google.common.collect.PeekingIterator;
import org.apache.druid.java.util.common.guava.nary.BinaryFn;
import java.util.Comparator;

View File

@ -23,7 +23,6 @@ import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.collect.Iterators;
import com.google.common.collect.PeekingIterator;
import org.apache.druid.java.util.common.guava.FunctionalIterator;
import java.util.Comparator;

View File

@ -22,7 +22,6 @@ package org.apache.druid.collections;
import com.google.common.base.Function;
import com.google.common.base.Throwables;
import com.google.common.collect.Ordering;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.java.util.common.guava.Accumulator;
import org.apache.druid.java.util.common.guava.CloseQuietly;
import org.apache.druid.java.util.common.guava.Sequence;
@ -30,6 +29,7 @@ import org.apache.druid.java.util.common.guava.Yielder;
import org.apache.druid.java.util.common.guava.Yielders;
import org.apache.druid.java.util.common.guava.YieldingAccumulator;
import org.apache.druid.java.util.common.guava.YieldingAccumulators;
import org.apache.druid.java.util.common.io.Closer;
import java.io.IOException;
import java.util.PriorityQueue;

View File

@ -23,9 +23,7 @@ import com.google.common.base.Preconditions;
import com.google.inject.Binder;
import com.google.inject.Inject;
import com.google.inject.Provider;
import org.apache.druid.java.util.common.logger.Logger;
import org.skife.config.ConfigurationObjectFactory;
import java.util.Map;

View File

@ -25,7 +25,6 @@ import com.fasterxml.jackson.databind.introspect.AnnotatedMethod;
import com.fasterxml.jackson.databind.introspect.NopAnnotationIntrospector;
import com.google.inject.BindingAnnotation;
import com.google.inject.Key;
import org.apache.druid.java.util.common.IAE;
import java.lang.annotation.Annotation;

View File

@ -24,7 +24,6 @@ import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.InjectableValues;
import com.google.inject.Injector;
import com.google.inject.Key;
import org.apache.druid.java.util.common.IAE;
/**

View File

@ -19,13 +19,13 @@
package org.apache.druid.math.expr;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.TerminalNode;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.druid.annotations.UsedInGeneratedCode;
import org.apache.druid.java.util.common.RE;
import org.apache.druid.math.expr.antlr.ExprBaseListener;
import org.apache.druid.math.expr.antlr.ExprParser;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.TerminalNode;
import org.apache.commons.lang.StringEscapeUtils;
import java.util.ArrayList;
import java.util.Collections;

View File

@ -26,14 +26,14 @@ import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.math.expr.antlr.ExprLexer;
import org.apache.druid.math.expr.antlr.ExprParser;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.ParseTreeWalker;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.math.expr.antlr.ExprLexer;
import org.apache.druid.math.expr.antlr.ExprParser;
import javax.annotation.Nullable;
import java.lang.reflect.Modifier;

View File

@ -19,14 +19,13 @@
package org.apache.druid.common.utils;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.logger.Logger;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.logger.Logger;
// The issue here is that parameters to the logging system are evaluated eagerly
// So CPU or resource heavy clauses in the log parameters get evaluated even if there is no debug logging
public class LogTest

View File

@ -19,11 +19,10 @@
package org.apache.druid.common.utils;
import org.apache.druid.java.util.common.ISE;
import org.junit.Assert;
import org.junit.Test;
import org.apache.druid.java.util.common.ISE;
import java.util.Properties;
public class PropUtilsTest

View File

@ -19,12 +19,11 @@
package org.apache.druid.common.utils;
import org.apache.druid.java.util.common.ISE;
import org.hamcrest.number.OrderingComparison;
import org.junit.Assert;
import org.junit.Test;
import org.apache.druid.java.util.common.ISE;
public class SocketUtilTest
{
private final int MAX_PORT = 0xffff;

View File

@ -21,10 +21,8 @@ package org.apache.druid.concurrent;
import com.google.common.base.Throwables;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.druid.java.util.common.concurrent.Execs;
import org.apache.druid.java.util.common.logger.Logger;
import org.junit.Assert;
import org.junit.Test;

View File

@ -23,11 +23,9 @@ import com.google.inject.Binder;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Module;
import org.apache.druid.java.util.common.lifecycle.Lifecycle;
import org.apache.druid.java.util.common.lifecycle.LifecycleStart;
import org.apache.druid.java.util.common.lifecycle.LifecycleStop;
import org.junit.Assert;
import org.junit.Test;

View File

@ -0,0 +1,302 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.testing;
import org.junit.runners.model.MultipleFailureException;
import org.junit.runners.model.Statement;
import org.junit.runners.model.TestTimedOutException;
import javax.annotation.Nullable;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.FutureTask;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* This class is based on {@link org.junit.internal.runners.statements.FailOnTimeout}; additionally, deadlocked
* threads are detected.
*/
final class DeadlockDetectingFailOnTimeout extends Statement
{
private final Statement originalStatement;
private final TimeUnit timeUnit;
private final long timeout;
DeadlockDetectingFailOnTimeout(long timeout, TimeUnit timeoutUnit, Statement statement)
{
originalStatement = statement;
this.timeout = timeout;
timeUnit = timeoutUnit;
}
@Override
public void evaluate() throws Throwable
{
CallableStatement callable = new CallableStatement();
FutureTask<Throwable> task = new FutureTask<>(callable);
ThreadGroup threadGroup = new ThreadGroup("FailOnTimeoutGroup");
Thread thread = new Thread(threadGroup, task, "Time-limited test");
try {
thread.setDaemon(true);
thread.start();
callable.awaitStarted();
Throwable throwable = getResult(task, thread);
if (throwable != null) {
throw throwable;
}
}
finally {
try {
thread.join(1);
}
catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
try {
threadGroup.destroy();
}
catch (IllegalThreadStateException e) {
// If a thread from the group is still alive, the ThreadGroup cannot be destroyed.
// Swallow the exception to keep the same behavior prior to this change.
}
}
}
/**
* Wait for the test task, returning the exception thrown by the test if the
* test failed, an exception indicating a timeout if the test timed out, or
* {@code null} if the test passed.
*/
private Throwable getResult(FutureTask<Throwable> task, Thread thread)
{
try {
if (timeout > 0) {
return task.get(timeout, timeUnit);
} else {
return task.get();
}
}
catch (InterruptedException e) {
return e; // caller will re-throw; no need to call Thread.interrupt()
}
catch (ExecutionException e) {
// test failed; have caller re-throw the exception thrown by the test
return e.getCause();
}
catch (TimeoutException e) {
return createTimeoutException(thread);
}
}
private Exception createTimeoutException(Thread thread)
{
StackTraceElement[] stackTrace = thread.getStackTrace();
Exception currThreadException = new TestTimedOutException(timeout, timeUnit);
if (stackTrace != null) {
currThreadException.setStackTrace(stackTrace);
thread.interrupt();
}
Exception stuckThreadException = getStuckThreadException(thread);
Exception deadlockException = getDeadlockedThreadsException();
if (stuckThreadException != null || deadlockException != null) {
List<Throwable> exceptions = Stream
.of(currThreadException, stuckThreadException, deadlockException)
.filter(Objects::nonNull)
.collect(Collectors.toList());
return new MultipleFailureException(exceptions);
} else {
return currThreadException;
}
}
/**
* Retrieves the stack trace for a given thread.
*
* @param thread The thread whose stack is to be retrieved.
*
* @return The stack trace; returns a zero-length array if the thread has
* terminated or the stack cannot be retrieved for some other reason.
*/
private StackTraceElement[] getStackTrace(Thread thread)
{
try {
return thread.getStackTrace();
}
catch (SecurityException e) {
return new StackTraceElement[0];
}
}
@Nullable
private Exception getStuckThreadException(Thread mainThread)
{
final Thread stuckThread = getStuckThread(mainThread);
if (stuckThread == null) {
return null;
}
Exception stuckThreadException = new Exception("Appears to be stuck in thread " + stuckThread.getName());
stuckThreadException.setStackTrace(getStackTrace(stuckThread));
return stuckThreadException;
}
/**
* Determines whether the test appears to be stuck in some thread other than
* the "main thread" (the one created to run the test). This feature is experimental.
* Behavior may change after the 4.12 release in response to feedback.
*
* @param mainThread The main thread created by {@code evaluate()}
*
* @return The thread which appears to be causing the problem, if different from
* {@code mainThread}, or {@code null} if the main thread appears to be the
* problem or if the thread cannot be determined. The return value is never equal
* to {@code mainThread}.
*/
private Thread getStuckThread(Thread mainThread)
{
List<Thread> threadsInGroup = getThreadsInGroup(mainThread.getThreadGroup());
if (threadsInGroup.isEmpty()) {
return null;
}
// Now that we have all the threads in the test's thread group: Assume that
// any thread we're "stuck" in is RUNNABLE. Look for all RUNNABLE threads.
// If just one, we return that (unless it equals threadMain). If there's more
// than one, pick the one that's using the most CPU time, if this feature is
// supported.
Thread stuckThread = null;
long maxCpuTime = 0;
for (Thread thread : threadsInGroup) {
if (thread.getState() == Thread.State.RUNNABLE) {
long threadCpuTime = cpuTime(thread);
if (stuckThread == null || threadCpuTime > maxCpuTime) {
stuckThread = thread;
maxCpuTime = threadCpuTime;
}
}
}
return (stuckThread == mainThread) ? null : stuckThread;
}
/**
* Returns all active threads belonging to a thread group.
*
* @param group The thread group.
*
* @return The active threads in the thread group. The result should be a
* complete list of the active threads at some point in time. Returns an empty list
* if this cannot be determined, e.g. because new threads are being created at an
* extremely fast rate.
*/
private List<Thread> getThreadsInGroup(ThreadGroup group)
{
final int activeThreadCount = group.activeCount(); // this is just an estimate
int threadArraySize = Math.max(activeThreadCount * 2, 100);
for (int loopCount = 0; loopCount < 5; loopCount++) {
Thread[] threads = new Thread[threadArraySize];
int enumCount = group.enumerate(threads);
if (enumCount < threadArraySize) {
return Arrays.asList(threads).subList(0, enumCount);
}
// if there are too many threads to fit into the array, enumerate's result
// is >= the array's length; therefore we can't trust that it returned all
// the threads. Try again.
threadArraySize += 100;
}
// threads are proliferating too fast for us. Bail before we get into
// trouble.
return Collections.emptyList();
}
/**
* Returns the CPU time used by a thread, if possible.
*
* @param thr The thread to query.
*
* @return The CPU time used by {@code thr}, or 0 if it cannot be determined.
*/
private long cpuTime(Thread thr)
{
ThreadMXBean mxBean = ManagementFactory.getThreadMXBean();
if (mxBean.isThreadCpuTimeSupported()) {
try {
return mxBean.getThreadCpuTime(thr.getId());
}
catch (UnsupportedOperationException ignore) {
// fall through
}
}
return 0;
}
@Nullable
private Exception getDeadlockedThreadsException()
{
final ThreadMXBean threadMXBean = ManagementFactory.getThreadMXBean();
final long[] deadlockedThreadIds = threadMXBean.findDeadlockedThreads();
if (deadlockedThreadIds == null) {
return null;
}
Exception deadlockException = new Exception("Deadlocked threads:");
for (long deadlockedThreadId : deadlockedThreadIds) {
ThreadInfo threadInfo = threadMXBean.getThreadInfo(deadlockedThreadId);
Exception threadException = new Exception(threadInfo.getThreadName() + " at " + threadInfo.getLockName());
threadException.setStackTrace(threadInfo.getStackTrace());
deadlockException.addSuppressed(threadException);
}
return deadlockException;
}
private class CallableStatement implements Callable<Throwable>
{
private final CountDownLatch startLatch = new CountDownLatch(1);
@Override
public Throwable call() throws Exception
{
try {
startLatch.countDown();
originalStatement.evaluate();
}
catch (Exception e) {
throw e;
}
catch (Throwable e) {
return e;
}
return null;
}
public void awaitStarted() throws InterruptedException
{
startLatch.await();
}
}
}

View File

@ -0,0 +1,47 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.testing;
import org.junit.rules.TestRule;
import org.junit.runner.Description;
import org.junit.runners.model.Statement;
import java.util.concurrent.TimeUnit;
/**
* This Rule is based on {@link org.junit.rules.Timeout}; additionally, deadlocked threads are detected.
*/
public final class DeadlockDetectingTimeout implements TestRule
{
private final long timeout;
private final TimeUnit timeoutUnit;
public DeadlockDetectingTimeout(long timeout, TimeUnit timeoutUnit)
{
this.timeout = timeout;
this.timeoutUnit = timeoutUnit;
}
@Override
public Statement apply(Statement base, Description description)
{
return new DeadlockDetectingFailOnTimeout(timeout, timeoutUnit, base);
}
}
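
For reference, a minimal usage sketch (the test class is hypothetical): applied as a standard JUnit `@Rule`, the rule wraps each test method in the deadlock-detecting timeout statement defined above.

```java
import org.apache.druid.testing.DeadlockDetectingTimeout;
import org.junit.Rule;
import org.junit.Test;

import java.util.concurrent.TimeUnit;

public class ExampleTimeoutTest
{
  // Each test gets 60 seconds; on timeout the failure includes stuck-thread
  // and deadlocked-thread details gathered by DeadlockDetectingFailOnTimeout.
  @Rule
  public final DeadlockDetectingTimeout timeout = new DeadlockDetectingTimeout(60, TimeUnit.SECONDS);

  @Test
  public void completesInTime()
  {
    // test body goes here
  }
}
```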

View File

@ -27,12 +27,10 @@ import com.google.common.collect.Maps;
import com.maxmind.geoip2.DatabaseReader;
import com.maxmind.geoip2.exception.GeoIp2Exception;
import com.maxmind.geoip2.model.Omni;
import org.apache.commons.io.FileUtils;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.Row;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.commons.io.FileUtils;
import org.joda.time.DateTime;
import java.io.File;

View File

@ -794,13 +794,6 @@ public class ImmutableConciseSet
this.size = 0;
}
public ImmutableConciseSet(ByteBuffer byteBuffer)
{
this.words = byteBuffer.asIntBuffer();
this.lastWordIndex = words.capacity() - 1;
this.size = calcSize();
}
public ImmutableConciseSet(IntBuffer buffer)
{
this.words = buffer;

View File

@ -20,8 +20,8 @@
package org.apache.druid.extendedset.intset;
import com.google.common.collect.Lists;
import org.apache.druid.java.util.common.StringUtils;
import junit.framework.Assert;
import org.apache.druid.java.util.common.StringUtils;
import org.junit.Test;
import java.nio.IntBuffer;

View File

@ -28,10 +28,10 @@ import com.google.inject.Key;
import com.google.inject.Provides;
import com.google.inject.name.Named;
import com.google.inject.name.Names;
import org.apache.druid.java.util.emitter.core.Emitter;
import org.apache.druid.guice.JsonConfigProvider;
import org.apache.druid.guice.ManageLifecycle;
import org.apache.druid.initialization.DruidModule;
import org.apache.druid.java.util.emitter.core.Emitter;
import java.util.Collections;
import java.util.List;

View File

@ -33,8 +33,8 @@ import com.google.common.collect.ImmutableSortedMap;
import com.google.common.io.CharStreams;
import com.google.common.io.Files;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
import java.io.File;

View File

@ -20,13 +20,13 @@
package org.apache.druid.emitter.ambari.metrics;
import com.google.common.collect.Maps;
import junitparams.JUnitParamsRunner;
import junitparams.Parameters;
import org.apache.commons.io.IOUtils;
import org.apache.druid.annotations.UsedByJUnitParamsRunner;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
import junitparams.JUnitParamsRunner;
import junitparams.Parameters;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
import org.easymock.EasyMock;
import org.joda.time.DateTime;

View File

@ -21,10 +21,10 @@ package org.apache.druid.storage.azure;
import com.google.common.io.ByteSource;
import com.google.inject.Inject;
import org.apache.commons.io.FileUtils;
import org.apache.druid.java.util.common.CompressionUtils;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.segment.loading.SegmentLoadingException;
import org.apache.commons.io.FileUtils;
import java.io.File;
import java.io.IOException;

View File

@ -22,9 +22,9 @@ package org.apache.druid.storage.azure;
import com.google.common.base.Optional;
import com.google.common.io.ByteSource;
import com.google.common.io.Files;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.druid.java.util.common.StringUtils;
import org.easymock.EasyMockSupport;
import org.junit.Assert;
import org.junit.Before;

View File

@ -24,13 +24,13 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.primitives.Longs;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.query.aggregation.AggregateCombiner;
import org.apache.druid.query.aggregation.Aggregator;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.AggregatorUtil;
import org.apache.druid.query.aggregation.BufferAggregator;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
import org.apache.druid.query.aggregation.LongSumAggregateCombiner;
import org.apache.druid.query.aggregation.AggregateCombiner;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
import org.apache.druid.query.dimension.DefaultDimensionSpec;
import org.apache.druid.segment.ColumnSelectorFactory;
import org.apache.druid.segment.DimensionSelector;

View File

@ -19,14 +19,14 @@
package org.apache.druid.query.aggregation.distinctcount;
import it.unimi.dsi.fastutil.ints.Int2ObjectMap;
import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap;
import org.apache.druid.collections.bitmap.MutableBitmap;
import org.apache.druid.collections.bitmap.WrappedRoaringBitmap;
import org.apache.druid.query.aggregation.BufferAggregator;
import org.apache.druid.query.monomorphicprocessing.RuntimeShapeInspector;
import org.apache.druid.segment.DimensionSelector;
import org.apache.druid.segment.data.IndexedInts;
import it.unimi.dsi.fastutil.ints.Int2ObjectMap;
import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap;
import java.nio.ByteBuffer;

View File

@ -22,8 +22,8 @@ package org.apache.druid.storage.google;
import com.google.api.client.googleapis.json.GoogleJsonResponseException;
import com.google.api.client.googleapis.testing.json.GoogleJsonResponseExceptionFactoryTesting;
import com.google.api.client.json.jackson2.JacksonFactory;
import org.apache.druid.segment.loading.SegmentLoadingException;
import org.apache.commons.io.FileUtils;
import org.apache.druid.segment.loading.SegmentLoadingException;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.junit.Test;

View File

@ -23,9 +23,9 @@ import com.google.api.client.http.InputStreamContent;
import com.google.common.base.Optional;
import com.google.common.io.ByteSource;
import com.google.common.io.Files;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.druid.java.util.common.StringUtils;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.junit.Assert;

View File

@ -29,10 +29,10 @@ import com.google.inject.Key;
import com.google.inject.Provides;
import com.google.inject.name.Named;
import com.google.inject.name.Names;
import org.apache.druid.java.util.emitter.core.Emitter;
import org.apache.druid.guice.JsonConfigProvider;
import org.apache.druid.guice.ManageLifecycle;
import org.apache.druid.initialization.DruidModule;
import org.apache.druid.java.util.emitter.core.Emitter;
import java.util.Collections;
import java.util.List;

View File

@ -20,13 +20,13 @@
package org.apache.druid.emitter.graphite;
import com.google.common.collect.Maps;
import junitparams.JUnitParamsRunner;
import junitparams.Parameters;
import org.apache.commons.io.IOUtils;
import org.apache.druid.annotations.UsedByJUnitParamsRunner;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
import junitparams.JUnitParamsRunner;
import junitparams.Parameters;
import org.apache.commons.io.IOUtils;
import org.easymock.EasyMock;
import org.joda.time.DateTime;
import org.junit.Assert;

View File

@ -19,12 +19,12 @@
package org.apache.druid.data.input.influx;
import com.google.common.collect.ImmutableList;
import org.apache.druid.java.util.common.parsers.ParseException;
import org.apache.druid.java.util.common.parsers.Parser;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.TokenStream;
import org.apache.druid.java.util.common.parsers.ParseException;
import org.apache.druid.java.util.common.parsers.Parser;
import javax.annotation.Nullable;
import java.util.LinkedHashMap;

View File

@ -21,11 +21,11 @@ package org.apache.druid.data.input.influx;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import junitparams.JUnitParamsRunner;
import junitparams.Parameters;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.java.util.common.parsers.ParseException;
import org.apache.druid.java.util.common.parsers.Parser;
import junitparams.JUnitParamsRunner;
import junitparams.Parameters;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;

View File

@ -23,10 +23,6 @@ import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.net.HostAndPort;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.guava.FunctionalIterable;
import org.apache.druid.java.util.common.logger.Logger;
import kafka.api.FetchRequest;
import kafka.api.FetchRequestBuilder;
import kafka.api.PartitionOffsetRequestInfo;
@ -42,6 +38,9 @@ import kafka.javaapi.TopicMetadataRequest;
import kafka.javaapi.TopicMetadataResponse;
import kafka.javaapi.consumer.SimpleConsumer;
import kafka.message.MessageAndOffset;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.guava.FunctionalIterable;
import org.apache.druid.java.util.common.logger.Logger;
import java.nio.ByteBuffer;
import java.util.ArrayList;

@@ -24,10 +24,10 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.inject.Binder;
import com.google.inject.Provides;
import com.google.inject.name.Named;
import org.apache.druid.java.util.emitter.core.Emitter;
import org.apache.druid.guice.JsonConfigProvider;
import org.apache.druid.guice.ManageLifecycle;
import org.apache.druid.initialization.DruidModule;
import org.apache.druid.java.util.emitter.core.Emitter;
import java.util.Collections;
import java.util.List;

@@ -22,8 +22,8 @@ package org.apache.druid.indexing.materializedview;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.collect.Sets;
import com.google.common.base.Strings;
import com.google.common.collect.Sets;
import org.apache.druid.indexing.overlord.DataSourceMetadata;
import java.util.Objects;

@@ -23,10 +23,11 @@ import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.base.Strings;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.druid.data.input.impl.DimensionSchema;
import org.apache.druid.data.input.impl.DimensionsSpec;
import org.apache.druid.indexer.HadoopIOConfig;
@@ -51,7 +52,6 @@ import org.apache.druid.segment.realtime.firehose.ChatHandlerProvider;
import org.apache.druid.segment.transform.TransformSpec;
import org.apache.druid.server.security.AuthorizerMapper;
import org.apache.druid.timeline.DataSegment;
import org.apache.commons.codec.digest.DigestUtils;
import org.joda.time.Interval;
import java.util.List;

@@ -40,16 +40,17 @@ import org.apache.druid.segment.TestHelper;
import org.apache.druid.segment.realtime.firehose.ChatHandlerProvider;
import org.apache.druid.segment.realtime.firehose.NoopChatHandlerProvider;
import org.apache.druid.server.security.AuthorizerMapper;
import static org.easymock.EasyMock.createMock;
import org.hamcrest.CoreMatchers;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.hamcrest.CoreMatchers;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import java.io.IOException;
import static org.easymock.EasyMock.createMock;
public class MaterializedViewSupervisorSpecTest
{
@Rule

@@ -46,15 +46,13 @@ import org.apache.druid.segment.realtime.firehose.ChatHandlerProvider;
import org.apache.druid.server.security.AuthorizerMapper;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.HashBasedNumberedShardSpec;
import static org.easymock.EasyMock.expect;
import org.easymock.EasyMock;
import org.easymock.IAnswer;
import org.joda.time.Interval;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import java.io.IOException;
import java.util.Collections;
@@ -64,7 +62,7 @@ import java.util.Set;
import java.util.SortedMap;
import static org.easymock.EasyMock.createMock;
import org.junit.rules.ExpectedException;
import static org.easymock.EasyMock.expect;
public class MaterializedViewSupervisorTest
{
@@ -85,7 +83,7 @@ public class MaterializedViewSupervisorTest
private ObjectMapper objectMapper = TestHelper.makeJsonMapper();
@Before
public void setUp() throws IOException
public void setUp()
{
derbyConnector = derbyConnectorRule.getConnector();
derbyConnector.createDataSourceTable();
@@ -180,7 +178,7 @@ public class MaterializedViewSupervisorTest
@Test
public void testSuspendedDoesntRun() throws IOException
public void testSuspendedDoesntRun()
{
MaterializedViewSupervisorSpec suspended = new MaterializedViewSupervisorSpec(
"base",
@@ -208,10 +206,12 @@ public class MaterializedViewSupervisorTest
// mock IndexerSQLMetadataStorageCoordinator to ensure that getDataSourceMetadata is not called
// which will be true if truly suspended, since this is the first operation of the 'run' method otherwise
IndexerSQLMetadataStorageCoordinator mock = createMock(IndexerSQLMetadataStorageCoordinator.class);
expect(mock.getDataSourceMetadata(suspended.getDataSourceName())).andAnswer((IAnswer) () -> {
Assert.fail();
return null;
}).anyTimes();
expect(mock.getDataSourceMetadata(suspended.getDataSourceName()))
.andAnswer(() -> {
Assert.fail();
return null;
})
.anyTimes();
EasyMock.replay(mock);
supervisor.run();
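
Note on the mock refactor in the hunk above: with a typed mock, EasyMock's andAnswer can take the lambda directly and infer its return type, so the raw (IAnswer) cast in the old code becomes unnecessary. Below is a minimal self-contained sketch of the same fail-if-called pattern; the MetadataStore interface and all names in it are illustrative stand-ins, not code from this PR:

import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;

import org.junit.Assert;
import org.junit.Test;

public class FailIfCalledPatternTest
{
  // Illustrative collaborator, standing in for IndexerSQLMetadataStorageCoordinator.
  interface MetadataStore
  {
    Object getDataSourceMetadata(String dataSourceName);
  }

  @Test
  public void testStoreIsNeverTouched()
  {
    MetadataStore store = createMock(MetadataStore.class);
    // expect() types the expectation setter, so the lambda's return type is
    // inferred and no raw (IAnswer) cast is needed.
    expect(store.getDataSourceMetadata("base"))
        .andAnswer(() -> {
          Assert.fail("getDataSourceMetadata must not be called while suspended");
          return null;
        })
        .anyTimes();
    replay(store);
    // ... exercise the code under test, which must not consult the store ...
    verify(store); // passes: anyTimes() allows zero invocations
  }
}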

@@ -29,10 +29,8 @@ import com.google.common.util.concurrent.ListeningScheduledExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.inject.Inject;
import org.apache.druid.guice.ManageLifecycleLast;
import org.apache.druid.indexing.overlord.DataSourceMetadata;
import org.apache.druid.metadata.MetadataStorageTablesConfig;
import org.apache.druid.metadata.SQLMetadataConnector;
import org.apache.druid.indexing.materializedview.DerivativeDataSourceMetadata;
import org.apache.druid.indexing.overlord.DataSourceMetadata;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.Pair;
@@ -41,6 +39,8 @@ import org.apache.druid.java.util.common.concurrent.Execs;
import org.apache.druid.java.util.common.lifecycle.LifecycleStart;
import org.apache.druid.java.util.common.lifecycle.LifecycleStop;
import org.apache.druid.java.util.emitter.EmittingLogger;
import org.apache.druid.metadata.MetadataStorageTablesConfig;
import org.apache.druid.metadata.SQLMetadataConnector;
import org.apache.druid.timeline.DataSegment;
import org.joda.time.Duration;
import org.joda.time.Interval;

@@ -41,7 +41,6 @@ import org.apache.druid.java.util.http.client.HttpClient;
import org.apache.druid.metadata.IndexerSQLMetadataStorageCoordinator;
import org.apache.druid.metadata.TestDerbyConnector;
import org.apache.druid.query.Query;
import static org.apache.druid.query.QueryRunnerTestHelper.allGran;
import org.apache.druid.query.QueryToolChestWarehouse;
import org.apache.druid.query.QueryWatcher;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
@@ -69,6 +68,8 @@ import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import static org.apache.druid.query.QueryRunnerTestHelper.allGran;
public class DatasourceOptimizerTest extends CuratorTestBase
{
@Rule

@@ -26,13 +26,6 @@ import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.Query;
import static org.apache.druid.query.QueryRunnerTestHelper.addRowsIndexConstant;
import static org.apache.druid.query.QueryRunnerTestHelper.allGran;
import static org.apache.druid.query.QueryRunnerTestHelper.commonDoubleAggregators;
import static org.apache.druid.query.QueryRunnerTestHelper.dataSource;
import static org.apache.druid.query.QueryRunnerTestHelper.fullOnInterval;
import static org.apache.druid.query.QueryRunnerTestHelper.indexMetric;
import static org.apache.druid.query.QueryRunnerTestHelper.marketDimension;
import org.apache.druid.query.TableDataSource;
import org.apache.druid.query.aggregation.DoubleMaxAggregatorFactory;
import org.apache.druid.query.aggregation.DoubleMinAggregatorFactory;
@@ -48,6 +41,14 @@ import org.junit.Test;
import java.io.IOException;
import java.util.Collections;
import static org.apache.druid.query.QueryRunnerTestHelper.addRowsIndexConstant;
import static org.apache.druid.query.QueryRunnerTestHelper.allGran;
import static org.apache.druid.query.QueryRunnerTestHelper.commonDoubleAggregators;
import static org.apache.druid.query.QueryRunnerTestHelper.dataSource;
import static org.apache.druid.query.QueryRunnerTestHelper.fullOnInterval;
import static org.apache.druid.query.QueryRunnerTestHelper.indexMetric;
import static org.apache.druid.query.QueryRunnerTestHelper.marketDimension;
public class MaterializedViewQueryTest
{
private static final ObjectMapper jsonMapper = TestHelper.makeJsonMapper();

@@ -22,9 +22,9 @@ package org.apache.druid.emitter.opentsdb;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Strings;
import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
import java.io.File;
import java.io.FileInputStream;

@@ -24,10 +24,10 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.inject.Binder;
import com.google.inject.Provides;
import com.google.inject.name.Named;
import org.apache.druid.java.util.emitter.core.Emitter;
import org.apache.druid.guice.JsonConfigProvider;
import org.apache.druid.guice.ManageLifecycle;
import org.apache.druid.initialization.DruidModule;
import org.apache.druid.java.util.emitter.core.Emitter;
import java.util.Collections;
import java.util.List;

@@ -19,9 +19,9 @@
package org.apache.druid.data.input.parquet;
import com.google.common.base.Preconditions;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.util.Utf8;
import org.apache.druid.java.util.common.StringUtils;
import java.nio.ByteBuffer;
import java.util.Arrays;

@@ -22,6 +22,10 @@ import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.apache.avro.LogicalType;
import org.apache.avro.LogicalTypes;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.MapBasedInputRow;
import org.apache.druid.data.input.impl.DimensionSchema;
@@ -29,10 +33,6 @@ import org.apache.druid.data.input.impl.InputRowParser;
import org.apache.druid.data.input.impl.ParseSpec;
import org.apache.druid.data.input.impl.TimestampSpec;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.avro.LogicalType;
import org.apache.avro.LogicalTypes;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.joda.time.DateTime;
import javax.annotation.Nullable;

@@ -23,11 +23,11 @@ package org.apache.parquet.avro;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.druid.data.input.impl.DimensionSchema;
import org.apache.druid.indexer.HadoopDruidIndexerConfig;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.parquet.hadoop.api.InitContext;

@@ -19,12 +19,12 @@
package org.apache.druid.data.input.parquet;
import com.google.common.collect.Lists;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.util.Utf8;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.impl.InputRowParser;
import org.apache.druid.indexer.HadoopDruidIndexerConfig;
import org.apache.druid.indexer.path.StaticPathSpec;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.util.Utf8;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;

@@ -30,16 +30,16 @@ import com.rabbitmq.client.Envelope;
import com.rabbitmq.client.QueueingConsumer.Delivery;
import com.rabbitmq.client.ShutdownListener;
import com.rabbitmq.client.ShutdownSignalException;
import org.apache.druid.data.input.Firehose;
import org.apache.druid.data.input.FirehoseFactory;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.impl.InputRowParser;
import org.apache.druid.java.util.common.logger.Logger;
import net.jodah.lyra.ConnectionOptions;
import net.jodah.lyra.Connections;
import net.jodah.lyra.config.Config;
import net.jodah.lyra.retry.RetryPolicy;
import net.jodah.lyra.util.Duration;
import org.apache.druid.data.input.Firehose;
import org.apache.druid.data.input.FirehoseFactory;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.impl.InputRowParser;
import org.apache.druid.java.util.common.logger.Logger;
import javax.annotation.Nullable;
import java.io.File;

@@ -22,7 +22,6 @@ package org.apache.druid.examples.rabbitmq;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.ConnectionFactory;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.HelpFormatter;
@@ -30,6 +29,7 @@ import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.druid.java.util.common.StringUtils;
import java.text.SimpleDateFormat;
import java.util.ArrayList;

@@ -21,15 +21,14 @@ package org.apache.druid.client.cache;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.java.util.emitter.service.ServiceEmitter;
import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
import org.apache.druid.java.util.common.logger.Logger;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisPoolConfig;
import redis.clients.jedis.exceptions.JedisException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@@ -20,12 +20,12 @@ package org.apache.druid.metadata.storage.sqlserver;
import com.google.common.base.Supplier;
import com.google.inject.Inject;
import org.apache.commons.dbcp2.BasicDataSource;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.metadata.MetadataStorageConnectorConfig;
import org.apache.druid.metadata.MetadataStorageTablesConfig;
import org.apache.druid.metadata.SQLMetadataConnector;
import org.apache.commons.dbcp2.BasicDataSource;
import org.skife.jdbi.v2.Binding;
import org.skife.jdbi.v2.ColonPrefixNamedParamStatementRewriter;
import org.skife.jdbi.v2.DBI;

@@ -18,6 +18,8 @@
*/
package org.apache.druid.metadata.storage.sqlserver;
import junit.framework.Assert;
import org.apache.druid.metadata.storage.sqlserver.SQLServerConnector.CustomStatementRewriter;
import org.junit.Before;
import org.junit.Test;
import org.skife.jdbi.v2.Binding;
@@ -25,9 +27,6 @@ import org.skife.jdbi.v2.StatementContext;
import org.skife.jdbi.v2.exceptions.UnableToCreateStatementException;
import org.skife.jdbi.v2.tweak.RewrittenStatement;
import org.apache.druid.metadata.storage.sqlserver.SQLServerConnector.CustomStatementRewriter;
import junit.framework.Assert;
@SuppressWarnings("nls")
public class CustomStatementRewriterTest
{

@@ -23,7 +23,6 @@ import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.logger.Logger;

@@ -24,10 +24,10 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.inject.Binder;
import com.google.inject.Provides;
import com.google.inject.name.Named;
import org.apache.druid.java.util.emitter.core.Emitter;
import org.apache.druid.guice.JsonConfigProvider;
import org.apache.druid.guice.ManageLifecycle;
import org.apache.druid.initialization.DruidModule;
import org.apache.druid.java.util.emitter.core.Emitter;
import java.util.Collections;
import java.util.List;

@@ -20,9 +20,9 @@
package org.apache.druid.emitter.statsd;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
import com.timgroup.statsd.StatsDClient;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
import org.junit.Test;
import static org.easymock.EasyMock.createMock;

@@ -23,12 +23,12 @@ import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import org.apache.druid.java.util.common.IAE;
import com.twitter.elephantbird.mapreduce.io.ThriftWritable;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.MapBasedInputRow;
import org.apache.druid.data.input.impl.InputRowParser;
import org.apache.druid.data.input.impl.ParseSpec;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.parsers.Parser;
import org.apache.hadoop.io.BytesWritable;
import org.apache.thrift.TBase;

@@ -21,6 +21,7 @@ package org.apache.druid.data.input.thrift;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.apache.commons.codec.binary.Base64;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.impl.DimensionsSpec;
import org.apache.druid.data.input.impl.JSONParseSpec;
@@ -32,7 +33,6 @@ import org.apache.druid.java.util.common.parsers.JSONPathFieldSpec;
import org.apache.druid.java.util.common.parsers.JSONPathFieldType;
import org.apache.druid.java.util.common.parsers.JSONPathSpec;
import org.apache.druid.js.JavaScriptConfig;
import org.apache.commons.codec.binary.Base64;
import org.apache.hadoop.io.BytesWritable;
import org.apache.thrift.TSerializer;
import org.apache.thrift.protocol.TBinaryProtocol;

@@ -20,11 +20,11 @@ package org.apache.druid.data.input;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.avro.generic.GenericRecord;
import org.apache.druid.data.input.avro.AvroParsers;
import org.apache.druid.data.input.impl.InputRowParser;
import org.apache.druid.data.input.impl.ParseSpec;
import org.apache.druid.java.util.common.parsers.ObjectFlattener;
import org.apache.avro.generic.GenericRecord;
import java.util.List;

@@ -21,11 +21,11 @@ package org.apache.druid.data.input;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import org.apache.avro.generic.GenericRecord;
import org.apache.druid.data.input.avro.AvroBytesDecoder;
import org.apache.druid.data.input.avro.AvroParsers;
import org.apache.druid.data.input.impl.ParseSpec;
import org.apache.druid.java.util.common.parsers.ObjectFlattener;
import org.apache.avro.generic.GenericRecord;
import java.nio.ByteBuffer;
import java.util.List;

@@ -23,12 +23,12 @@ import com.google.common.collect.Lists;
import com.jayway.jsonpath.Configuration;
import com.jayway.jsonpath.JsonPath;
import com.jayway.jsonpath.Option;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.parsers.ObjectFlatteners;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.util.Utf8;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.parsers.ObjectFlatteners;
import java.nio.ByteBuffer;
import java.util.Arrays;

@@ -19,13 +19,13 @@
package org.apache.druid.data.input.avro;
import org.apache.avro.generic.GenericRecord;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.impl.MapInputRowParser;
import org.apache.druid.data.input.impl.ParseSpec;
import org.apache.druid.java.util.common.parsers.JSONPathSpec;
import org.apache.druid.java.util.common.parsers.ObjectFlattener;
import org.apache.druid.java.util.common.parsers.ObjectFlatteners;
import org.apache.avro.generic.GenericRecord;
import java.util.List;

@@ -18,11 +18,11 @@
*/
package org.apache.druid.data.input.avro;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.mapreduce.AvroJob;
import org.apache.commons.lang.StringUtils;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

@@ -25,15 +25,15 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import org.apache.druid.guice.annotations.Json;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.java.util.common.parsers.ParseException;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.util.ByteBufferInputStream;
import org.apache.druid.guice.annotations.Json;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.java.util.common.parsers.ParseException;
import java.io.EOFException;
import java.nio.ByteBuffer;

@@ -25,15 +25,15 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import org.apache.druid.guice.annotations.Json;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.java.util.common.parsers.ParseException;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.util.ByteBufferInputStream;
import org.apache.druid.guice.annotations.Json;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.java.util.common.parsers.ParseException;
import java.io.EOFException;
import java.nio.ByteBuffer;

@@ -23,12 +23,12 @@ import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.annotations.VisibleForTesting;
import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import org.apache.druid.java.util.common.parsers.ParseException;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DecoderFactory;
import org.apache.druid.java.util.common.parsers.ParseException;
import java.nio.ByteBuffer;

@@ -20,15 +20,15 @@ package org.apache.druid.data.input.avro;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.druid.data.input.schemarepo.SubjectAndIdConverter;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.java.util.common.parsers.ParseException;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.util.ByteBufferInputStream;
import org.apache.druid.data.input.schemarepo.SubjectAndIdConverter;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.java.util.common.parsers.ParseException;
import org.schemarepo.Repository;
import org.schemarepo.api.TypedSchemaRepository;
import org.schemarepo.api.converter.AvroSchemaConverter;

@@ -22,8 +22,6 @@ import com.fasterxml.jackson.databind.Module;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.io.Closeables;
import com.google.common.io.Files;
import org.apache.druid.data.input.avro.AvroExtensionsModule;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.file.FileReader;
@@ -31,6 +29,8 @@ import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.commons.io.FileUtils;
import org.apache.druid.data.input.avro.AvroExtensionsModule;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.pig.ExecType;
import org.apache.pig.PigServer;
import org.apache.pig.backend.executionengine.ExecJob;

@@ -27,6 +27,11 @@ import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.druid.data.input.avro.AvroExtensionsModule;
import org.apache.druid.data.input.avro.AvroParseSpec;
import org.apache.druid.data.input.avro.SchemaRepoBasedAvroBytesDecoder;
@@ -37,11 +42,6 @@ import org.apache.druid.data.input.schemarepo.Avro1124SubjectAndIdConverter;
import org.apache.druid.java.util.common.parsers.JSONPathFieldSpec;
import org.apache.druid.java.util.common.parsers.JSONPathFieldType;
import org.apache.druid.java.util.common.parsers.JSONPathSpec;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.specific.SpecificDatumWriter;
import org.joda.time.DateTime;
import org.joda.time.chrono.ISOChronology;
import org.junit.Before;

@@ -21,14 +21,14 @@ package org.apache.druid.data.input.avro;
import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.data.input.AvroStreamInputRowParserTest;
import org.apache.druid.data.input.SomeAvroDatum;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.druid.data.input.AvroStreamInputRowParserTest;
import org.apache.druid.data.input.SomeAvroDatum;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.junit.Assert;
import org.junit.Test;

@@ -22,14 +22,14 @@ package org.apache.druid.data.input.avro;
import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.data.input.AvroStreamInputRowParserTest;
import org.apache.druid.data.input.SomeAvroDatum;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.druid.data.input.AvroStreamInputRowParserTest;
import org.apache.druid.data.input.SomeAvroDatum;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.junit.Assert;
import org.junit.Test;

@@ -20,14 +20,14 @@
package org.apache.druid.data.input.avro;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import org.apache.druid.data.input.AvroStreamInputRowParserTest;
import org.apache.druid.data.input.SomeAvroDatum;
import org.apache.druid.java.util.common.parsers.ParseException;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.druid.data.input.AvroStreamInputRowParserTest;
import org.apache.druid.data.input.SomeAvroDatum;
import org.apache.druid.java.util.common.parsers.ParseException;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

Some files were not shown because too many files have changed in this diff.