Mirror of https://github.com/apache/druid.git (synced 2025-02-09 03:24:55 +00:00)
Fix various bugs; Enable more IntelliJ inspections and update error-prone (#6490)
* Fix various bugs; Enable more IntelliJ inspections and update error-prone
* Fix NPE
* Fix inspections
* Remove unused imports
This commit is contained in:
parent bcb754d066
commit 54351a5c75
.idea/inspectionProfiles/Druid.xml (generated), 46 lines changed
@@ -11,28 +11,35 @@
 <inspection_tool class="ArrayObjectsEquals" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="ArraysAsListWithZeroOrOneArgument" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="AssertWithSideEffects" enabled="true" level="ERROR" enabled_by_default="true" />
+<inspection_tool class="CapturingCleaner" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="CastConflictsWithInstanceof" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="CastToIncompatibleInterface" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="CatchMayIgnoreException" enabled="true" level="WARNING" enabled_by_default="true">
 <option name="m_ignoreCatchBlocksWithComments" value="false" />
 </inspection_tool>
 <inspection_tool class="CheckValidXmlInScriptTagBody" enabled="true" level="WARNING" enabled_by_default="true" />
+<inspection_tool class="ClassGetClass" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="ClassNewInstance" enabled="true" level="WARNING" enabled_by_default="true" />
 <inspection_tool class="CollectionAddedToSelf" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="ComparableImplementedButEqualsNotOverridden" enabled="true" level="WARNING" enabled_by_default="true" />
 <inspection_tool class="ComparatorMethodParameterNotUsed" enabled="true" level="ERROR" enabled_by_default="true" />
+<inspection_tool class="ComparatorResultComparison" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="CompareToUsesNonFinalVariable" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="ConstantAssertCondition" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="Contract" enabled="true" level="ERROR" enabled_by_default="true" />
+<inspection_tool class="CopyConstructorMissesField" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="CovariantEquals" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="EmptyInitializer" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="EmptyStatementBody" enabled="true" level="WARNING" enabled_by_default="true">
 <option name="m_reportEmptyBlocks" value="true" />
 <option name="commentsAreContent" value="true" />
 </inspection_tool>
+<inspection_tool class="EndlessStream" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="EqualsAndHashcode" enabled="true" level="WARNING" enabled_by_default="true" />
 <inspection_tool class="EqualsBetweenInconvertibleTypes" enabled="true" level="ERROR" enabled_by_default="true" />
+<inspection_tool class="EqualsOnSuspiciousObject" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="EqualsUsesNonFinalVariable" enabled="true" level="WARNING" enabled_by_default="true" />
+<inspection_tool class="EqualsWhichDoesntCheckParameterClass" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="EqualsWithItself" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="FieldCanBeLocal" enabled="true" level="WARNING" enabled_by_default="true">
 <option name="EXCLUDE_ANNOS">
@@ -72,6 +79,17 @@
 <inspection_tool class="MathRandomCastToInt" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="MavenModelInspection" enabled="true" level="WARNING" enabled_by_default="true" />
 <inspection_tool class="MismatchedArrayReadWrite" enabled="true" level="ERROR" enabled_by_default="true" />
+<inspection_tool class="MismatchedCollectionQueryUpdate" enabled="true" level="ERROR" enabled_by_default="true">
+<option name="queryNames">
+<value />
+</option>
+<option name="updateNames">
+<value />
+</option>
+<option name="ignoredClasses">
+<value />
+</option>
+</inspection_tool>
 <inspection_tool class="MismatchedStringBuilderQueryUpdate" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="MissingOverrideAnnotation" enabled="true" level="WARNING" enabled_by_default="true">
 <scope name="NonGeneratedFiles" level="ERROR" enabled="true">
@@ -93,6 +111,7 @@
 </inspection_tool>
 <inspection_tool class="ObjectEqualsNull" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="ObjectToString" enabled="true" level="ERROR" enabled_by_default="true" />
+<inspection_tool class="OverwrittenKey" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="PrimitiveArrayArgumentToVariableArgMethod" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="RedundantThrows" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="RedundantTypeArguments" enabled="true" level="ERROR" enabled_by_default="true" />
@@ -182,22 +201,38 @@
 <constraint name="E" regexp="java\.lang\.UnsupportedOperationException" within="" contains="" />
 <constraint name="x" minCount="0" maxCount="2147483647" within="" contains="" />
 </searchConfiguration>
+<searchConfiguration name="Use TypeReference<List<...>> instead" created="1539884261626" text="TypeReference<ArrayList<$E$>>" recursive="false" caseInsensitive="true" type="JAVA">
+<constraint name="__context__" target="true" within="" contains="" />
+<constraint name="E" within="" contains="" />
+</searchConfiguration>
+<searchConfiguration name="Use TypeReference<Map<...>> instead" created="1539884261626" text="TypeReference<HashMap<$K$, $V$>>" recursive="false" caseInsensitive="true" type="JAVA">
+<constraint name="__context__" target="true" within="" contains="" />
+<constraint name="K" within="" contains="" />
+<constraint name="V" within="" contains="" />
+</searchConfiguration>
+<searchConfiguration name="Use TypeReference<Set<...>> instead" created="1539884261626" text="TypeReference<HashSet<$E$>>" recursive="false" caseInsensitive="true" type="JAVA">
+<constraint name="__context__" target="true" within="" contains="" />
+<constraint name="E" within="" contains="" />
+</searchConfiguration>
 </inspection_tool>
 <inspection_tool class="SpellCheckingInspection" enabled="false" level="TYPO" enabled_by_default="false">
 <option name="processCode" value="true" />
 <option name="processLiterals" value="true" />
 <option name="processComments" value="true" />
 </inspection_tool>
-<inspection_tool class="StaticCallOnSubclass" enabled="true" level="WARNING" enabled_by_default="true" />
+<inspection_tool class="StaticCallOnSubclass" enabled="true" level="ERROR" enabled_by_default="true" />
-<inspection_tool class="StaticFieldReferenceOnSubclass" enabled="true" level="WARNING" enabled_by_default="true" />
+<inspection_tool class="StaticFieldReferenceOnSubclass" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="StringConcatenationInFormatCall" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="StringConcatenationInMessageFormatCall" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="StringConcatenationMissingWhitespace" enabled="true" level="WARNING" enabled_by_default="true" />
 <inspection_tool class="StringEquality" enabled="true" level="ERROR" enabled_by_default="true" />
+<inspection_tool class="StringEqualsCharSequence" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="StringTokenizerDelimiter" enabled="true" level="ERROR" enabled_by_default="true" />
-<inspection_tool class="SubtractionInCompareTo" enabled="true" level="WARNING" enabled_by_default="true" />
+<inspection_tool class="SubtractionInCompareTo" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="SuspiciousArrayCast" enabled="true" level="WARNING" enabled_by_default="true" />
+<inspection_tool class="SuspiciousArrayMethodCall" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="SuspiciousIndentAfterControlStatement" enabled="true" level="ERROR" enabled_by_default="true" />
+<inspection_tool class="SuspiciousListRemoveInLoop" enabled="true" level="ERROR" enabled_by_default="true" />
 <inspection_tool class="SuspiciousMethodCalls" enabled="true" level="ERROR" enabled_by_default="true">
 <option name="REPORT_CONVERTIBLE_METHOD_CALLS" value="true" />
 </inspection_tool>
@@ -226,6 +261,11 @@
 <option name="ignoreInModuleStatements" value="true" />
 </inspection_tool>
 <inspection_tool class="UnnecessaryInterfaceModifier" enabled="true" level="ERROR" enabled_by_default="true" />
+<inspection_tool class="UnusedAssignment" enabled="true" level="ERROR" enabled_by_default="true">
+<option name="REPORT_PREFIX_EXPRESSIONS" value="true" />
+<option name="REPORT_POSTFIX_EXPRESSIONS" value="true" />
+<option name="REPORT_REDUNDANT_INITIALIZER" value="true" />
+</inspection_tool>
 <inspection_tool class="UnusedCatchParameter" enabled="true" level="WARNING" enabled_by_default="true">
 <option name="m_ignoreCatchBlocksWithComments" value="false" />
 <option name="m_ignoreTestCases" value="false" />
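Note (illustration, not part of the diff): the profile above adds structural-search patterns that flag Jackson TypeReference instances bound to concrete collection types. A minimal sketch of the code shape they steer toward, assuming Jackson's ObjectMapper and TypeReference; class and method names here are illustrative, not from the commit:

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.List;
import java.util.Map;

class TypeReferenceSketch
{
  private static final ObjectMapper MAPPER = new ObjectMapper();

  // Flagged form: new TypeReference<ArrayList<String>>() {} ties callers to ArrayList.
  // Preferred form: capture the interface type instead.
  static List<String> readNames(String json) throws Exception
  {
    return MAPPER.readValue(json, new TypeReference<List<String>>() {});
  }

  // Likewise TypeReference<Map<...>> is preferred over TypeReference<HashMap<...>>.
  static Map<String, Long> readCounts(String json) throws Exception
  {
    return MAPPER.readValue(json, new TypeReference<Map<String, Long>>() {});
  }
}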
@@ -95,7 +95,6 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ExecutorService;
@@ -167,15 +166,13 @@ public class TimeCompareBenchmark
 INDEX_MERGER_V9 = new IndexMergerV9(JSON_MAPPER, INDEX_IO, OffHeapMemorySegmentWriteOutMediumFactory.instance());
 }

-private static final Map<String, Map<String, Object>> SCHEMA_QUERY_MAP = new LinkedHashMap<>();
-
 private void setupQueries()
 {
 // queries for the basic schema
-Map<String, Object> basicQueries = new LinkedHashMap<>();
 BenchmarkSchemaInfo basicSchema = BenchmarkSchemas.SCHEMA_MAP.get("basic");

-QuerySegmentSpec intervalSpec = new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval()));
+QuerySegmentSpec intervalSpec =
+new MultipleIntervalSegmentSpec(Collections.singletonList(basicSchema.getDataInterval()));

 long startMillis = basicSchema.getDataInterval().getStartMillis();
 long endMillis = basicSchema.getDataInterval().getEndMillis();
@@ -204,9 +201,7 @@ public class TimeCompareBenchmark
 );
 queryAggs.add(
 new FilteredAggregatorFactory(
-new LongSumAggregatorFactory(
-"_cmp_sumLongSequential", "sumLongSequential"
-),
+new LongSumAggregatorFactory("_cmp_sumLongSequential", "sumLongSequential"),
 new IntervalDimFilter(
 ColumnHolder.TIME_COLUMN_NAME,
 Collections.singletonList(previous),
@@ -235,8 +230,6 @@ public class TimeCompareBenchmark
 new TopNQueryQueryToolChest(new TopNQueryConfig(), QueryBenchmarkUtil.NoopIntervalChunkingQueryRunnerDecorator()),
 QueryBenchmarkUtil.NOOP_QUERYWATCHER
 );
-
-basicQueries.put("topNTimeCompare", queryBuilderA);
 }
 { // basic.timeseriesTimeCompare
 List<AggregatorFactory> queryAggs = new ArrayList<>();
@@ -265,7 +258,8 @@ public class TimeCompareBenchmark
 )
 );

-Druids.TimeseriesQueryBuilder timeseriesQueryBuilder = Druids.newTimeseriesQueryBuilder()
+Druids.TimeseriesQueryBuilder timeseriesQueryBuilder = Druids
+.newTimeseriesQueryBuilder()
 .dataSource("blah")
 .granularity(Granularities.ALL)
 .intervals(intervalSpec)
@@ -274,15 +268,11 @@ public class TimeCompareBenchmark

 timeseriesQuery = timeseriesQueryBuilder.build();
 timeseriesFactory = new TimeseriesQueryRunnerFactory(
-new TimeseriesQueryQueryToolChest(
-QueryBenchmarkUtil.NoopIntervalChunkingQueryRunnerDecorator()
-),
+new TimeseriesQueryQueryToolChest(QueryBenchmarkUtil.NoopIntervalChunkingQueryRunnerDecorator()),
 new TimeseriesQueryEngine(),
 QueryBenchmarkUtil.NOOP_QUERYWATCHER
 );
 }

-SCHEMA_QUERY_MAP.put("basic", basicQueries);
 }

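Note (illustration, not part of the diff): the TimeCompareBenchmark lines removed above are a typical catch of the newly enabled MismatchedCollectionQueryUpdate inspection: SCHEMA_QUERY_MAP and basicQueries were only ever written to, never read. A minimal, hypothetical sketch of the pattern the inspection reports:

import java.util.LinkedHashMap;
import java.util.Map;

class BenchmarkSetupSketch
{
  // Write-only collection: populated during setup but never queried anywhere,
  // so the inspection flags it and the code can simply be deleted.
  private static final Map<String, Object> QUERY_MAP = new LinkedHashMap<>();

  void setup(Object queryBuilder)
  {
    QUERY_MAP.put("topNTimeCompare", queryBuilder);
  }
}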
@@ -59,26 +59,6 @@ public class Comparators
 return NATURAL_NULLS_FIRST;
 }

-/**
- * This is a "reverse" comparator. Positive becomes negative, negative becomes positive and 0 (equal) stays the same.
- * This was poorly named as "inverse" as it's not really inverting a true/false relationship
- *
- * @param baseComp
- * @param <T>
- * @return
- */
-public static <T> Comparator<T> inverse(final Comparator<T> baseComp)
-{
-return new Comparator<T>()
-{
-@Override
-public int compare(T t, T t1)
-{
-return baseComp.compare(t1, t);
-}
-};
-}
-
 /**
  * Use Guava Ordering.natural() instead
  *
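Note (illustration, not part of the diff): with Comparators.inverse() removed, callers switch to the JDK's Comparator.reversed(), as the MaterializedViewSupervisor hunk later in this diff shows. A minimal sketch of the equivalent usage, with illustrative names:

import java.util.Comparator;
import java.util.TreeMap;

class ReversedComparatorSketch
{
  static TreeMap<Integer, String> descendingKeys()
  {
    // Comparator.reversed() flips the ordering exactly as the removed
    // inverse(baseComp) wrapper did, without a hand-rolled anonymous class.
    Comparator<Integer> descending = Comparator.<Integer>naturalOrder().reversed();
    return new TreeMap<>(descending);
  }
}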
@@ -50,7 +50,7 @@ public class Emitters
 Lifecycle lifecycle
 )
 {
-Map<String, Object> jsonified = new HashMap<>();
+Map<String, Object> jsonified;
 if (props.getProperty(LOG_EMITTER_PROP) != null) {
 jsonified = makeLoggingMap(props);
 jsonified.put("type", "logging");
@@ -77,7 +77,7 @@ public class ChannelResourceFactory implements ResourceFactory<String, ChannelFu
 public ChannelFuture generate(final String hostname)
 {
 log.debug("Generating: %s", hostname);
-URL url = null;
+URL url;
 try {
 url = new URL(hostname);
 }
@@ -52,6 +52,9 @@ public abstract class ExprEval<T>

 public static ExprEval of(@Nullable String stringValue)
 {
+if (stringValue == null) {
+return StringExprEval.OF_NULL;
+}
 return new StringExprEval(stringValue);
 }

@@ -180,7 +183,11 @@ public abstract class ExprEval<T>
 case DOUBLE:
 return this;
 case LONG:
-return ExprEval.of(value == null ? null : asLong());
+if (value == null) {
+return ExprEval.of(null);
+} else {
+return ExprEval.of(asLong());
+}
 case STRING:
 return ExprEval.of(asString());
 }
@@ -218,7 +225,11 @@ public abstract class ExprEval<T>
 {
 switch (castTo) {
 case DOUBLE:
-return ExprEval.of(value == null ? null : asDouble());
+if (value == null) {
+return ExprEval.of(null);
+} else {
+return ExprEval.of(asDouble());
+}
 case LONG:
 return this;
 case STRING:
@@ -236,6 +247,8 @@ public abstract class ExprEval<T>

 private static class StringExprEval extends ExprEval<String>
 {
+private static final StringExprEval OF_NULL = new StringExprEval(null);
+
 private Number numericVal;

 private StringExprEval(@Nullable String value)
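Note (illustration, not part of the diff): the ExprEval change above caches a single instance for the null-string case rather than allocating a fresh wrapper on every ExprEval.of(null) call. A simplified, hypothetical sketch of the idea (the real ExprEval API is much larger than shown here):

final class StringEvalSketch
{
  // One shared immutable instance covers every null input.
  private static final StringEvalSketch OF_NULL = new StringEvalSketch(null);

  private final String value;

  private StringEvalSketch(String value)
  {
    this.value = value;
  }

  static StringEvalSketch of(String value)
  {
    return value == null ? OF_NULL : new StringEvalSketch(value);
  }
}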
@@ -26,6 +26,7 @@ import org.junit.Test;

 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.Iterator;
 import java.util.NoSuchElementException;

@@ -170,10 +171,9 @@ public class OrderedMergeIteratorTest
 @Test(expected = NoSuchElementException.class)
 public void testNoElementInNext()
 {
-final ArrayList<Iterator<Integer>> iterators = new ArrayList<>();
-OrderedMergeIterator<Integer> iter = new OrderedMergeIterator<Integer>(
+OrderedMergeIterator<Integer> iter = new OrderedMergeIterator<>(
 Ordering.natural(),
-iterators.iterator()
+Collections.emptyIterator()
 );
 iter.next();
 }
@@ -181,10 +181,9 @@ public class OrderedMergeIteratorTest
 @Test(expected = UnsupportedOperationException.class)
 public void testRemove()
 {
-final ArrayList<Iterator<Integer>> iterators = new ArrayList<>();
-OrderedMergeIterator<Integer> iter = new OrderedMergeIterator<Integer>(
+OrderedMergeIterator<Integer> iter = new OrderedMergeIterator<>(
 Ordering.natural(),
-iterators.iterator()
+Collections.emptyIterator()
 );
 iter.remove();
 }
@@ -31,34 +31,6 @@ import java.util.Comparator;
 */
 public class ComparatorsTest
 {
-@Test
-public void testInverse()
-{
-Comparator<Integer> normal = Comparators.comparable();
-Comparator<Integer> inverted = Comparators.inverse(normal);
-
-Assert.assertEquals(-1, normal.compare(0, 1));
-Assert.assertEquals(1, normal.compare(1, 0));
-Assert.assertEquals(0, normal.compare(1, 1));
-Assert.assertEquals(1, inverted.compare(0, 1));
-Assert.assertEquals(-1, inverted.compare(1, 0));
-Assert.assertEquals(0, inverted.compare(1, 1));
-}
-
-@Test
-public void testInverseOverflow()
-{
-Comparator<Integer> invertedSimpleIntegerComparator = Comparators.inverse(new Comparator<Integer>()
-{
-@Override
-public int compare(Integer o1, Integer o2)
-{
-return o1 - o2;
-}
-});
-Assert.assertTrue(invertedSimpleIntegerComparator.compare(0, Integer.MIN_VALUE) < 0);
-}
-
 @Test
 public void testIntervalsByStartThenEnd()
 {
@@ -138,7 +138,7 @@ public class ConcatSequenceTest
 public Integer accumulate(Integer accumulated, Integer in)
 {
 Assert.assertEquals(accumulated, in);
-return ++accumulated;
+return accumulated + 1;
 }
 }
 ).intValue()
@@ -154,7 +154,7 @@ public class ConcatSequenceTest
 public Integer accumulate(Integer accumulated, Integer in)
 {
 Assert.assertEquals(accumulated, in);
-return ++accumulated;
+return accumulated + 1;
 }
 }
 );
@@ -134,7 +134,7 @@ public class SequenceTestHelper
 @Override
 public Integer accumulate(Integer accumulated, Integer in)
 {
-return ++accumulated;
+return accumulated + 1;
 }
 }
 );
@@ -156,7 +156,7 @@ public class SequenceTestHelper
 @Override
 public Integer accumulate(Integer accumulated, Integer in)
 {
-return ++accumulated;
+return accumulated + 1;
 }
 }
 );
@@ -29,7 +29,7 @@ public final class BitIterator implements IntSet.IntIterator
 private boolean literalAndZeroFill;
 private int nextIndex = 0;
 private int nextOffset = 0;
-private int next = -1;
+private int next;

 BitIterator(ImmutableConciseSet immutableConciseSet)
 {
@@ -1272,9 +1272,9 @@ public class ConciseSet extends AbstractIntSet implements Serializable
 final ConciseSet other = convert(o);

 // the word at the end must be the same
-int res = this.last - other.last;
+int res = Integer.compare(this.last, other.last);
 if (res != 0) {
-return res < 0 ? -1 : 1;
+return res;
 }

 // scan words from MSB to LSB
@@ -1295,9 +1295,9 @@ public class ConciseSet extends AbstractIntSet implements Serializable
 return -1;
 }
 // compare two sequences of zeros
-res = getSequenceCount(otherWord) - getSequenceCount(thisWord);
+res = Integer.compare(getSequenceCount(otherWord), getSequenceCount(thisWord));
 if (res != 0) {
-return res < 0 ? -1 : 1;
+return res;
 }
 } else {
 if (isZeroSequence(otherWord)) {
@@ -1305,9 +1305,9 @@ public class ConciseSet extends AbstractIntSet implements Serializable
 return 1;
 }
 // compare two sequences of ones
-res = getSequenceCount(thisWord) - getSequenceCount(otherWord);
+res = Integer.compare(getSequenceCount(thisWord), getSequenceCount(otherWord));
 if (res != 0) {
-return res < 0 ? -1 : 1;
+return res;
 }
 }
 // if the sequences are the same (both zeros or both ones)
@@ -1363,9 +1363,10 @@ public class ConciseSet extends AbstractIntSet implements Serializable
 otherWord--;
 }
 } else {
-res = thisWord - otherWord; // equals getLiteralBits(thisWord) - getLiteralBits(otherWord)
+// equals compare(getLiteralBits(thisWord), getLiteralBits(otherWord))
+res = Integer.compare(thisWord, otherWord);
 if (res != 0) {
-return res < 0 ? -1 : 1;
+return res;
 }
 if (--thisIndex >= 0) {
 thisWord = this.words[thisIndex];
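Note (illustration, not part of the diff): the ConciseSet hunks above replace subtraction-based comparisons with Integer.compare, and the profile promotes SubtractionInCompareTo to ERROR; the removed ComparatorsTest.testInverseOverflow exercised exactly this failure mode. Subtraction can overflow when the operands are far apart, flipping the sign of the "difference"; a standalone demonstration:

public class CompareOverflowDemo
{
  public static void main(String[] args)
  {
    int a = 0;
    int b = Integer.MIN_VALUE;

    // 0 - Integer.MIN_VALUE overflows back to Integer.MIN_VALUE, so a
    // subtraction-based comparator would claim a < b even though a > b.
    System.out.println(a - b);                 // prints -2147483648

    // Integer.compare never overflows and returns the correct sign.
    System.out.println(Integer.compare(a, b)); // prints 1
  }
}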
@@ -216,7 +216,7 @@ public class ImmutableConciseSet
 int endIndex = length - 1;

 int wordsWalked = 0;
-int last = 0;
+int last;

 WordIterator iter = set.newWordIterator();

@@ -30,11 +30,11 @@ public class AzureBlob
 {
 @JsonProperty
 @NotNull
-private String container = null;
+private String container;

 @JsonProperty
 @NotNull
-private String path = null;
+private String path;

 @JsonCreator
 public AzureBlob(@JsonProperty("container") String container, @JsonProperty("path") String path)
@@ -62,9 +62,8 @@ public class CassandraDataSegmentPuller extends CassandraStorage
 final File tmpFile = new File(outDir, "index.zip");
 log.info("Pulling to temporary local cache [%s]", tmpFile.getAbsolutePath());

-final FileUtils.FileCopyResult localResult;
 try {
-localResult = RetryUtils.retry(
+RetryUtils.retry(
 () -> {
 try (OutputStream os = new FileOutputStream(tmpFile)) {
 ChunkedStorage
@@ -29,15 +29,15 @@ public class CloudFilesBlob
 {
 @JsonProperty
 @NotNull
-private String container = null;
+private String container;

 @JsonProperty
 @NotNull
-private String path = null;
+private String path;

 @JsonProperty
 @NotNull
-private String region = null;
+private String region;

 @JsonCreator
 public CloudFilesBlob(
@@ -96,7 +96,7 @@ public class CloudFilesStorageDruidModule implements DruidModule
 {
 log.info("Building Cloud Files Api...");

-Iterable<com.google.inject.Module> modules = null;
+Iterable<com.google.inject.Module> modules;
 if (config.getUseServiceNet()) {
 log.info("Configuring Cloud Files Api to use the internal service network...");
 modules = ImmutableSet.of(new SLF4JLoggingModule(), new InternalUrlModule());
@@ -197,7 +197,7 @@ public class KafkaSimpleConsumer
 )
 );
 OffsetRequest request = new OffsetRequest(requestInfo, kafka.api.OffsetRequest.CurrentVersion(), clientId);
-OffsetResponse response = null;
+OffsetResponse response;
 try {
 response = consumer.getOffsetsBefore(request);
 }
@@ -219,7 +219,7 @@ public class KafkaSimpleConsumer

 public Iterable<BytesMessageWithOffset> fetch(long offset, int timeoutMs) throws InterruptedException
 {
-FetchResponse response = null;
+FetchResponse response;
 Broker previousLeader = leaderBroker;
 while (true) {
 ensureConsumer(previousLeader);
@@ -327,7 +327,8 @@ public class MaterializedViewSupervisor implements Supervisor
 // use max created_date of base segments as the version of derivative segments
 Map<Interval, String> maxCreatedDate = baseSegmentsSnapshot.lhs;
 Map<Interval, String> derivativeVersion = derivativeSegmentsSnapshot.lhs;
-SortedMap<Interval, String> sortedToBuildInterval = new TreeMap<>(Comparators.inverse(Comparators.intervalsByStartThenEnd()));
+SortedMap<Interval, String> sortedToBuildInterval =
+new TreeMap<>(Comparators.intervalsByStartThenEnd().reversed());
 // find the intervals to drop and to build
 MapDifference<Interval, String> difference = Maps.difference(maxCreatedDate, derivativeVersion);
 Map<Interval, String> toBuildInterval = new HashMap<>(difference.entriesOnlyOnLeft());
@@ -54,7 +54,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.orc.CompressionKind;
 import org.apache.orc.OrcFile;
 import org.apache.orc.TypeDescription;
@@ -225,7 +225,7 @@ public class OrcIndexGeneratorJobTest
 false,
 false,
 false,
-ImmutableMap.of(JobContext.NUM_REDUCES, "0"), //verifies that set num reducers is ignored
+ImmutableMap.of(MRJobConfig.NUM_REDUCES, "0"), //verifies that set num reducers is ignored
 false,
 true,
 null,
@@ -19,6 +19,7 @@

 package org.apache.druid.query.aggregation.datasketches.quantiles;

+import com.yahoo.sketches.quantiles.DoublesSketch;
 import com.yahoo.sketches.quantiles.UpdateDoublesSketch;
 import org.apache.druid.query.aggregation.Aggregator;
 import org.apache.druid.segment.ColumnValueSelector;
@@ -35,7 +36,7 @@ public class DoublesSketchBuildAggregator implements Aggregator
 {
 this.valueSelector = valueSelector;
 this.size = size;
-sketch = UpdateDoublesSketch.builder().setK(size).build();
+sketch = DoublesSketch.builder().setK(size).build();
 }

 @Override
@@ -20,6 +20,7 @@
 package org.apache.druid.query.aggregation.datasketches.quantiles;

 import com.yahoo.memory.WritableMemory;
+import com.yahoo.sketches.quantiles.DoublesSketch;
 import com.yahoo.sketches.quantiles.UpdateDoublesSketch;
 import it.unimi.dsi.fastutil.ints.Int2ObjectMap;
 import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap;
@@ -54,7 +55,7 @@ public class DoublesSketchBuildBufferAggregator implements BufferAggregator
 {
 final WritableMemory mem = getMemory(buffer);
 final WritableMemory region = mem.writableRegion(position, maxIntermediateSize);
-final UpdateDoublesSketch sketch = UpdateDoublesSketch.builder().setK(size).build(region);
+final UpdateDoublesSketch sketch = DoublesSketch.builder().setK(size).build(region);
 putSketch(buffer, position, sketch);
 }

@@ -90,6 +90,7 @@ public class SketchHolder

 private static final Comparator<Memory> MEMORY_COMPARATOR = new Comparator<Memory>()
 {
+@SuppressWarnings("SubtractionInCompareTo")
 @Override
 public int compare(Memory o1, Memory o2)
 {
@@ -74,7 +74,7 @@ public class ArrayOfDoublesSketchToQuantilesSketchPostAggregator extends ArrayOf
 public DoublesSketch compute(final Map<String, Object> combinedAggregators)
 {
 final ArrayOfDoublesSketch sketch = (ArrayOfDoublesSketch) getField().compute(combinedAggregators);
-final UpdateDoublesSketch qs = UpdateDoublesSketch.builder().setK(k).build();
+final UpdateDoublesSketch qs = DoublesSketch.builder().setK(k).build();
 final ArrayOfDoublesSketchIterator it = sketch.iterator();
 while (it.next()) {
 qs.update(it.getValues()[column - 1]); // convert 1-based column number to zero-based index
@@ -19,6 +19,7 @@

 package org.apache.druid.query.aggregation.datasketches.quantiles;

+import com.yahoo.sketches.quantiles.DoublesSketch;
 import com.yahoo.sketches.quantiles.UpdateDoublesSketch;
 import org.apache.commons.codec.binary.Base64;

@@ -44,7 +45,7 @@ public class GenerateTestData
 int sequenceNumber = 0;
 for (int i = 0; i < 20; i++) {
 int product = rand.nextInt(10);
-UpdateDoublesSketch sketch = UpdateDoublesSketch.builder().build();
+UpdateDoublesSketch sketch = DoublesSketch.builder().build();
 for (int j = 0; j < 20; j++) {
 double value = rand.nextDouble();
 buildData.write("2016010101");
@@ -143,15 +143,17 @@ public class DruidKerberosAuthenticationHandler extends KerberosAuthenticationHa
 public AuthenticationToken authenticate(HttpServletRequest request, final HttpServletResponse response)
 throws IOException, AuthenticationException
 {
-AuthenticationToken token = null;
+AuthenticationToken token;
-String authorization = request.getHeader(org.apache.hadoop.security.authentication.client.KerberosAuthenticator.AUTHORIZATION);
+String authorization = request
+.getHeader(org.apache.hadoop.security.authentication.client.KerberosAuthenticator.AUTHORIZATION);

-if (authorization == null
-|| !authorization.startsWith(org.apache.hadoop.security.authentication.client.KerberosAuthenticator.NEGOTIATE)) {
+if (authorization == null ||
+!authorization.startsWith(org.apache.hadoop.security.authentication.client.KerberosAuthenticator.NEGOTIATE)) {
 return null;
 } else {
-authorization = authorization.substring(org.apache.hadoop.security.authentication.client.KerberosAuthenticator.NEGOTIATE
-.length()).trim();
+authorization = authorization
+.substring(org.apache.hadoop.security.authentication.client.KerberosAuthenticator.NEGOTIATE.length())
+.trim();
 final Base64 base64 = new Base64(0);
 final byte[] clientToken = base64.decode(authorization);
 final String serverName = request.getServerName();
@@ -248,7 +248,7 @@ public class KerberosAuthenticator implements Authenticator
 if (isExcluded(path)) {
 filterChain.doFilter(request, response);
 } else {
-String clientPrincipal = null;
+String clientPrincipal;
 try {
 Cookie[] cookies = httpReq.getCookies();
 if (cookies == null) {
@@ -73,6 +73,15 @@ import java.util.HashMap;
 */
 public class HdfsDataSegmentPusherTest
 {
+static TestObjectMapper objectMapper;
+
+static {
+objectMapper = new TestObjectMapper();
+InjectableValues.Std injectableValues = new InjectableValues.Std();
+injectableValues.addValue(ObjectMapper.class, objectMapper);
+injectableValues.addValue(DataSegment.PruneLoadSpecHolder.class, DataSegment.PruneLoadSpecHolder.DEFAULT);
+objectMapper.setInjectableValues(injectableValues);
+}

 @Rule
 public final TemporaryFolder tempFolder = new TemporaryFolder();
@@ -80,23 +89,15 @@
 @Rule
 public final ExpectedException expectedException = ExpectedException.none();

-static TestObjectMapper objectMapper = new TestObjectMapper();
-
 private HdfsDataSegmentPusher hdfsDataSegmentPusher;

 @Before
-public void setUp() throws IOException
+public void setUp()
 {
 HdfsDataSegmentPusherConfig hdfsDataSegmentPusherConf = new HdfsDataSegmentPusherConfig();
 hdfsDataSegmentPusherConf.setStorageDirectory("path/to/");
 hdfsDataSegmentPusher = new HdfsDataSegmentPusher(hdfsDataSegmentPusherConf, new Configuration(true), objectMapper);
 }
-static {
-objectMapper = new TestObjectMapper();
-InjectableValues.Std injectableValues = new InjectableValues.Std();
-injectableValues.addValue(ObjectMapper.class, objectMapper);
-injectableValues.addValue(DataSegment.PruneLoadSpecHolder.class, DataSegment.PruneLoadSpecHolder.DEFAULT);
-objectMapper.setInjectableValues(injectableValues);
-}

 @Test
 public void testPushWithScheme() throws Exception
@@ -363,7 +363,6 @@ public class ApproximateHistogram
 mergeValue = true;
 }
 if (deltaLeft < minDelta) {
-minDelta = deltaLeft;
 minPos = insertAt - 1;
 mergeValue = true;
 }
@@ -1563,7 +1562,7 @@ public class ApproximateHistogram
 int i = 0;
 int sum = 0;
 int k = 1;
-long count = 0;
+long count;
 while (k <= this.binCount()) {
 count = bins[k - 1];
 if (sum + count > s) {
@@ -1583,7 +1582,7 @@ public class ApproximateHistogram
 final double c = -2 * d;
 final long a = bins[i] - bins[i - 1];
 final long b = 2 * bins[i - 1];
-double z = 0;
+double z;
 if (a == 0) {
 z = -c / b;
 } else {
@@ -22,6 +22,7 @@ package org.apache.druid.query.aggregation.histogram;
 import com.google.common.collect.Iterators;
 import org.apache.druid.java.util.common.StringUtils;
 import org.junit.Assert;
+import org.junit.Ignore;
 import org.junit.Test;

 import java.nio.ByteBuffer;
@@ -221,6 +222,7 @@ public class ApproximateHistogramTest
 }

 //@Test
+@Ignore
 @SuppressWarnings("unused") //TODO rewrite using JMH and move to the benchmarks module
 public void testFoldSpeed()
 {
@@ -34,7 +34,6 @@ import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
 import com.google.common.primitives.Longs;
 import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
@@ -43,6 +42,7 @@ import com.google.common.util.concurrent.MoreExecutors;
 import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.druid.indexer.TaskLocation;
 import org.apache.druid.indexer.TaskStatus;
+import org.apache.druid.indexing.common.IndexTaskClient;
 import org.apache.druid.indexing.common.TaskInfoProvider;
 import org.apache.druid.indexing.common.stats.RowIngestionMetersFactory;
 import org.apache.druid.indexing.common.task.RealtimeIndexTask;
@@ -129,7 +129,6 @@ public class KafkaSupervisor implements Supervisor
 private static final long INITIAL_GET_OFFSET_DELAY_MILLIS = 15000;
 private static final long INITIAL_EMIT_LAG_METRIC_DELAY_MILLIS = 25000;
 private static final int MAX_INITIALIZATION_RETRIES = 20;
-private static final CopyOnWriteArrayList EMPTY_LIST = Lists.newCopyOnWriteArrayList();

 public static final String IS_INCREMENTAL_HANDOFF_SUPPORTED = "IS_INCREMENTAL_HANDOFF_SUPPORTED";

@@ -337,7 +336,7 @@ public class KafkaSupervisor implements Supervisor
 this.futureTimeoutInSeconds = Math.max(
 MINIMUM_FUTURE_TIMEOUT_IN_SECONDS,
 tuningConfig.getChatRetries() * (tuningConfig.getHttpTimeout().getStandardSeconds()
-+ KafkaIndexTaskClient.MAX_RETRY_WAIT_SECONDS)
++ IndexTaskClient.MAX_RETRY_WAIT_SECONDS)
 );

 int chatThreads = (this.tuningConfig.getChatThreads() != null
@@ -468,6 +467,12 @@ public class KafkaSupervisor implements Supervisor
 }
 }

+private boolean someTaskGroupsPendingCompletion(Integer groupId)
+{
+CopyOnWriteArrayList<TaskGroup> taskGroups = pendingCompletionTaskGroups.get(groupId);
+return taskGroups != null && taskGroups.size() > 0;
+}
+
 @Override
 public SupervisorReport getStatus()
 {
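Note (illustration, not part of the diff): the new someTaskGroupsPendingCompletion helper lets KafkaSupervisor drop the shared, raw-typed EMPTY_LIST constant; instead of map.getOrDefault(key, EMPTY_LIST).size() > 0, the map is queried once and null-checked. A generic sketch of that pattern, with hypothetical names:

import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

class PendingWorkSketch
{
  private final Map<Integer, List<String>> pendingByGroup = new ConcurrentHashMap<>();

  // Equivalent to getOrDefault(groupId, EMPTY_LIST).size() > 0,
  // without a shared default value or a raw-typed constant.
  boolean hasPending(Integer groupId)
  {
    List<String> tasks = pendingByGroup.get(groupId);
    return tasks != null && !tasks.isEmpty();
  }
}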
@@ -1341,7 +1346,7 @@ public class KafkaSupervisor implements Supervisor
 partitionOffset.getValue() :
 latestOffsetsFromDb.getOrDefault(partitionOffset.getKey(), partitionOffset.getValue())
 ) == 0) && earliestConsistentSequenceId.compareAndSet(-1, sequenceCheckpoint.getKey())) || (
-pendingCompletionTaskGroups.getOrDefault(groupId, EMPTY_LIST).size() > 0
+someTaskGroupsPendingCompletion(groupId)
 && earliestConsistentSequenceId.compareAndSet(-1, taskCheckpoints.firstKey()))) {
 final SortedMap<Integer, Map<Integer, Long>> latestCheckpoints = new TreeMap<>(
 taskCheckpoints.tailMap(earliestConsistentSequenceId.get())
@@ -1378,7 +1383,7 @@ public class KafkaSupervisor implements Supervisor
 }

 if ((tasksToKill.size() > 0 && tasksToKill.size() == taskGroup.tasks.size()) ||
-(taskGroup.tasks.size() == 0 && pendingCompletionTaskGroups.getOrDefault(groupId, EMPTY_LIST).size() == 0)) {
+(taskGroup.tasks.size() == 0 && !someTaskGroupsPendingCompletion(groupId))) {
 // killing all tasks or no task left in the group ?
 // clear state about the taskgroup so that get latest offset information is fetched from metadata store
 log.warn("Clearing task group [%d] information as no valid tasks left the group", groupId);
@@ -29,7 +29,7 @@ import org.apache.kafka.clients.consumer.KafkaConsumer;
 import org.apache.kafka.clients.producer.KafkaProducer;
 import org.apache.kafka.common.serialization.ByteArrayDeserializer;
 import org.apache.kafka.common.serialization.ByteArraySerializer;
-import org.apache.kafka.common.utils.SystemTime;
+import org.apache.kafka.common.utils.Time;
 import scala.Some;
 import scala.collection.immutable.List$;

@ -81,7 +81,7 @@ public class TestBroker implements Closeable
|
|||||||
|
|
||||||
final KafkaConfig config = new KafkaConfig(props);
|
final KafkaConfig config = new KafkaConfig(props);
|
||||||
|
|
||||||
server = new KafkaServer(config, SystemTime.SYSTEM, Some.apply(StringUtils.format("TestingBroker[%d]-", id)), List$.MODULE$.empty());
|
server = new KafkaServer(config, Time.SYSTEM, Some.apply(StringUtils.format("TestingBroker[%d]-", id)), List$.MODULE$.empty());
|
||||||
server.startup();
|
server.startup();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -311,7 +311,7 @@ public class CacheSchedulerTest
|
|||||||
testDelete();
|
testDelete();
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testDelete() throws InterruptedException
|
private void testDelete() throws InterruptedException
|
||||||
{
|
{
|
||||||
final long period = 1_000L; // Give it some time between attempts to update
|
final long period = 1_000L; // Give it some time between attempts to update
|
||||||
final UriExtractionNamespace namespace = getUriExtractionNamespace(period);
|
final UriExtractionNamespace namespace = getUriExtractionNamespace(period);
|
||||||
|
@ -519,7 +519,7 @@ public class JdbcExtractionNamespaceTest
|
|||||||
private void waitForUpdates(long timeout, long numUpdates) throws InterruptedException
|
private void waitForUpdates(long timeout, long numUpdates) throws InterruptedException
|
||||||
{
|
{
|
||||||
long startTime = System.currentTimeMillis();
|
long startTime = System.currentTimeMillis();
|
||||||
long pre = 0L;
|
long pre;
|
||||||
updateLock.lockInterruptibly();
|
updateLock.lockInterruptibly();
|
||||||
try {
|
try {
|
||||||
pre = updates.get();
|
pre = updates.get();
|
||||||
@ -527,7 +527,7 @@ public class JdbcExtractionNamespaceTest
|
|||||||
finally {
|
finally {
|
||||||
updateLock.unlock();
|
updateLock.unlock();
|
||||||
}
|
}
|
||||||
long post = 0L;
|
long post;
|
||||||
do {
|
do {
|
||||||
// Sleep to spare a few cpu cycles
|
// Sleep to spare a few cpu cycles
|
||||||
Thread.sleep(5);
|
Thread.sleep(5);
|
||||||
|
@ -111,7 +111,7 @@ public class ProtobufInputRowParser implements ByteBufferInputRowParser
|
|||||||
|
|
||||||
fin = this.getClass().getClassLoader().getResourceAsStream(descriptorFilePath);
|
fin = this.getClass().getClassLoader().getResourceAsStream(descriptorFilePath);
|
||||||
if (fin == null) {
|
if (fin == null) {
|
||||||
URL url = null;
|
URL url;
|
||||||
try {
|
try {
|
||||||
url = new URL(descriptorFilePath);
|
url = new URL(descriptorFilePath);
|
||||||
}
|
}
|
||||||
@ -126,7 +126,7 @@ public class ProtobufInputRowParser implements ByteBufferInputRowParser
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
DynamicSchema dynamicSchema = null;
|
DynamicSchema dynamicSchema;
|
||||||
try {
|
try {
|
||||||
dynamicSchema = DynamicSchema.parseFrom(fin);
|
dynamicSchema = DynamicSchema.parseFrom(fin);
|
||||||
}
|
}
|
||||||
|
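A pattern that recurs throughout this commit, including the two ProtobufInputRowParser hunks above, is dropping a dummy "= null" (or "= 0") initializer from a local that is always assigned before it is read. The sketch below shows why the initializer-free form is safer; the parse helper and types are hypothetical, not Druid APIs.

    import java.io.IOException;
    import java.io.InputStream;

    class DefiniteAssignmentSketch
    {
      // Hypothetical stand-in for the real parsing call.
      static Object parse(InputStream in) throws IOException
      {
        return new Object();
      }

      // Before: the dummy initializer satisfies the compiler even if a code path forgets to
      // assign the variable, deferring the failure to a NullPointerException at use time.
      static Object withDummyInitializer(InputStream in)
      {
        Object schema = null;
        try {
          schema = parse(in);
        }
        catch (IOException e) {
          throw new RuntimeException(e);
        }
        return schema;
      }

      // After: with no initializer, javac's definite-assignment analysis proves that every
      // path either assigns the variable or throws before it is read.
      static Object withoutInitializer(InputStream in)
      {
        Object schema;
        try {
          schema = parse(in);
        }
        catch (IOException e) {
          throw new RuntimeException(e);
        }
        return schema;
      }
    }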
@@ -125,12 +125,12 @@ public class VarianceGroupByQueryTest
 .setDataSource(QueryRunnerTestHelper.dataSource)
 .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
 .setDimensions(new DefaultDimensionSpec("quality", "alias"))
-.setAggregatorSpecs(VarianceTestHelper.rowsCount,
+.setAggregatorSpecs(
+QueryRunnerTestHelper.rowsCount,
 VarianceTestHelper.indexVarianceAggr,
-new LongSumAggregatorFactory("idx", "index"))
+new LongSumAggregatorFactory("idx", "index")
-.setPostAggregatorSpecs(
-Collections.singletonList(VarianceTestHelper.stddevOfIndexPostAggr)
 )
+.setPostAggregatorSpecs(Collections.singletonList(VarianceTestHelper.stddevOfIndexPostAggr))
 .setGranularity(QueryRunnerTestHelper.dayGran)
 .build();

@@ -178,12 +178,14 @@ public class VarianceGroupByQueryTest

 GroupByQuery query = GroupByQuery
 .builder()
-.setDataSource(VarianceTestHelper.dataSource)
+.setDataSource(QueryRunnerTestHelper.dataSource)
 .setInterval("2011-04-02/2011-04-04")
 .setDimensions(new DefaultDimensionSpec("quality", "alias"))
-.setAggregatorSpecs(VarianceTestHelper.rowsCount,
-VarianceTestHelper.indexLongSum,
-VarianceTestHelper.indexVarianceAggr)
+.setAggregatorSpecs(
+QueryRunnerTestHelper.rowsCount,
+QueryRunnerTestHelper.indexLongSum,
+VarianceTestHelper.indexVarianceAggr
+)
 .setPostAggregatorSpecs(ImmutableList.of(VarianceTestHelper.stddevOfIndexPostAggr))
 .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
 .setHavingSpec(
@@ -23,6 +23,7 @@ import org.apache.druid.java.util.common.DateTimes;
 import org.apache.druid.query.Druids;
 import org.apache.druid.query.QueryPlus;
 import org.apache.druid.query.QueryRunner;
+import org.apache.druid.query.QueryRunnerTestHelper;
 import org.apache.druid.query.Result;
 import org.apache.druid.query.aggregation.AggregatorFactory;
 import org.apache.druid.query.timeseries.TimeseriesQuery;
@@ -59,13 +60,13 @@ public class VarianceTimeseriesQueryTest
 public void testTimeseriesWithNullFilterOnNonExistentDimension()
 {
 TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
-.dataSource(VarianceTestHelper.dataSource)
+.dataSource(QueryRunnerTestHelper.dataSource)
-.granularity(VarianceTestHelper.dayGran)
+.granularity(QueryRunnerTestHelper.dayGran)
 .filters("bobby", null)
-.intervals(VarianceTestHelper.firstToThird)
+.intervals(QueryRunnerTestHelper.firstToThird)
 .aggregators(VarianceTestHelper.commonPlusVarAggregators)
 .postAggregators(
-VarianceTestHelper.addRowsIndexConstant,
+QueryRunnerTestHelper.addRowsIndexConstant,
 VarianceTestHelper.stddevOfIndexPostAggr
 )
 .descending(descending)
@@ -75,11 +76,11 @@ public class VarianceTimeseriesQueryTest
 new Result<>(
 DateTimes.of("2011-04-01"),
 new TimeseriesResultValue(
-VarianceTestHelper.of(
+QueryRunnerTestHelper.of(
 "rows", 13L,
 "index", 6626.151596069336,
 "addRowsIndexConstant", 6640.151596069336,
-"uniques", VarianceTestHelper.UNIQUES_9,
+"uniques", QueryRunnerTestHelper.UNIQUES_9,
 "index_var", descending ? 368885.6897238851 : 368885.689155086,
 "index_stddev", descending ? 607.3596049490657 : 607.35960448081
 )
@@ -88,11 +89,11 @@ public class VarianceTimeseriesQueryTest
 new Result<>(
 DateTimes.of("2011-04-02"),
 new TimeseriesResultValue(
-VarianceTestHelper.of(
+QueryRunnerTestHelper.of(
 "rows", 13L,
 "index", 5833.2095947265625,
 "addRowsIndexConstant", 5847.2095947265625,
-"uniques", VarianceTestHelper.UNIQUES_9,
+"uniques", QueryRunnerTestHelper.UNIQUES_9,
 "index_var", descending ? 259061.6037088883 : 259061.60216419376,
 "index_stddev", descending ? 508.9809463122252 : 508.98094479478675
 )
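Most of the variance-test edits above re-point shared constants (dataSource, dayGran, UNIQUES_9, rowsCount, and so on) at QueryRunnerTestHelper instead of VarianceTestHelper. VarianceTestHelper appears to inherit these members from QueryRunnerTestHelper, so this looks like the usual fix for the "static member accessed via subclass" inspection; the class names below are made up purely to illustrate that rule.

    class BaseTestHelper
    {
      static final String DATA_SOURCE = "testing";
    }

    class VarianceLikeHelper extends BaseTestHelper
    {
      // inherits DATA_SOURCE from the base class
    }

    class StaticViaSubclassSketch
    {
      // Flagged: the constant is declared on the base class, so naming the subclass here
      // obscures where the member actually lives.
      String before = VarianceLikeHelper.DATA_SOURCE;

      // Preferred, matching the edits above: reference the declaring class directly.
      String after = BaseTestHelper.DATA_SOURCE;
    }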
@@ -358,7 +358,7 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
 short numNonZeroRegisters = addNibbleRegister(bucket, (byte) ((0xff & positionOf1) - registerOffset));
 setNumNonZeroRegisters(numNonZeroRegisters);
 if (numNonZeroRegisters == NUM_BUCKETS) {
-setRegisterOffset(++registerOffset);
+setRegisterOffset((byte) (registerOffset + 1));
 setNumNonZeroRegisters(decrementBuckets());
 }
 }
@@ -421,7 +421,7 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
 }
 if (numNonZero == NUM_BUCKETS) {
 numNonZero = decrementBuckets();
-setRegisterOffset(++myOffset);
+setRegisterOffset((byte) (myOffset + 1));
 setNumNonZeroRegisters(numNonZero);
 }
 } else { // dense
@@ -437,7 +437,7 @@ public abstract class HyperLogLogCollector implements Comparable<HyperLogLogColl
 }
 if (numNonZero == NUM_BUCKETS) {
 numNonZero = decrementBuckets();
-setRegisterOffset(++myOffset);
+setRegisterOffset((byte) (myOffset + 1));
 setNumNonZeroRegisters(numNonZero);
 }
 }
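All three HyperLogLogCollector hunks replace a pre-increment buried in an argument list with explicit arithmetic and an explicit narrowing cast. Assuming the offset variable is a byte, as the inserted cast suggests, the two forms pass the same value; the rewritten form avoids mutating the local as a side effect and makes the int-to-byte narrowing visible. The names below are illustrative only.

    class RegisterOffsetSketch
    {
      private byte storedOffset;

      void setRegisterOffset(byte value)
      {
        storedOffset = value;
      }

      // Before: ++registerOffset silently narrows (byte arithmetic promotes to int, and the
      // compound operator casts back) and also writes the new value into the local.
      void before(byte registerOffset)
      {
        setRegisterOffset(++registerOffset);
      }

      // After, as in the hunks above: no hidden mutation, and the narrowing is spelled out.
      void after(byte registerOffset)
      {
        setRegisterOffset((byte) (registerOffset + 1));
      }
    }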
@@ -41,6 +41,7 @@ public class HyperLogLogCollectorBenchmark extends SimpleBenchmark
 {
 private final HashFunction fn = Hashing.murmur3_128();

+@SuppressWarnings("MismatchedQueryAndUpdateOfCollection") // TODO understand if this field should be used or not
 private final List<HyperLogLogCollector> collectors = new ArrayList<>();

 @Param({"true"}) boolean targetIsDirect;
@@ -54,7 +54,7 @@ import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.SequenceFile.Writer;
 import org.apache.hadoop.io.compress.CompressionCodec;
-import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeComparator;
@@ -524,7 +524,7 @@ public class IndexGeneratorJobTest
 false,
 false,
 false,
-ImmutableMap.of(JobContext.NUM_REDUCES, "0"), //verifies that set num reducers is ignored
+ImmutableMap.of(MRJobConfig.NUM_REDUCES, "0"), //verifies that set num reducers is ignored
 false,
 useCombiner,
 null,
@@ -280,7 +280,7 @@ public class HadoopIndexTask extends HadoopTask implements ChatHandler
 toolbox.getSegmentPusher().getPathForHadoop()
 };

-HadoopIngestionSpec indexerSchema = null;
+HadoopIngestionSpec indexerSchema;
 final ClassLoader oldLoader = Thread.currentThread().getContextClassLoader();
 Class<?> determinePartitionsRunnerClass = determinePartitionsInnerProcessingRunner.getClass();
 Method determinePartitionsInnerProcessingRunTask = determinePartitionsRunnerClass.getMethod(
@@ -963,7 +963,7 @@ public class HttpRemoteTaskRunner implements WorkerTaskRunner, TaskLogStreamer
 pendingTasksExec.execute(
 () -> {
 while (!Thread.interrupted() && lifecycleLock.awaitStarted(1, TimeUnit.MILLISECONDS)) {
-ImmutableWorkerInfo immutableWorker = null;
+ImmutableWorkerInfo immutableWorker;
 HttpRemoteTaskRunnerWorkItem taskItem = null;
 try {
 synchronized (statusLock) {
@@ -289,7 +289,7 @@ public class SupervisorManager
 metadataSupervisorManager.insert(id, spec);
 }

-Supervisor supervisor = null;
+Supervisor supervisor;
 try {
 supervisor = spec.createSupervisor();
 supervisor.start();
@@ -576,7 +576,7 @@ public abstract class WorkerTaskManager
 @Override
 public void handle()
 {
-TaskAnnouncement announcement = null;
+TaskAnnouncement announcement;
 synchronized (lock) {
 if (runningTasks.containsKey(task.getId()) || completedTasks.containsKey(task.getId())) {
 log.warn(
@@ -174,7 +174,6 @@ public class IngestSegmentFirehoseFactoryTest
 final IndexerSQLMetadataStorageCoordinator mdc = new IndexerSQLMetadataStorageCoordinator(null, null, null)
 {
 private final Set<DataSegment> published = new HashSet<>();
-private final Set<DataSegment> nuked = new HashSet<>();

 @Override
 public List<DataSegment> getUsedSegmentsForInterval(String dataSource, Interval interval)
@@ -210,7 +209,7 @@ public class IngestSegmentFirehoseFactoryTest
 @Override
 public void deleteSegments(Set<DataSegment> segments)
 {
-nuked.addAll(segments);
+// do nothing
 }
 };
 final LocalTaskActionClientFactory tac = new LocalTaskActionClientFactory(
@@ -596,7 +595,6 @@ public class IngestSegmentFirehoseFactoryTest
 final int numSegmentsPerPartitionChunk = 5;
 final int numPartitionChunksPerTimelineObject = 10;
 final int numSegments = numSegmentsPerPartitionChunk * numPartitionChunksPerTimelineObject;
-final List<DataSegment> segments = new ArrayList<>(numSegments);
 final Interval interval = Intervals.of("2017-01-01/2017-01-02");
 final String version = "1";

@@ -621,7 +619,6 @@ public class IngestSegmentFirehoseFactoryTest
 1,
 1
 );
-segments.add(segment);

 final PartitionChunk<DataSegment> partitionChunk = new NumberedPartitionChunk<>(
 i,
@@ -19,20 +19,15 @@

 package org.apache.druid.indexing.test;

-import com.google.common.collect.Sets;
 import org.apache.druid.segment.loading.DataSegmentKiller;
 import org.apache.druid.timeline.DataSegment;

-import java.util.Set;

 public class TestDataSegmentKiller implements DataSegmentKiller
 {
-private final Set<DataSegment> killedSegments = Sets.newConcurrentHashSet();

 @Override
 public void kill(DataSegment segment)
 {
-killedSegments.add(segment);
+// do nothing
 }

 @Override
@@ -19,19 +19,15 @@

 package org.apache.druid.indexing.test;

-import com.google.common.collect.Sets;
 import org.apache.druid.segment.loading.DataSegmentPusher;
 import org.apache.druid.timeline.DataSegment;

 import java.io.File;
 import java.net.URI;
 import java.util.Map;
-import java.util.Set;

 public class TestDataSegmentPusher implements DataSegmentPusher
 {
-private final Set<DataSegment> pushedSegments = Sets.newConcurrentHashSet();

 @Deprecated
 @Override
 public String getPathForHadoop(String dataSource)
@@ -48,7 +44,6 @@ public class TestDataSegmentPusher implements DataSegmentPusher
 @Override
 public DataSegment push(File file, DataSegment segment, boolean useUniquePath)
 {
-pushedSegments.add(segment);
 return segment;
 }

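The TestDataSegmentKiller and TestDataSegmentPusher edits above remove sets that were only ever added to and never read; the same situation is handled elsewhere in this commit by suppressing the MismatchedQueryAndUpdateOfCollection inspection where the field is kept on purpose. A minimal illustration of the pattern, with hypothetical test fakes:

    import java.util.HashSet;
    import java.util.Set;

    // Write-only collection: no test ever reads "killed", so the inspection flags it.
    class FakeKillerBefore
    {
      private final Set<String> killed = new HashSet<>();

      void kill(String segmentId)
      {
        killed.add(segmentId);
      }
    }

    // The fix used in this commit: drop the field and leave an explicitly empty body.
    class FakeKillerAfter
    {
      void kill(String segmentId)
      {
        // do nothing
      }
    }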
@@ -88,12 +88,12 @@ public class CoordinatorResourceTestClient
 // return a list of the segment dates for the specified datasource
 public List<String> getMetadataSegments(final String dataSource)
 {
-ArrayList<String> segments = null;
+ArrayList<String> segments;
 try {
 StatusResponseHolder response = makeRequest(HttpMethod.GET, getMetadataSegmentsURL(dataSource));

 segments = jsonMapper.readValue(
-response.getContent(), new TypeReference<ArrayList<String>>()
+response.getContent(), new TypeReference<List<String>>()
 {
 }
 );
@@ -107,12 +107,12 @@ public class CoordinatorResourceTestClient
 // return a list of the segment dates for the specified datasource
 public List<String> getSegmentIntervals(final String dataSource)
 {
-ArrayList<String> segments = null;
+ArrayList<String> segments;
 try {
 StatusResponseHolder response = makeRequest(HttpMethod.GET, getIntervalsURL(dataSource));

 segments = jsonMapper.readValue(
-response.getContent(), new TypeReference<ArrayList<String>>()
+response.getContent(), new TypeReference<List<String>>()
 {
 }
 );
@@ -125,7 +125,7 @@ public class CoordinatorResourceTestClient

 private Map<String, Integer> getLoadStatus()
 {
-Map<String, Integer> status = null;
+Map<String, Integer> status;
 try {
 StatusResponseHolder response = makeRequest(HttpMethod.GET, getLoadStatusURL());

@@ -60,7 +60,7 @@ public class ITHadoopIndexTest extends AbstractIndexerTest

 private void loadData(String hadoopDir)
 {
-String indexerSpec = "";
+String indexerSpec;

 try {
 LOG.info("indexerFile name: [%s]", BATCH_TASK);
@@ -100,7 +100,7 @@ public abstract class AbstractITRealtimeIndexTaskTest extends AbstractIndexerTes
 TimeUnit.SECONDS.sleep(5);

 // put the timestamps into the query structure
-String query_response_template = null;
+String query_response_template;
 InputStream is = ITRealtimeIndexTaskTest.class.getResourceAsStream(getQueriesResource());
 if (null == is) {
 throw new ISE("could not open query file: %s", getQueriesResource());
@@ -61,7 +61,7 @@ public class ITAppenderatorDriverRealtimeIndexTaskTest extends AbstractITRealtim
 final ServerDiscoverySelector eventReceiverSelector = factory.createSelector(EVENT_RECEIVER_SERVICE_NAME);
 eventReceiverSelector.start();
 BufferedReader reader = null;
-InputStreamReader isr = null;
+InputStreamReader isr;
 try {
 isr = new InputStreamReader(
 ITRealtimeIndexTaskTest.class.getResourceAsStream(EVENT_DATA_FILE),
@@ -197,7 +197,6 @@ public class ITKafkaTest extends AbstractIndexerTest
 consumerProperties.put("zookeeper.connection.timeout.ms", "15000");
 consumerProperties.put("zookeeper.sync.time.ms", "5000");
 consumerProperties.put("group.id", Long.toString(System.currentTimeMillis()));
-consumerProperties.put("zookeeper.sync.time.ms", "5000");
 consumerProperties.put("fetch.message.max.bytes", "1048586");
 consumerProperties.put("auto.offset.reset", "smallest");
 consumerProperties.put("auto.commit.enable", "false");
@@ -249,20 +248,20 @@ public class ITKafkaTest extends AbstractIndexerTest
 segmentsExist = true;

 // put the timestamps into the query structure
-String query_response_template = null;
+String queryResponseTemplate;
 InputStream is = ITKafkaTest.class.getResourceAsStream(QUERIES_FILE);
 if (null == is) {
 throw new ISE("could not open query file: %s", QUERIES_FILE);
 }

 try {
-query_response_template = IOUtils.toString(is, "UTF-8");
+queryResponseTemplate = IOUtils.toString(is, "UTF-8");
 }
 catch (IOException e) {
 throw new ISE(e, "could not read query file: %s", QUERIES_FILE);
 }

-String queryStr = query_response_template
+String queryStr = queryResponseTemplate
 .replaceAll("%%DATASOURCE%%", DATASOURCE)
 // time boundary
 .replace("%%TIMEBOUNDARY_RESPONSE_TIMESTAMP%%", TIMESTAMP_FMT.print(dtFirst))
@@ -74,7 +74,7 @@ public class ITRealtimeIndexTaskTest extends AbstractITRealtimeIndexTaskTest
 final ServerDiscoverySelector eventReceiverSelector = factory.createSelector(EVENT_RECEIVER_SERVICE_NAME);
 eventReceiverSelector.start();
 BufferedReader reader = null;
-InputStreamReader isr = null;
+InputStreamReader isr;
 try {
 isr = new InputStreamReader(
 ITRealtimeIndexTaskTest.class.getResourceAsStream(EVENT_DATA_FILE),
pom.xml
@@ -337,7 +337,7 @@
 <dependency>
 <groupId>com.google.errorprone</groupId>
 <artifactId>error_prone_annotations</artifactId>
-<version>2.2.0</version>
+<version>2.3.2</version>
 </dependency>
 <dependency>
 <groupId>com.ibm.icu</groupId>
@@ -1265,8 +1265,8 @@
 <arg>-Xep:PreconditionsInvalidPlaceholder:ERROR</arg>
 <arg>-Xep:MissingOverride:ERROR</arg>
 <arg>-Xep:DefaultCharset:ERROR</arg>
+<arg>-Xep:QualifierOrScopeOnInjectMethod:ERROR</arg>

-<arg>-Xep:ArgumentParameterSwap</arg>
 <arg>-Xep:AssistedInjectAndInjectOnSameConstructor</arg>
 <arg>-Xep:AutoFactoryAtInject</arg>
 <arg>-Xep:ClassName</arg>
@@ -1289,7 +1289,6 @@
 <arg>-Xep:NumericEquality</arg>
 <arg>-Xep:ParameterPackage</arg>
 <arg>-Xep:ProtoStringFieldReferenceEquality</arg>
-<arg>-Xep:QualifierOnMethodWithoutProvides</arg>
 <arg>-Xep:UnlockMethod</arg>
 </compilerArgs>
 </configuration>
@@ -1297,14 +1296,14 @@
 <dependency>
 <groupId>org.codehaus.plexus</groupId>
 <artifactId>plexus-compiler-javac-errorprone</artifactId>
-<version>2.8.1</version>
+<version>2.8.5</version>
 </dependency>
 <!-- override plexus-compiler-javac-errorprone's dependency on
 Error Prone with the latest version -->
 <dependency>
 <groupId>com.google.errorprone</groupId>
 <artifactId>error_prone_core</artifactId>
-<version>2.0.19</version>
+<version>2.3.2</version>
 </dependency>
 </dependencies>
 </plugin>
@@ -95,11 +95,14 @@ public class GroupByQuery extends BaseQuery<Row>

 private final VirtualColumns virtualColumns;
 private final LimitSpec limitSpec;
+@Nullable
 private final HavingSpec havingSpec;
+@Nullable
 private final DimFilter dimFilter;
 private final List<DimensionSpec> dimensions;
 private final List<AggregatorFactory> aggregatorSpecs;
 private final List<PostAggregator> postAggregatorSpecs;
+@Nullable
 private final List<List<String>> subtotalsSpec;

 private final boolean applyLimitPushDown;
@@ -115,9 +118,9 @@ public class GroupByQuery extends BaseQuery<Row>
 @JsonProperty("dimensions") List<DimensionSpec> dimensions,
 @JsonProperty("aggregations") List<AggregatorFactory> aggregatorSpecs,
 @JsonProperty("postAggregations") List<PostAggregator> postAggregatorSpecs,
-@JsonProperty("having") HavingSpec havingSpec,
+@JsonProperty("having") @Nullable HavingSpec havingSpec,
 @JsonProperty("limitSpec") LimitSpec limitSpec,
-@JsonProperty("subtotalsSpec") List<List<String>> subtotalsSpec,
+@JsonProperty("subtotalsSpec") @Nullable List<List<String>> subtotalsSpec,
 @JsonProperty("context") Map<String, Object> context
 )
 {
@@ -168,12 +171,12 @@ public class GroupByQuery extends BaseQuery<Row>
 final DataSource dataSource,
 final QuerySegmentSpec querySegmentSpec,
 final VirtualColumns virtualColumns,
-final DimFilter dimFilter,
+final @Nullable DimFilter dimFilter,
 final Granularity granularity,
-final List<DimensionSpec> dimensions,
+final @Nullable List<DimensionSpec> dimensions,
-final List<AggregatorFactory> aggregatorSpecs,
+final @Nullable List<AggregatorFactory> aggregatorSpecs,
-final List<PostAggregator> postAggregatorSpecs,
+final @Nullable List<PostAggregator> postAggregatorSpecs,
-final HavingSpec havingSpec,
+final @Nullable HavingSpec havingSpec,
 final LimitSpec limitSpec,
 final @Nullable List<List<String>> subtotalsSpec,
 final @Nullable Function<Sequence<Row>, Sequence<Row>> postProcessingFn,
@@ -198,7 +201,7 @@ public class GroupByQuery extends BaseQuery<Row>
 this.havingSpec = havingSpec;
 this.limitSpec = LimitSpec.nullToNoopLimitSpec(limitSpec);

-this.subtotalsSpec = verifySubtotalsSpec(subtotalsSpec, dimensions);
+this.subtotalsSpec = verifySubtotalsSpec(subtotalsSpec, this.dimensions);

 // Verify no duplicate names between dimensions, aggregators, and postAggregators.
 // They will all end up in the same namespace in the returned Rows and we can't have them clobbering each other.
@@ -211,7 +214,11 @@ public class GroupByQuery extends BaseQuery<Row>
 this.applyLimitPushDown = determineApplyLimitPushDown();
 }

-private List<List<String>> verifySubtotalsSpec(List<List<String>> subtotalsSpec, List<DimensionSpec> dimensions)
+@Nullable
+private List<List<String>> verifySubtotalsSpec(
+@Nullable List<List<String>> subtotalsSpec,
+List<DimensionSpec> dimensions
+)
 {
 // if subtotalsSpec exists then validate that all are subsets of dimensions spec and are in same order.
 // For example if we had {D1, D2, D3} in dimensions spec then
@@ -736,20 +743,37 @@ public class GroupByQuery extends BaseQuery<Row>

 public static class Builder
 {
+@Nullable
+private static List<List<String>> copySubtotalSpec(@Nullable List<List<String>> subtotalsSpec)
+{
+if (subtotalsSpec == null) {
+return null;
+}
+return subtotalsSpec.stream().map(ArrayList::new).collect(Collectors.toList());
+}
+
 private DataSource dataSource;
 private QuerySegmentSpec querySegmentSpec;
 private VirtualColumns virtualColumns;
+@Nullable
 private DimFilter dimFilter;
 private Granularity granularity;
+@Nullable
 private List<DimensionSpec> dimensions;
+@Nullable
 private List<AggregatorFactory> aggregatorSpecs;
+@Nullable
 private List<PostAggregator> postAggregatorSpecs;
+@Nullable
 private HavingSpec havingSpec;

 private Map<String, Object> context;

+@Nullable
 private List<List<String>> subtotalsSpec = null;
+@Nullable
 private LimitSpec limitSpec = null;
+@Nullable
 private Function<Sequence<Row>, Sequence<Row>> postProcessingFn;
 private List<OrderByColumnSpec> orderByColumnSpecs = new ArrayList<>();
 private int limit = Integer.MAX_VALUE;
@@ -787,6 +811,7 @@ public class GroupByQuery extends BaseQuery<Row>
 postAggregatorSpecs = builder.postAggregatorSpecs;
 havingSpec = builder.havingSpec;
 limitSpec = builder.limitSpec;
+subtotalsSpec = copySubtotalSpec(builder.subtotalsSpec);
 postProcessingFn = builder.postProcessingFn;
 limit = builder.limit;
 orderByColumnSpecs = new ArrayList<>(builder.orderByColumnSpecs);
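The new Builder.copySubtotalSpec helper gives the Builder copy constructor its own copies of the nested subtotals lists, so a query built from a copied builder is not affected if the original builder's lists are mutated afterwards. A standalone sketch of the same copy-or-null idiom:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    class CopySubtotalsSketch
    {
      // Null stays null; otherwise every inner list is copied into a fresh ArrayList.
      static List<List<String>> copy(List<List<String>> subtotalsSpec)
      {
        if (subtotalsSpec == null) {
          return null;
        }
        return subtotalsSpec.stream().map(ArrayList::new).collect(Collectors.toList());
      }

      public static void main(String[] args)
      {
        List<List<String>> original = new ArrayList<>();
        original.add(new ArrayList<>(Arrays.asList("d1", "d2")));
        List<List<String>> copied = copy(original);
        original.get(0).add("d3");
        System.out.println(copied.get(0)); // prints [d1, d2]: the copy is isolated
      }
    }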
@@ -233,7 +233,7 @@ public class BufferArrayGrouper implements IntGrouper

 return new CloseableIterator<Entry<Integer>>()
 {
-int cur = -1;
+int cur;
 boolean findNext = false;

 {
@@ -173,6 +173,7 @@ public class BufferHashGrouper<KeyType> extends AbstractBufferHashGrouper<KeyTyp
 }

 if (sorted) {
+@SuppressWarnings("MismatchedQueryAndUpdateOfCollection")
 final List<Integer> wrappedOffsets = new AbstractList<Integer>()
 {
 @Override
@@ -254,6 +254,7 @@ public class LimitedBufferHashGrouper<KeyType> extends AbstractBufferHashGrouper
 {
 final int size = offsetHeap.getHeapSize();

+@SuppressWarnings("MismatchedQueryAndUpdateOfCollection")
 final List<Integer> wrappedOffsets = new AbstractList<Integer>()
 {
 @Override
@@ -72,7 +72,7 @@ public class SpillingGrouper<KeyType> implements Grouper<KeyType>
 private final List<File> dictionaryFiles = new ArrayList<>();
 private final boolean sortHasNonGroupingFields;

-private boolean spillingAllowed = false;
+private boolean spillingAllowed;

 public SpillingGrouper(
 final Supplier<ByteBuffer> bufferSupplier,
@@ -122,7 +122,7 @@ public class StringComparators
 int ch1 = str1.codePointAt(pos[0]);
 int ch2 = str2.codePointAt(pos[1]);

-int result = 0;
+int result;

 if (isDigit(ch1)) {
 result = isDigit(ch2) ? compareNumbers(str1, str2, pos) : -1;
@@ -135,7 +135,7 @@ public class StringComparators
 }
 }

-return str1.length() - str2.length();
+return Integer.compare(str1.length(), str2.length());
 }

 private int compareNumbers(String str0, String str1, int[] pos)
|
||||||
|
@ -33,7 +33,7 @@ public class ScanQueryLimitRowIterator implements CloseableIterator<ScanResultVa
|
|||||||
{
|
{
|
||||||
private Yielder<ScanResultValue> yielder;
|
private Yielder<ScanResultValue> yielder;
|
||||||
private String resultFormat;
|
private String resultFormat;
|
||||||
private long limit = 0;
|
private long limit;
|
||||||
private long count = 0;
|
private long count = 0;
|
||||||
|
|
||||||
public ScanQueryLimitRowIterator(
|
public ScanQueryLimitRowIterator(
|
||||||
|
@ -285,22 +285,22 @@ public class SearchQueryQueryToolChest extends QueryToolChest<Result<SearchResul
|
|||||||
@Override
|
@Override
|
||||||
public SearchHit apply(@Nullable Object input)
|
public SearchHit apply(@Nullable Object input)
|
||||||
{
|
{
|
||||||
String dim = null;
|
String dim;
|
||||||
String val = null;
|
String val;
|
||||||
Integer cnt = null;
|
Integer count;
|
||||||
if (input instanceof Map) {
|
if (input instanceof Map) {
|
||||||
dim = outputNameMap.get((String) ((Map) input).get("dimension"));
|
dim = outputNameMap.get((String) ((Map) input).get("dimension"));
|
||||||
val = (String) ((Map) input).get("value");
|
val = (String) ((Map) input).get("value");
|
||||||
cnt = (Integer) ((Map) input).get("count");
|
count = (Integer) ((Map) input).get("count");
|
||||||
} else if (input instanceof SearchHit) {
|
} else if (input instanceof SearchHit) {
|
||||||
SearchHit cached = (SearchHit) input;
|
SearchHit cached = (SearchHit) input;
|
||||||
dim = outputNameMap.get(cached.getDimension());
|
dim = outputNameMap.get(cached.getDimension());
|
||||||
val = cached.getValue();
|
val = cached.getValue();
|
||||||
cnt = cached.getCount();
|
count = cached.getCount();
|
||||||
} else {
|
} else {
|
||||||
throw new IAE("Unknown format [%s]", input.getClass());
|
throw new IAE("Unknown format [%s]", input.getClass());
|
||||||
}
|
}
|
||||||
return new SearchHit(dim, val, cnt);
|
return new SearchHit(dim, val, count);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
@ -23,7 +23,6 @@ import com.google.common.collect.Lists;
|
|||||||
import com.google.common.collect.Maps;
|
import com.google.common.collect.Maps;
|
||||||
import com.google.common.collect.MinMaxPriorityQueue;
|
import com.google.common.collect.MinMaxPriorityQueue;
|
||||||
import com.google.common.primitives.Longs;
|
import com.google.common.primitives.Longs;
|
||||||
import org.apache.druid.java.util.common.guava.Comparators;
|
|
||||||
import org.apache.druid.query.Result;
|
import org.apache.druid.query.Result;
|
||||||
import org.joda.time.DateTime;
|
import org.joda.time.DateTime;
|
||||||
|
|
||||||
@ -137,7 +136,7 @@ public class SelectResultValueBuilder
|
|||||||
protected Queue<EventHolder> instantiatePQueue()
|
protected Queue<EventHolder> instantiatePQueue()
|
||||||
{
|
{
|
||||||
int threshold = pagingSpec.getThreshold();
|
int threshold = pagingSpec.getThreshold();
|
||||||
return MinMaxPriorityQueue.orderedBy(descending ? Comparators.inverse(comparator) : comparator)
|
return MinMaxPriorityQueue.orderedBy(descending ? comparator.reversed() : comparator)
|
||||||
.maximumSize(threshold > 0 ? threshold : Integer.MAX_VALUE)
|
.maximumSize(threshold > 0 ? threshold : Integer.MAX_VALUE)
|
||||||
.create();
|
.create();
|
||||||
}
|
}
|
||||||
|
@ -84,7 +84,7 @@ public class AggregateTopNMetricFirstAlgorithm implements TopNAlgorithm<int[], T
|
|||||||
|
|
||||||
PooledTopNAlgorithm singleMetricAlgo = new PooledTopNAlgorithm(storageAdapter, singleMetricQuery, bufferPool);
|
PooledTopNAlgorithm singleMetricAlgo = new PooledTopNAlgorithm(storageAdapter, singleMetricQuery, bufferPool);
|
||||||
PooledTopNAlgorithm.PooledTopNParams singleMetricParam = null;
|
PooledTopNAlgorithm.PooledTopNParams singleMetricParam = null;
|
||||||
int[] dimValSelector = null;
|
int[] dimValSelector;
|
||||||
try {
|
try {
|
||||||
singleMetricParam = singleMetricAlgo.makeInitParams(params.getSelectorPlus(), params.getCursor());
|
singleMetricParam = singleMetricAlgo.makeInitParams(params.getSelectorPlus(), params.getCursor());
|
||||||
singleMetricAlgo.run(
|
singleMetricAlgo.run(
|
||||||
|
@ -21,7 +21,6 @@ package org.apache.druid.query.topn;
|
|||||||
|
|
||||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||||
import org.apache.druid.java.util.common.guava.Comparators;
|
|
||||||
import org.apache.druid.query.aggregation.AggregatorFactory;
|
import org.apache.druid.query.aggregation.AggregatorFactory;
|
||||||
import org.apache.druid.query.aggregation.PostAggregator;
|
import org.apache.druid.query.aggregation.PostAggregator;
|
||||||
import org.apache.druid.query.dimension.DimensionSpec;
|
import org.apache.druid.query.dimension.DimensionSpec;
|
||||||
@ -67,23 +66,7 @@ public class InvertedTopNMetricSpec implements TopNMetricSpec
|
|||||||
final List<PostAggregator> postAggregatorSpecs
|
final List<PostAggregator> postAggregatorSpecs
|
||||||
)
|
)
|
||||||
{
|
{
|
||||||
return Comparators.inverse(
|
return Comparator.nullsFirst(delegate.getComparator(aggregatorSpecs, postAggregatorSpecs).reversed());
|
||||||
new Comparator()
|
|
||||||
{
|
|
||||||
@Override
|
|
||||||
public int compare(Object o1, Object o2)
|
|
||||||
{
|
|
||||||
// nulls last
|
|
||||||
if (o1 == null) {
|
|
||||||
return 1;
|
|
||||||
}
|
|
||||||
if (o2 == null) {
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
return delegate.getComparator(aggregatorSpecs, postAggregatorSpecs).compare(o1, o2);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
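The InvertedTopNMetricSpec change collapses a hand-written nulls-last comparator wrapped in an inversion into Comparator.nullsFirst(delegate.reversed()). Reversing a nulls-last order also moves the nulls to the front, so the composed form should produce the same ordering as the old anonymous class; the check below exercises that equivalence on plain integers.

    import java.util.Arrays;
    import java.util.Comparator;
    import java.util.List;

    class NullsFirstReversedSketch
    {
      public static void main(String[] args)
      {
        Comparator<Integer> delegate = Comparator.naturalOrder();

        // Old shape: nulls-last wrapper around the delegate, then the whole ordering inverted.
        Comparator<Integer> oldStyle = Comparator.nullsLast(delegate).reversed();

        // New shape from the hunk above: reverse the delegate, then put nulls first.
        Comparator<Integer> newStyle = Comparator.nullsFirst(delegate.reversed());

        List<Integer> a = Arrays.asList(3, null, 1, 2);
        List<Integer> b = Arrays.asList(3, null, 1, 2);
        a.sort(oldStyle);
        b.sort(newStyle);
        System.out.println(a); // [null, 3, 2, 1]
        System.out.println(b); // [null, 3, 2, 1]
      }
    }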
@ -78,7 +78,6 @@ import java.util.List;
|
|||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.Objects;
|
import java.util.Objects;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
import java.util.TreeSet;
|
|
||||||
|
|
||||||
public class IndexIO
|
public class IndexIO
|
||||||
{
|
{
|
||||||
@ -484,14 +483,6 @@ public class IndexIO
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Set<String> colSet = new TreeSet<>();
|
|
||||||
for (String dimension : index.getAvailableDimensions()) {
|
|
||||||
colSet.add(dimension);
|
|
||||||
}
|
|
||||||
for (String metric : index.getAvailableMetrics()) {
|
|
||||||
colSet.add(metric);
|
|
||||||
}
|
|
||||||
|
|
||||||
columns.put(
|
columns.put(
|
||||||
ColumnHolder.TIME_COLUMN_NAME,
|
ColumnHolder.TIME_COLUMN_NAME,
|
||||||
new ColumnBuilder()
|
new ColumnBuilder()
|
||||||
|
@ -47,7 +47,7 @@ public class BlockLayoutColumnarLongsSerializer implements ColumnarLongsSerializ
|
|||||||
private int numInserted = 0;
|
private int numInserted = 0;
|
||||||
private int numInsertedForNextFlush;
|
private int numInsertedForNextFlush;
|
||||||
|
|
||||||
private ByteBuffer endBuffer = null;
|
private ByteBuffer endBuffer;
|
||||||
|
|
||||||
BlockLayoutColumnarLongsSerializer(
|
BlockLayoutColumnarLongsSerializer(
|
||||||
SegmentWriteOutMedium segmentWriteOutMedium,
|
SegmentWriteOutMedium segmentWriteOutMedium,
|
||||||
|
@ -397,7 +397,7 @@ public class Filters
|
|||||||
{
|
{
|
||||||
private final int bitmapIndexCardinality = bitmapIndex.getCardinality();
|
private final int bitmapIndexCardinality = bitmapIndex.getCardinality();
|
||||||
private int nextIndex = 0;
|
private int nextIndex = 0;
|
||||||
private int found = -1;
|
private int found;
|
||||||
|
|
||||||
{
|
{
|
||||||
found = findNextIndex();
|
found = findNextIndex();
|
||||||
|
@ -174,7 +174,7 @@ public class LikeFilter implements Filter
|
|||||||
return new IntIterator()
|
return new IntIterator()
|
||||||
{
|
{
|
||||||
int currIndex = startIndex;
|
int currIndex = startIndex;
|
||||||
int found = -1;
|
int found;
|
||||||
|
|
||||||
{
|
{
|
||||||
found = findNext();
|
found = findNext();
|
||||||
|
@ -322,7 +322,7 @@ public class JavaScriptAggregatorTest
|
|||||||
|
|
||||||
// warmup
|
// warmup
|
||||||
int i = 0;
|
int i = 0;
|
||||||
long t = 0;
|
long t;
|
||||||
while (i < 10000) {
|
while (i < 10000) {
|
||||||
aggregate(selector, aggRhino);
|
aggregate(selector, aggRhino);
|
||||||
++i;
|
++i;
|
||||||
|
@ -137,9 +137,6 @@ public class SelectBinaryFnTest
|
|||||||
Assert.assertEquals(res1.getTimestamp(), merged.getTimestamp());
|
Assert.assertEquals(res1.getTimestamp(), merged.getTimestamp());
|
||||||
|
|
||||||
LinkedHashMap<String, Integer> expectedPageIds = Maps.newLinkedHashMap();
|
LinkedHashMap<String, Integer> expectedPageIds = Maps.newLinkedHashMap();
|
||||||
expectedPageIds.put(segmentId1, 0);
|
|
||||||
expectedPageIds.put(segmentId2, 0);
|
|
||||||
expectedPageIds.put(segmentId2, 1);
|
|
||||||
expectedPageIds.put(segmentId1, 1);
|
expectedPageIds.put(segmentId1, 1);
|
||||||
expectedPageIds.put(segmentId2, 2);
|
expectedPageIds.put(segmentId2, 2);
|
||||||
|
|
||||||
|
@ -24,7 +24,6 @@ import com.google.common.collect.Iterables;
|
|||||||
import com.google.common.io.CharSource;
|
import com.google.common.io.CharSource;
|
||||||
import org.apache.commons.lang.StringUtils;
|
import org.apache.commons.lang.StringUtils;
|
||||||
import org.apache.druid.java.util.common.DateTimes;
|
import org.apache.druid.java.util.common.DateTimes;
|
||||||
import org.apache.druid.java.util.common.Intervals;
|
|
||||||
import org.apache.druid.java.util.common.granularity.Granularities;
|
import org.apache.druid.java.util.common.granularity.Granularities;
|
||||||
import org.apache.druid.query.Druids;
|
import org.apache.druid.query.Druids;
|
||||||
import org.apache.druid.query.QueryPlus;
|
import org.apache.druid.query.QueryPlus;
|
||||||
@ -40,7 +39,6 @@ import org.apache.druid.segment.TestIndex;
|
|||||||
import org.apache.druid.segment.incremental.IncrementalIndex;
|
import org.apache.druid.segment.incremental.IncrementalIndex;
|
||||||
import org.apache.druid.segment.incremental.IncrementalIndexSchema;
|
import org.apache.druid.segment.incremental.IncrementalIndexSchema;
|
||||||
import org.apache.druid.timeline.DataSegment;
|
import org.apache.druid.timeline.DataSegment;
|
||||||
import org.apache.druid.timeline.TimelineObjectHolder;
|
|
||||||
import org.apache.druid.timeline.VersionedIntervalTimeline;
|
import org.apache.druid.timeline.VersionedIntervalTimeline;
|
||||||
import org.apache.druid.timeline.partition.NoneShardSpec;
|
import org.apache.druid.timeline.partition.NoneShardSpec;
|
||||||
import org.apache.druid.timeline.partition.SingleElementPartitionChunk;
|
import org.apache.druid.timeline.partition.SingleElementPartitionChunk;
|
||||||
@ -80,7 +78,6 @@ public class TimeBoundaryQueryRunnerTest
|
|||||||
);
|
);
|
||||||
private static Segment segment0;
|
private static Segment segment0;
|
||||||
private static Segment segment1;
|
private static Segment segment1;
|
||||||
private static List<String> segmentIdentifiers;
|
|
||||||
|
|
||||||
public TimeBoundaryQueryRunnerTest(
|
public TimeBoundaryQueryRunnerTest(
|
||||||
QueryRunner runner
|
QueryRunner runner
|
||||||
@ -157,11 +154,6 @@ public class TimeBoundaryQueryRunnerTest
|
|||||||
timeline.add(index0.getInterval(), "v1", new SingleElementPartitionChunk(segment0));
|
timeline.add(index0.getInterval(), "v1", new SingleElementPartitionChunk(segment0));
|
||||||
timeline.add(index1.getInterval(), "v1", new SingleElementPartitionChunk(segment1));
|
timeline.add(index1.getInterval(), "v1", new SingleElementPartitionChunk(segment1));
|
||||||
|
|
||||||
segmentIdentifiers = new ArrayList<>();
|
|
||||||
for (TimelineObjectHolder<String, ?> holder : timeline.lookup(Intervals.of("2011-01-12/2011-01-17"))) {
|
|
||||||
segmentIdentifiers.add(makeIdentifier(holder.getInterval(), holder.getVersion()));
|
|
||||||
}
|
|
||||||
|
|
||||||
return QueryRunnerTestHelper.makeFilteringQueryRunner(timeline, factory);
|
return QueryRunnerTestHelper.makeFilteringQueryRunner(timeline, factory);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -57,6 +57,7 @@ import org.apache.druid.query.topn.TopNQueryBuilder;
|
|||||||
import org.apache.druid.query.topn.TopNResultValue;
|
import org.apache.druid.query.topn.TopNResultValue;
|
||||||
import org.apache.druid.segment.incremental.IncrementalIndex;
|
import org.apache.druid.segment.incremental.IncrementalIndex;
|
||||||
import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory;
|
import org.apache.druid.segment.writeout.SegmentWriteOutMediumFactory;
|
||||||
|
import org.junit.Ignore;
|
||||||
import org.junit.Test;
|
import org.junit.Test;
|
||||||
import org.junit.runner.RunWith;
|
import org.junit.runner.RunWith;
|
||||||
import org.junit.runners.Parameterized;
|
import org.junit.runners.Parameterized;
|
||||||
@ -167,6 +168,7 @@ public class SchemalessTestSimpleTest
|
|||||||
|
|
||||||
// @Test TODO: Handling of null values is inconsistent right now, need to make it all consistent and re-enable test
|
// @Test TODO: Handling of null values is inconsistent right now, need to make it all consistent and re-enable test
|
||||||
// TODO: Complain to Eric when you see this. It shouldn't be like this...
|
// TODO: Complain to Eric when you see this. It shouldn't be like this...
|
||||||
|
@Ignore
|
||||||
@SuppressWarnings("unused")
|
@SuppressWarnings("unused")
|
||||||
public void testFullOnTopN()
|
public void testFullOnTopN()
|
||||||
{
|
{
|
||||||
|
@@ -137,7 +137,6 @@ public abstract class BytesBoundedLinkedQueue<E> extends AbstractQueue<E> implem
     checkNotNull(e);
     checkSize(e);
     long nanos = unit.toNanos(timeout);
-    boolean added = false;
     putLock.lockInterruptibly();
     try {
       while (currentSize.get() + getBytesSize(e) > capacity) {
@@ -148,16 +147,12 @@ public abstract class BytesBoundedLinkedQueue<E> extends AbstractQueue<E> implem
       }
       delegate.add(e);
       elementAdded(e);
-      added = true;
     }
     finally {
       putLock.unlock();
     }
-    if (added) {
       signalNotEmpty();
-    }
-    return added;
+    return true;

   }

   @Override
@@ -222,12 +217,12 @@ public abstract class BytesBoundedLinkedQueue<E> extends AbstractQueue<E> implem
     if (c == this) {
       throw new IllegalArgumentException();
     }
-    int n = 0;
+    int n;
     takeLock.lock();
     try {
       // elementCount.get provides visibility to first n Nodes
       n = Math.min(maxElements, elementCount.get());
-      if (n < 0) {
+      if (n <= 0) {
         return 0;
       }
       for (int i = 0; i < n; i++) {
@@ -239,9 +234,7 @@ public abstract class BytesBoundedLinkedQueue<E> extends AbstractQueue<E> implem
     finally {
       takeLock.unlock();
     }
-    if (n > 0) {
       signalNotFull();
-    }
     return n;
   }

@@ -250,7 +243,7 @@ public abstract class BytesBoundedLinkedQueue<E> extends AbstractQueue<E> implem
   {
     checkNotNull(e);
     checkSize(e);
-    boolean added = false;
+    boolean added;
     putLock.lock();
     try {
       if (currentSize.get() + getBytesSize(e) > capacity) {
@@ -274,7 +267,7 @@ public abstract class BytesBoundedLinkedQueue<E> extends AbstractQueue<E> implem
   @Override
   public E poll()
   {
-    E e = null;
+    E e;
     takeLock.lock();
     try {
       e = delegate.poll();
@@ -295,7 +288,7 @@ public abstract class BytesBoundedLinkedQueue<E> extends AbstractQueue<E> implem
   public E poll(long timeout, TimeUnit unit) throws InterruptedException
   {
     long nanos = unit.toNanos(timeout);
-    E e = null;
+    E e;
     takeLock.lockInterruptibly();
     try {
       while (elementCount.get() == 0) {
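Several of the hunks above (and more below) drop dummy initializers such as "E e = null;" and "boolean added = false;" in favor of declaring the variable unassigned. Without a placeholder value, the compiler's definite-assignment check proves that every path writes the variable before it is read. A small, self-contained sketch of the idea (class and method names are illustrative):

import java.util.ArrayDeque;
import java.util.Queue;

public class DefiniteAssignmentExample
{
  public static String describe(Queue<String> queue)
  {
    // Declared without a dummy null: if a branch forgot to assign `head`,
    // the compiler would reject the read below instead of letting null leak through.
    String head;
    if (queue.isEmpty()) {
      head = "<empty>";
    } else {
      head = queue.peek();
    }
    return "head=" + head;
  }

  public static void main(String[] args)
  {
    Queue<String> q = new ArrayDeque<>();
    System.out.println(describe(q));   // head=<empty>
    q.add("segment-1");
    System.out.println(describe(q));   // head=segment-1
  }
}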
@@ -639,7 +639,7 @@ public abstract class SQLMetadataConnector implements MetadataStorageConnector
   {
     MetadataStorageConnectorConfig connectorConfig = getConfig();

-    BasicDataSource dataSource = null;
+    BasicDataSource dataSource;

     try {
       Properties dbcpProperties = connectorConfig.getDbcpProperties();
@@ -242,7 +242,7 @@ public class EventReceiverFirehoseFactory implements FirehoseFactory<InputRowPar
     }

     CountingInputStream countingInputStream = new CountingInputStream(in);
-    Collection<Map<String, Object>> events = null;
+    Collection<Map<String, Object>> events;
     try {
       events = objectMapper.readValue(
           countingInputStream,
@@ -154,7 +154,7 @@ public class ReplicationThrottler
         lifetime = maxLifetime;
         lifetimes.put(tier, lifetime);
       }
-      lifetimes.put(tier, --lifetime);
+      lifetimes.put(tier, lifetime - 1);
     }

   public void resetLifetime(String tier)
@@ -47,7 +47,7 @@ public class SegmentReplicantLookup
         if (numReplicants == null) {
           numReplicants = 0;
         }
-        segmentsInCluster.put(segment.getIdentifier(), server.getTier(), ++numReplicants);
+        segmentsInCluster.put(segment.getIdentifier(), server.getTier(), numReplicants + 1);
       }

       // Also account for queued segments
@@ -56,7 +56,7 @@ public class SegmentReplicantLookup
         if (numReplicants == null) {
           numReplicants = 0;
         }
-        loadingSegments.put(segment.getIdentifier(), server.getTier(), ++numReplicants);
+        loadingSegments.put(segment.getIdentifier(), server.getTier(), numReplicants + 1);
       }
     }
   }
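The ReplicationThrottler and SegmentReplicantLookup hunks replace "--lifetime" and "++numReplicants" inside the put(...) calls with the side-effect-free "lifetime - 1" and "numReplicants + 1". The mutated local goes out of scope right after the call, so the pre-increment only obscured which value was actually stored. A tiny sketch of the two spellings, using a plain HashMap rather than Druid's table structures (names are illustrative):

import java.util.HashMap;
import java.util.Map;

public class IncrementInArgumentExample
{
  public static void main(String[] args)
  {
    Map<String, Integer> replicants = new HashMap<>();
    replicants.put("tier1", 2);

    Integer count = replicants.get("tier1");

    // Flagged style: ++count rebinds the local *and* passes the new value,
    // but the extra mutation buys nothing because `count` is never read again.
    // replicants.put("tier1", ++count);

    // Equivalent, side-effect-free spelling used by the commit:
    replicants.put("tier1", count + 1);

    System.out.println(replicants.get("tier1"));  // 3
  }
}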
@@ -276,7 +276,7 @@ public class DatasourcesResource
       return Response.noContent().build();
     }

-    final Comparator<Interval> comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd());
+    final Comparator<Interval> comparator = Comparators.intervalsByStartThenEnd().reversed();

     if (full != null) {
       final Map<Interval, Map<String, Object>> retVal = new TreeMap<>(comparator);
@@ -342,7 +342,7 @@ public class DatasourcesResource
       return Response.noContent().build();
     }

-    final Comparator<Interval> comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd());
+    final Comparator<Interval> comparator = Comparators.intervalsByStartThenEnd().reversed();
     if (full != null) {
       final Map<Interval, Map<String, Object>> retVal = new TreeMap<>(comparator);
       for (DataSegment dataSegment : dataSource.getSegments()) {
@@ -385,7 +385,7 @@ public class DatasourcesResource
       return Response.ok(retVal).build();
     }

-    final Set<String> retVal = new TreeSet<>(Comparators.inverse(String.CASE_INSENSITIVE_ORDER));
+    final Set<String> retVal = new TreeSet<>(String.CASE_INSENSITIVE_ORDER.reversed());
     for (DataSegment dataSegment : dataSource.getSegments()) {
       if (theInterval.contains(dataSegment.getInterval())) {
         retVal.add(dataSegment.getIdentifier());
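The DatasourcesResource hunks swap the project-local Comparators.inverse(...) helper for the JDK's Comparator.reversed(), available since Java 8; String.CASE_INSENSITIVE_ORDER.reversed() works the same way because it is just a Comparator<String>. A standalone sketch, with Comparator.naturalOrder() standing in for Comparators.intervalsByStartThenEnd():

import java.util.Comparator;
import java.util.Map;
import java.util.TreeMap;

public class ReversedComparatorExample
{
  public static void main(String[] args)
  {
    // Newest key first, mirroring the reversed interval ordering in the resource endpoints.
    Comparator<String> comparator = Comparator.<String>naturalOrder().reversed();

    Map<String, Integer> byKeyDescending = new TreeMap<>(comparator);
    byKeyDescending.put("2018-10-01", 3);
    byKeyDescending.put("2018-10-03", 1);
    byKeyDescending.put("2018-10-02", 2);

    // Prints 2018-10-03, 2018-10-02, 2018-10-01
    byKeyDescending.keySet().forEach(System.out::println);
  }
}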
@@ -70,7 +70,7 @@ public class IntervalsResource
   @Produces(MediaType.APPLICATION_JSON)
   public Response getIntervals(@Context final HttpServletRequest req)
   {
-    final Comparator<Interval> comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd());
+    final Comparator<Interval> comparator = Comparators.intervalsByStartThenEnd().reversed();
     final Set<ImmutableDruidDataSource> datasources = InventoryViewUtils.getSecuredDataSources(
         req,
         serverInventoryView,
@@ -80,11 +80,7 @@ public class IntervalsResource
     final Map<Interval, Map<String, Map<String, Object>>> retVal = new TreeMap<>(comparator);
     for (ImmutableDruidDataSource dataSource : datasources) {
       for (DataSegment dataSegment : dataSource.getSegments()) {
-        Map<String, Map<String, Object>> interval = retVal.get(dataSegment.getInterval());
-        if (interval == null) {
-          Map<String, Map<String, Object>> tmp = new HashMap<>();
-          retVal.put(dataSegment.getInterval(), tmp);
-        }
+        retVal.computeIfAbsent(dataSegment.getInterval(), i -> new HashMap<>());
         setProperties(retVal, dataSource, dataSegment);
       }
     }
@@ -109,18 +105,14 @@ public class IntervalsResource
         authorizerMapper
     );

-    final Comparator<Interval> comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd());
+    final Comparator<Interval> comparator = Comparators.intervalsByStartThenEnd().reversed();

     if (full != null) {
       final Map<Interval, Map<String, Map<String, Object>>> retVal = new TreeMap<>(comparator);
       for (ImmutableDruidDataSource dataSource : datasources) {
         for (DataSegment dataSegment : dataSource.getSegments()) {
           if (theInterval.contains(dataSegment.getInterval())) {
-            Map<String, Map<String, Object>> dataSourceInterval = retVal.get(dataSegment.getInterval());
-            if (dataSourceInterval == null) {
-              Map<String, Map<String, Object>> tmp = new HashMap<>();
-              retVal.put(dataSegment.getInterval(), tmp);
-            }
+            retVal.computeIfAbsent(dataSegment.getInterval(), k -> new HashMap<>());
             setProperties(retVal, dataSource, dataSegment);
           }
         }
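The IntervalsResource hunks collapse the get / null-check / put sequence into Map.computeIfAbsent, which performs a single lookup and creates the nested map only when the key is missing. A self-contained sketch of the same refactoring, using strings in place of Interval and DataSegment:

import java.util.HashMap;
import java.util.Map;

public class ComputeIfAbsentExample
{
  public static void main(String[] args)
  {
    Map<String, Map<String, Integer>> rowsPerDataSourcePerInterval = new HashMap<>();

    // Before: explicit null check and put.
    Map<String, Integer> perDataSource = rowsPerDataSourcePerInterval.get("2018-10-01/2018-10-02");
    if (perDataSource == null) {
      perDataSource = new HashMap<>();
      rowsPerDataSourcePerInterval.put("2018-10-01/2018-10-02", perDataSource);
    }
    perDataSource.put("wikipedia", 100);

    // After: one call, nested map created lazily.
    rowsPerDataSourcePerInterval
        .computeIfAbsent("2018-10-02/2018-10-03", interval -> new HashMap<>())
        .put("wikipedia", 250);

    System.out.println(rowsPerDataSourcePerInterval);
  }
}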
@@ -482,7 +482,7 @@ public class LookupCoordinatorResource
   )
   {
     try {
-      Collection<String> tiers = null;
+      Collection<String> tiers;
       if (discover) {
         tiers = lookupCoordinatorManager.discoverTiers();
       } else {
@@ -57,7 +57,7 @@ public class JettyServerInitUtils
     for (ServletFilterHolder servletFilterHolder : extensionFilters) {
       // Check the Filter first to guard against people who don't read the docs and return the Class even
       // when they have an instance.
-      FilterHolder holder = null;
+      FilterHolder holder;
       if (servletFilterHolder.getFilter() != null) {
         holder = new FilterHolder(servletFilterHolder.getFilter());
       } else if (servletFilterHolder.getFilterClass() != null) {
@@ -24,7 +24,6 @@ import org.apache.druid.java.util.common.ISE;
 import org.apache.druid.java.util.emitter.EmittingLogger;
 import org.apache.druid.query.QueryInterruptedException;
 import org.apache.druid.server.DruidNode;
-import org.eclipse.jetty.server.Response;

 import javax.servlet.Filter;
 import javax.servlet.FilterChain;
@@ -94,7 +93,7 @@ public class PreResponseAuthorizationCheckFilter implements Filter
       );
     }

-    if (authInfoChecked != null && !authInfoChecked && response.getStatus() != Response.SC_FORBIDDEN) {
+    if (authInfoChecked != null && !authInfoChecked && response.getStatus() != HttpServletResponse.SC_FORBIDDEN) {
       handleAuthorizationCheckError(
           "Request's authorization check failed but status code was not 403.",
           request,
@@ -134,7 +133,7 @@ public class PreResponseAuthorizationCheckFilter implements Filter
       );
       unauthorizedError.setStackTrace(new StackTraceElement[0]);
       OutputStream out = response.getOutputStream();
-      sendJsonError(response, Response.SC_UNAUTHORIZED, jsonMapper.writeValueAsString(unauthorizedError), out);
+      sendJsonError(response, HttpServletResponse.SC_UNAUTHORIZED, jsonMapper.writeValueAsString(unauthorizedError), out);
       out.close();
       return;
     }
@@ -157,7 +156,7 @@ public class PreResponseAuthorizationCheckFilter implements Filter
       throw new ISE(errorMsg);
     } else {
       try {
-        servletResponse.sendError(Response.SC_FORBIDDEN);
+        servletResponse.sendError(HttpServletResponse.SC_FORBIDDEN);
       }
       catch (Exception e) {
         throw new RuntimeException(e);
@@ -23,7 +23,6 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.query.QueryInterruptedException;
 import org.apache.druid.server.DruidNode;
-import org.eclipse.jetty.server.Response;

 import javax.servlet.Filter;
 import javax.servlet.FilterChain;
@@ -79,7 +78,7 @@ public class SecuritySanityCheckFilter implements Filter

     AuthenticationResult result = (AuthenticationResult) request.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT);
     if (authInfoChecked != null || result != null || allowUnsecured != null) {
-      sendJsonError(httpResponse, Response.SC_FORBIDDEN, unauthorizedMessage, out);
+      sendJsonError(httpResponse, HttpServletResponse.SC_FORBIDDEN, unauthorizedMessage, out);
       out.close();
       return;
     }
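The two filter hunks above drop the org.eclipse.jetty.server.Response import and read the 401/403 status codes from javax.servlet.http.HttpServletResponse, so the classes depend on the servlet specification rather than a Jetty server class. A minimal, illustrative filter using the spec constant (this filter is an example, not one of Druid's):

import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;

public class DenyMissingHeaderFilter implements Filter
{
  @Override
  public void init(FilterConfig filterConfig)
  {
  }

  @Override
  public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
      throws IOException, ServletException
  {
    HttpServletRequest httpRequest = (HttpServletRequest) request;
    HttpServletResponse httpResponse = (HttpServletResponse) response;

    if (httpRequest.getHeader("X-Example-Token") == null) {
      // Servlet-spec constant (403) instead of a constant from a Jetty server class.
      httpResponse.sendError(HttpServletResponse.SC_FORBIDDEN);
      return;
    }
    chain.doFilter(request, response);
  }

  @Override
  public void destroy()
  {
  }
}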
@@ -2875,6 +2875,9 @@ public class CachingClusteredClientTest
       @Override
       public boolean equals(Object o)
       {
+        if (!(o instanceof DataSegment)) {
+          return false;
+        }
         return baseSegment.equals(o);
       }

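The CachingClusteredClientTest hunk adds an instanceof guard before delegating equals, so comparing against an unrelated type returns false instead of relying on the delegate to cope with it. A toy value class showing the same guard (names are illustrative):

public class GuardedEqualsExample
{
  static final class SegmentId
  {
    private final String name;

    SegmentId(String name)
    {
      this.name = name;
    }

    @Override
    public boolean equals(Object o)
    {
      // Guard first: anything that is not a SegmentId can never be equal.
      if (!(o instanceof SegmentId)) {
        return false;
      }
      return name.equals(((SegmentId) o).name);
    }

    @Override
    public int hashCode()
    {
      return name.hashCode();
    }
  }

  public static void main(String[] args)
  {
    SegmentId a = new SegmentId("wikipedia_2018-10-01");
    System.out.println(a.equals(new SegmentId("wikipedia_2018-10-01"))); // true
    System.out.println(a.equals("wikipedia_2018-10-01"));                // false, guard short-circuits
  }
}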
@@ -183,7 +183,7 @@ public class BytesBoundedLinkedQueueTest
     Assert.assertFalse(q.offer(new TestObject(2), delayMS, TimeUnit.MILLISECONDS));
   }

-  // @Test
+  @Test
   public void testConcurrentOperations() throws Exception
   {
     final BlockingQueue<TestObject> q = getQueue(Integer.MAX_VALUE);
@@ -240,8 +240,7 @@ public class BytesBoundedLinkedQueueTest
       public Boolean call()
       {
         while (!stopTest.get()) {
-          System.out
-              .println("drained elements : " + q.drainTo(new ArrayList<TestObject>(), Integer.MAX_VALUE));
+          q.drainTo(new ArrayList<>(), Integer.MAX_VALUE);
         }
         return true;
       }
@@ -172,8 +172,8 @@ public class HybridCacheTest
       Assert.assertEquals(Sets.newHashSet(key3), res.keySet());
       Assert.assertArrayEquals(value3, res.get(key3));

-      Assert.assertEquals(++hits, cache.getStats().getNumHits());
-      Assert.assertEquals(++misses, cache.getStats().getNumMisses());
+      Assert.assertEquals(hits + 1, cache.getStats().getNumHits());
+      Assert.assertEquals(misses + 1, cache.getStats().getNumMisses());
     }
   }
 }
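The HybridCacheTest hunk stops incrementing the expected counters inside the assertion arguments (++hits becomes hits + 1), so evaluating the assertion no longer mutates test state. A small illustration of why side-effect-free assertions are safer (the counter and helper below are made up for the example):

import org.junit.Assert;
import org.junit.Test;

public class SideEffectFreeAssertionExample
{
  private int observedHits = 0;

  private void recordHit()
  {
    observedHits++;
  }

  @Test
  public void testHitCounter()
  {
    int expectedHits = 0;

    recordHit();

    // Side-effect-free: the expected value is computed, the local stays untouched,
    // and removing or reordering the assertion cannot change the bookkeeping.
    Assert.assertEquals(expectedHits + 1, observedHits);

    // The flagged spelling would be Assert.assertEquals(++expectedHits, observedHits);
    // the counter update then hides inside the assertion itself.
  }
}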
@@ -131,7 +131,7 @@ public class MemcachedCacheBenchmark extends SimpleBenchmark

   public long timeGetObject(int reps)
   {
-    byte[] bytes = null;
+    byte[] bytes;
     long count = 0;
     for (int i = 0; i < reps; i++) {
       for (int k = 0; k < objectCount; ++k) {
@@ -30,9 +30,6 @@ import org.easymock.EasyMock;
 import org.junit.Assert;
 import org.junit.Test;

-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;

 public class AppenderatorPlumberTest
 {
   private final AppenderatorPlumber plumber;
@@ -94,43 +91,29 @@ public class AppenderatorPlumberTest
   @Test
   public void testSimpleIngestion() throws Exception
   {

-    final ConcurrentMap<String, String> commitMetadata = new ConcurrentHashMap<>();

     Appenderator appenderator = appenderatorTester.getAppenderator();

     // startJob
     Assert.assertEquals(null, plumber.startJob());

     // getDataSource
-    Assert.assertEquals(AppenderatorTester.DATASOURCE,
-        appenderator.getDataSource());
+    Assert.assertEquals(AppenderatorTester.DATASOURCE, appenderator.getDataSource());

     InputRow[] rows = new InputRow[] {AppenderatorTest.IR("2000", "foo", 1),
         AppenderatorTest.IR("2000", "bar", 2), AppenderatorTest.IR("2000", "qux", 4)};
     // add
-    commitMetadata.put("x", "1");
-    Assert.assertEquals(
-        1,
-        plumber.add(rows[0], null).getRowCount());
+    Assert.assertEquals(1, plumber.add(rows[0], null).getRowCount());

-    commitMetadata.put("x", "2");
-    Assert.assertEquals(
-        2,
-        plumber.add(rows[1], null).getRowCount());
+    Assert.assertEquals(2, plumber.add(rows[1], null).getRowCount());

-    commitMetadata.put("x", "3");
-    Assert.assertEquals(
-        3,
-        plumber.add(rows[2], null).getRowCount());
+    Assert.assertEquals(3, plumber.add(rows[2], null).getRowCount());


     Assert.assertEquals(1, plumber.getSegmentsView().size());

     SegmentIdentifier si = plumber.getSegmentsView().values().toArray(new SegmentIdentifier[0])[0];

-    Assert.assertEquals(3,
-        appenderator.getRowCount(si));
+    Assert.assertEquals(3, appenderator.getRowCount(si));

     appenderator.clear();
     Assert.assertTrue(appenderator.getSegments().isEmpty());
@@ -307,14 +307,13 @@ public class CuratorDruidCoordinatorTest extends CuratorTestBase
     DataSegment segmentToMove = sourceSegments.get(2);

     List<String> sourceSegKeys = new ArrayList<>();
-    List<String> destSegKeys = new ArrayList<>();

     for (DataSegment segment : sourceSegments) {
       sourceSegKeys.add(announceBatchSegmentsForServer(source, ImmutableSet.of(segment), zkPathsConfig, jsonMapper));
     }

     for (DataSegment segment : destinationSegments) {
-      destSegKeys.add(announceBatchSegmentsForServer(dest, ImmutableSet.of(segment), zkPathsConfig, jsonMapper));
+      announceBatchSegmentsForServer(dest, ImmutableSet.of(segment), zkPathsConfig, jsonMapper);
     }

     Assert.assertTrue(timing.forWaiting().awaitLatch(segmentViewInitLatch));
@@ -94,7 +94,6 @@ public class JavaScriptTieredBrokerSelectorStrategyTest
   public void testGetBrokerServiceName()
   {
     final LinkedHashMap<String, String> tierBrokerMap = new LinkedHashMap<>();
-    tierBrokerMap.put("fast", "druid/fastBroker");
     tierBrokerMap.put("fast", "druid/broker");
     tierBrokerMap.put("slow", "druid/slowBroker");

@@ -117,7 +117,6 @@ class CoordinatorJettyServerInitializer implements JettyServerInitializer
     final ObjectMapper jsonMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class));
     final AuthenticatorMapper authenticatorMapper = injector.getInstance(AuthenticatorMapper.class);

-    List<Authenticator> authenticators = null;
     AuthenticationUtils.addSecuritySanityCheckFilter(root, jsonMapper);

     // perform no-op authorization for these resources
@@ -128,7 +127,7 @@ class CoordinatorJettyServerInitializer implements JettyServerInitializer
       AuthenticationUtils.addNoopAuthorizationFilters(root, CliOverlord.UNSECURED_PATHS);
     }

-    authenticators = authenticatorMapper.getAuthenticatorChain();
+    List<Authenticator> authenticators = authenticatorMapper.getAuthenticatorChain();
     AuthenticationUtils.addAuthenticationFilterChain(root, authenticators);

     AuthenticationUtils.addAllowOptionsFilter(root, authConfig.isAllowUnauthenticatedHttpOptions());
@@ -73,6 +73,7 @@ public class PullDependencies implements Runnable
 {
   private static final Logger log = new Logger(PullDependencies.class);

+  @SuppressWarnings("MismatchedQueryAndUpdateOfCollection")
   private static final Set<String> exclusions = new HashSet<>(
       /*

@@ -93,14 +93,13 @@ public class QueryJettyServerInitializer implements JettyServerInitializer
     final ObjectMapper jsonMapper = injector.getInstance(Key.get(ObjectMapper.class, Json.class));
     final AuthenticatorMapper authenticatorMapper = injector.getInstance(AuthenticatorMapper.class);

-    List<Authenticator> authenticators = null;
     AuthenticationUtils.addSecuritySanityCheckFilter(root, jsonMapper);

     // perform no-op authorization for these resources
     AuthenticationUtils.addNoopAuthorizationFilters(root, UNSECURED_PATHS);
     AuthenticationUtils.addNoopAuthorizationFilters(root, authConfig.getUnsecuredPaths());

-    authenticators = authenticatorMapper.getAuthenticatorChain();
+    List<Authenticator> authenticators = authenticatorMapper.getAuthenticatorChain();
     AuthenticationUtils.addAuthenticationFilterChain(root, authenticators);

     AuthenticationUtils.addAllowOptionsFilter(root, authConfig.isAllowUnauthenticatedHttpOptions());
@@ -20,7 +20,6 @@
 package org.apache.druid.sql.avatica;

 import com.google.common.base.Preconditions;
-import com.google.common.base.Predicate;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSortedMap;
@@ -29,7 +28,6 @@ import com.google.common.collect.Sets;
 import org.apache.druid.java.util.common.ISE;
 import org.apache.druid.java.util.common.logger.Logger;

-import javax.annotation.Nullable;
 import javax.annotation.concurrent.GuardedBy;
 import java.util.HashMap;
 import java.util.Map;
@@ -85,17 +83,9 @@ public class DruidConnection

     // remove sensitive fields from the context, only the connection's context needs to have authentication
     // credentials
-    Map<String, Object> sanitizedContext = new HashMap<>();
-    sanitizedContext = Maps.filterEntries(
+    Map<String, Object> sanitizedContext = Maps.filterEntries(
         context,
-        new Predicate<Map.Entry<String, Object>>()
-        {
-          @Override
-          public boolean apply(@Nullable Map.Entry<String, Object> input)
-          {
-            return !SENSITIVE_CONTEXT_FIELDS.contains(input.getKey());
-          }
-        }
+        e -> !SENSITIVE_CONTEXT_FIELDS.contains(e.getKey())
     );

     final DruidStatement statement = new DruidStatement(
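The DruidConnection hunk removes a dead "new HashMap<>()" initializer and replaces the anonymous Guava Predicate with a lambda; Guava's Predicate has a single abstract method, so Maps.filterEntries accepts the lambda unchanged. A compact, self-contained sketch of the same call (the field contents below are illustrative):

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;

import java.util.Map;
import java.util.Set;

public class FilterEntriesLambdaExample
{
  private static final Set<String> SENSITIVE_CONTEXT_FIELDS = ImmutableSet.of("user", "password");

  public static void main(String[] args)
  {
    Map<String, Object> context = ImmutableMap.of(
        "user", "druid",
        "password", "secret",
        "sqlQueryId", "abc123"
    );

    // Guava's Predicate is a single-abstract-method interface, so a lambda replaces
    // the anonymous inner class without changing Maps.filterEntries at all.
    Map<String, Object> sanitizedContext = Maps.filterEntries(
        context,
        e -> !SENSITIVE_CONTEXT_FIELDS.contains(e.getKey())
    );

    System.out.println(sanitizedContext);  // {sqlQueryId=abc123}
  }
}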