Refactor JoinFilterAnalyzer - part 2 (#9929)

* Refactor JoinFilterAnalyzer

This patch attempts to make the join filter analysis code easier to follow, with
the hope of making it easier to add rewrite optimizations in the future.

To keep each patch small and easy to review, this is the first of at least two
planned patches.

This patch adds a builder to JoinFilterPreAnalysis, so that it is easier to
instantiate the pre-analysis. It also moves some of the filter normalization
code out to Filters, with associated tests.
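
As a minimal sketch of the resulting call pattern (the builder calls shown here
appear in the computeJoinFilterPreAnalysis diff further down; the local variable
names are illustrative and other setters are elided):

    JoinFilterPreAnalysis.Builder preAnalysisBuilder =
        new JoinFilterPreAnalysis.Builder(joinableClauses, originalFilter, postJoinVirtualColumns)
            .withEnableFilterPushDown(enableFilterPushDown);
    // ... filter normalization populates the remaining builder state ...
    JoinFilterPreAnalysis preAnalysis = preAnalysisBuilder.build();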

* fix tests

* Refactor JoinFilterAnalyzer - part 2

This change introduces the following components:
 * RhsRewriteCandidates - a wrapper for a list of candidates and associated
     functions to operate on the set of candidates.
 * JoinableClauses - a wrapper for the list of JoinableClause objects that
     make up the join, and the associated functions to operate on the clauses.
 * Equiconditions - a wrapper representing the equiconditions that are used
     in the join condition.

And associated test changes.
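
A minimal sketch of how a call site reads with the new wrappers (the methods
used here, fromList, areSomeColumnsFromJoin, getColumnFromJoinIfExists, and
getJoinableClauses, are all defined in the diffs below; the surrounding
variables are illustrative):

    // Wrap the raw list once, then ask the wrapper the questions that used to
    // live in static helpers on JoinFilterAnalyzer and Joinables.
    JoinableClauses joinableClauses = JoinableClauses.fromList(clauses);

    boolean touchesJoin = joinableClauses.areSomeColumnsFromJoin(filter.getRequiredColumns());
    JoinableClause owningClause = joinableClauses.getColumnFromJoinIfExists(columnName);

    // The underlying list is still reachable where it is needed, e.g. for HashJoinSegment.
    List<JoinableClause> clauseList = joinableClauses.getJoinableClauses();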

This refactoring surfaced 2 bugs:
 - Missing equals and hashCode implementations for RhsRewriteCandidate, which
   allowed potential duplicates in the set of RHS rewrite candidates
 - A missing Filter#supportsRequiredColumnRewrite check in
   analyzeJoinFilterClause, which could result in an UnsupportedOperationException
   being thrown by the filter
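
For the second bug, the guard now lives on Equiconditions and is reproduced
from the diff below: the filter is asked whether it supports required-column
rewrites before its required columns are inspected, and a single-column filter
only qualifies for a direct rewrite when that column appears in an
equicondition. The first bug is covered by the new equals/hashCode
implementations and the EqualsVerifier tests added at the end of the diff.

    public boolean doesFilterSupportDirectJoinFilterRewrite(Filter filterClause)
    {
      if (filterClause.supportsRequiredColumnRewrite()) {
        Set<String> requiredColumns = filterClause.getRequiredColumns();
        if (requiredColumns.size() == 1) {
          String reqColumn = requiredColumns.iterator().next();
          return equiconditions.containsKey(reqColumn);
        }
      }
      return false;
    }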

* fix compile error

* remove unused class
Suneet Saldanha 2020-05-29 15:03:35 -07:00 committed by GitHub
parent a33705f0e3
commit 9c40bebc02
17 changed files with 687 additions and 348 deletions

View File

@ -50,6 +50,7 @@ import org.apache.druid.segment.join.JoinType;
import org.apache.druid.segment.join.JoinableClause;
import org.apache.druid.segment.join.filter.JoinFilterAnalyzer;
import org.apache.druid.segment.join.filter.JoinFilterPreAnalysis;
import org.apache.druid.segment.join.filter.JoinableClauses;
import org.apache.druid.segment.join.lookup.LookupJoinable;
import org.apache.druid.segment.join.table.IndexedTableJoinable;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
@ -140,7 +141,7 @@ public class JoinAndLookupBenchmark
)
);
JoinFilterPreAnalysis preAnalysisLookupStringKey = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClausesLookupStringKey,
JoinableClauses.fromList(joinableClausesLookupStringKey),
VirtualColumns.EMPTY,
null,
false,
@ -167,7 +168,7 @@ public class JoinAndLookupBenchmark
)
);
JoinFilterPreAnalysis preAnalysisLookupLongKey = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClausesLookupLongKey,
JoinableClauses.fromList(joinableClausesLookupLongKey),
VirtualColumns.EMPTY,
null,
false,
@ -194,7 +195,7 @@ public class JoinAndLookupBenchmark
)
);
JoinFilterPreAnalysis preAnalysisIndexedTableStringKey = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClausesIndexedTableStringKey,
JoinableClauses.fromList(joinableClausesIndexedTableStringKey),
VirtualColumns.EMPTY,
null,
false,
@ -221,7 +222,7 @@ public class JoinAndLookupBenchmark
)
);
JoinFilterPreAnalysis preAnalysisIndexedTableLongKey = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClausesIndexedTableLonggKey,
JoinableClauses.fromList(joinableClausesIndexedTableLonggKey),
VirtualColumns.EMPTY,
null,
false,

View File

@ -45,6 +45,7 @@ import org.apache.druid.segment.data.CloseableIndexed;
import org.apache.druid.segment.data.Indexed;
import org.apache.druid.segment.filter.cnf.CalciteCnfHelper;
import org.apache.druid.segment.filter.cnf.HiveCnfHelper;
import org.apache.druid.segment.join.filter.AllNullColumnSelectorFactory;
import javax.annotation.Nullable;
import java.io.IOException;
@ -61,6 +62,7 @@ import java.util.stream.Collectors;
*/
public class Filters
{
private static final ColumnSelectorFactory ALL_NULL_COLUMN_SELECTOR_FACTORY = new AllNullColumnSelectorFactory();
/**
* Convert a list of DimFilters to a list of Filters.
@ -543,4 +545,11 @@ public class Filters
}
return normalizedOrClauses;
}
public static boolean filterMatchesNull(Filter filter)
{
ValueMatcher valueMatcher = filter.makeMatcher(ALL_NULL_COLUMN_SELECTOR_FACTORY);
return valueMatcher.matches();
}
}

View File

@ -20,7 +20,6 @@
package org.apache.druid.segment.join;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.query.filter.Filter;
import org.apache.druid.query.planning.PreJoinableClause;
import org.apache.druid.segment.Segment;
@ -28,16 +27,14 @@ import org.apache.druid.segment.VirtualColumns;
import org.apache.druid.segment.column.ColumnHolder;
import org.apache.druid.segment.join.filter.JoinFilterAnalyzer;
import org.apache.druid.segment.join.filter.JoinFilterPreAnalysis;
import org.apache.druid.segment.join.filter.JoinableClauses;
import org.apache.druid.utils.JvmUtils;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* Utility methods for working with {@link Joinable} related classes.
@ -110,7 +107,7 @@ public class Joinables
if (clauses.isEmpty()) {
return Function.identity();
} else {
final List<JoinableClause> joinableClauses = createJoinableClauses(clauses, joinableFactory);
final JoinableClauses joinableClauses = JoinableClauses.createClauses(clauses, joinableFactory);
JoinFilterPreAnalysis jfpa = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
virtualColumns,
@ -120,51 +117,12 @@ public class Joinables
enableRewriteValueColumnFilters,
filterRewriteMaxSize
);
return baseSegment -> new HashJoinSegment(baseSegment, joinableClauses, jfpa);
return baseSegment -> new HashJoinSegment(baseSegment, joinableClauses.getJoinableClauses(), jfpa);
}
}
);
}
/**
* Returns a list of {@link JoinableClause} corresponding to a list of {@link PreJoinableClause}. This will call
* {@link JoinableFactory#build} on each one and therefore may be an expensive operation.
*/
private static List<JoinableClause> createJoinableClauses(
final List<PreJoinableClause> clauses,
final JoinableFactory joinableFactory
)
{
// Since building a JoinableClause can be expensive, check for prefix conflicts before building
checkPreJoinableClausesForDuplicatesAndShadowing(clauses);
return clauses.stream().map(preJoinableClause -> {
final Optional<Joinable> joinable = joinableFactory.build(
preJoinableClause.getDataSource(),
preJoinableClause.getCondition()
);
return new JoinableClause(
preJoinableClause.getPrefix(),
joinable.orElseThrow(() -> new ISE("dataSource is not joinable: %s", preJoinableClause.getDataSource())),
preJoinableClause.getJoinType(),
preJoinableClause.getCondition()
);
}).collect(Collectors.toList());
}
private static void checkPreJoinableClausesForDuplicatesAndShadowing(
final List<PreJoinableClause> preJoinableClauses
)
{
List<String> prefixes = new ArrayList<>();
for (PreJoinableClause clause : preJoinableClauses) {
prefixes.add(clause.getPrefix());
}
checkPrefixesForDuplicatesAndShadowing(prefixes);
}
/**
* Check if any prefixes in the provided list duplicate or shadow each other.
*

View File

@ -0,0 +1,60 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.segment.join.filter;
import org.apache.druid.math.expr.Expr;
import org.apache.druid.query.filter.Filter;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Map;
import java.util.Set;
public class Equiconditions
{
@Nonnull private final Map<String, Set<Expr>> equiconditions;
public Equiconditions(Map<String, Set<Expr>> equiconditions)
{
this.equiconditions = equiconditions;
}
/**
* @param filterClause the filter.
* @return true if direct join filter rewrite is supported for the provided filter
*/
public boolean doesFilterSupportDirectJoinFilterRewrite(Filter filterClause)
{
if (filterClause.supportsRequiredColumnRewrite()) {
Set<String> requiredColumns = filterClause.getRequiredColumns();
if (requiredColumns.size() == 1) {
String reqColumn = requiredColumns.iterator().next();
return equiconditions.containsKey(reqColumn);
}
}
return false;
}
@Nullable
public Set<Expr> getLhsExprs(String rhsColumn)
{
return equiconditions.get(rhsColumn);
}
}

View File

@ -25,8 +25,6 @@ import org.apache.druid.java.util.common.Pair;
import org.apache.druid.math.expr.Expr;
import org.apache.druid.query.filter.Filter;
import org.apache.druid.query.filter.InDimFilter;
import org.apache.druid.query.filter.ValueMatcher;
import org.apache.druid.segment.ColumnSelectorFactory;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.VirtualColumns;
import org.apache.druid.segment.column.ValueType;
@ -36,15 +34,15 @@ import org.apache.druid.segment.filter.SelectorFilter;
import org.apache.druid.segment.join.Equality;
import org.apache.druid.segment.join.JoinConditionAnalysis;
import org.apache.druid.segment.join.JoinableClause;
import org.apache.druid.segment.join.filter.rewrite.RhsRewriteCandidate;
import org.apache.druid.segment.join.filter.rewrite.RhsRewriteCandidates;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
@ -80,7 +78,6 @@ import java.util.Set;
public class JoinFilterAnalyzer
{
private static final String PUSH_DOWN_VIRTUAL_COLUMN_NAME_BASE = "JOIN-FILTER-PUSHDOWN-VIRTUAL-COLUMN-";
private static final ColumnSelectorFactory ALL_NULL_COLUMN_SELECTOR_FACTORY = new AllNullColumnSelectorFactory();
/**
* Before making per-segment filter splitting decisions, we first do a pre-analysis step
@ -103,7 +100,7 @@ public class JoinFilterAnalyzer
* @return A JoinFilterPreAnalysis containing information determined in this pre-analysis step.
*/
public static JoinFilterPreAnalysis computeJoinFilterPreAnalysis(
List<JoinableClause> joinableClauses,
JoinableClauses joinableClauses,
VirtualColumns virtualColumns,
Filter originalFilter,
boolean enableFilterPushDown,
@ -115,7 +112,7 @@ public class JoinFilterAnalyzer
final List<VirtualColumn> preJoinVirtualColumns = new ArrayList<>();
final List<VirtualColumn> postJoinVirtualColumns = new ArrayList<>();
splitVirtualColumns(joinableClauses, virtualColumns, preJoinVirtualColumns, postJoinVirtualColumns);
joinableClauses.splitVirtualColumns(virtualColumns, preJoinVirtualColumns, postJoinVirtualColumns);
JoinFilterPreAnalysis.Builder preAnalysisBuilder =
new JoinFilterPreAnalysis.Builder(joinableClauses, originalFilter, postJoinVirtualColumns)
.withEnableFilterPushDown(enableFilterPushDown)
@ -131,7 +128,7 @@ public class JoinFilterAnalyzer
for (Filter orClause : normalizedOrClauses) {
Set<String> reqColumns = orClause.getRequiredColumns();
if (areSomeColumnsFromJoin(joinableClauses, reqColumns) || areSomeColumnsFromPostJoinVirtualColumns(
if (joinableClauses.areSomeColumnsFromJoin(reqColumns) || areSomeColumnsFromPostJoinVirtualColumns(
postJoinVirtualColumns,
reqColumns
)) {
@ -148,15 +145,15 @@ public class JoinFilterAnalyzer
}
// build the equicondition map, used for determining how the tables are connected through joins
Map<String, Set<Expr>> equiconditions = preAnalysisBuilder.computeEquiconditionsFromJoinableClauses();
Equiconditions equiconditions = preAnalysisBuilder.computeEquiconditionsFromJoinableClauses();
Set<RhsRewriteCandidate> rhsRewriteCandidates = getRhsRewriteCandidates(normalizedJoinTableClauses, equiconditions, joinableClauses);
RhsRewriteCandidates rhsRewriteCandidates =
RhsRewriteCandidates.getRhsRewriteCandidates(normalizedJoinTableClauses, equiconditions, joinableClauses);
// Build a map of RHS table prefix -> JoinFilterColumnCorrelationAnalysis based on the RHS rewrite candidates
Map<String, Optional<Map<String, JoinFilterColumnCorrelationAnalysis>>> correlationsByPrefix = new HashMap<>();
Map<String, Optional<JoinFilterColumnCorrelationAnalysis>> directRewriteCorrelations = new HashMap<>();
for (RhsRewriteCandidate rhsRewriteCandidate : rhsRewriteCandidates) {
for (RhsRewriteCandidate rhsRewriteCandidate : rhsRewriteCandidates.getRhsRewriteCandidates()) {
if (rhsRewriteCandidate.isDirectRewrite()) {
directRewriteCorrelations.computeIfAbsent(
rhsRewriteCandidate.getRhsColumn(),
@ -202,7 +199,7 @@ public class JoinFilterAnalyzer
// (See JoinFilterAnalyzerTest.test_filterPushDown_factToRegionOneColumnToTwoRHSColumnsAndFilterOnRHS for an example)
Map<String, List<JoinFilterColumnCorrelationAnalysis>> correlationsByFilteringColumn = new LinkedHashMap<>();
Map<String, List<JoinFilterColumnCorrelationAnalysis>> correlationsByDirectFilteringColumn = new LinkedHashMap<>();
for (RhsRewriteCandidate rhsRewriteCandidate : rhsRewriteCandidates) {
for (RhsRewriteCandidate rhsRewriteCandidate : rhsRewriteCandidates.getRhsRewriteCandidates()) {
if (rhsRewriteCandidate.isDirectRewrite()) {
List<JoinFilterColumnCorrelationAnalysis> perColumnCorrelations =
correlationsByDirectFilteringColumn.computeIfAbsent(
@ -277,61 +274,6 @@ public class JoinFilterAnalyzer
return preAnalysisBuilder.build();
}
private static Optional<RhsRewriteCandidate> determineRhsRewriteCandidatesForSingleFilter(
Filter orClause,
Map<String, Set<Expr>> equiconditions,
List<JoinableClause> joinableClauses
)
{
// Check if the filter clause is on the RHS join column. If so, we can rewrite the clause to filter on the
// LHS join column instead.
// Currently, we only support rewrites of filters that operate on a single column for simplicity.
Set<String> requiredColumns = orClause.getRequiredColumns();
if (orClause.supportsRequiredColumnRewrite() &&
doesRequiredColumnSetSupportDirectJoinFilterRewrite(requiredColumns, equiconditions)) {
String reqColumn = requiredColumns.iterator().next();
JoinableClause joinableClause = isColumnFromJoin(joinableClauses, reqColumn);
return Optional.of(
new RhsRewriteCandidate(
joinableClause,
reqColumn,
null,
true
)
);
} else if (orClause instanceof SelectorFilter) {
// this is a candidate for RHS filter rewrite, determine column correlations and correlated values
String reqColumn = ((SelectorFilter) orClause).getDimension();
String reqValue = ((SelectorFilter) orClause).getValue();
JoinableClause joinableClause = isColumnFromJoin(joinableClauses, reqColumn);
if (joinableClause != null) {
return Optional.of(
new RhsRewriteCandidate(
joinableClause,
reqColumn,
reqValue,
false
)
);
}
}
return Optional.empty();
}
private static boolean doesRequiredColumnSetSupportDirectJoinFilterRewrite(
Set<String> requiredColumns,
Map<String, Set<Expr>> equiconditions
)
{
if (requiredColumns.size() == 1) {
String reqColumn = requiredColumns.iterator().next();
return equiconditions.containsKey(reqColumn);
}
return false;
}
/**
* @param joinFilterPreAnalysis The pre-analysis computed by {@link #computeJoinFilterPreAnalysis)}
*
@ -355,7 +297,7 @@ public class JoinFilterAnalyzer
Map<Expr, VirtualColumn> pushDownVirtualColumnsForLhsExprs = new HashMap<>();
for (Filter baseTableFilter : joinFilterPreAnalysis.getNormalizedBaseTableClauses()) {
if (!filterMatchesNull(baseTableFilter)) {
if (!Filters.filterMatchesNull(baseTableFilter)) {
leftFilters.add(baseTableFilter);
} else {
rightFilters.add(baseTableFilter);
@ -411,7 +353,7 @@ public class JoinFilterAnalyzer
// NULL matching conditions are not currently pushed down.
// They require special consideration based on the join type, and for simplicity of the initial implementation
// this is not currently handled.
if (!joinFilterPreAnalysis.isEnableFilterRewrite() || filterMatchesNull(filterClause)) {
if (!joinFilterPreAnalysis.isEnableFilterRewrite() || Filters.filterMatchesNull(filterClause)) {
return JoinFilterAnalysis.createNoPushdownFilterAnalysis(filterClause);
}
@ -423,10 +365,7 @@ public class JoinFilterAnalyzer
);
}
if (filterClause.supportsRequiredColumnRewrite() && doesRequiredColumnSetSupportDirectJoinFilterRewrite(
filterClause.getRequiredColumns(),
joinFilterPreAnalysis.getEquiconditions()
)) {
if (joinFilterPreAnalysis.getEquiconditions().doesFilterSupportDirectJoinFilterRewrite(filterClause)) {
return rewriteFilterDirect(
filterClause,
joinFilterPreAnalysis,
@ -531,16 +470,15 @@ public class JoinFilterAnalyzer
boolean retainRhs = false;
for (Filter filter : orFilter.getFilters()) {
if (!areSomeColumnsFromJoin(joinFilterPreAnalysis.getJoinableClauses(), filter.getRequiredColumns())) {
if (!joinFilterPreAnalysis.getJoinableClauses().areSomeColumnsFromJoin(filter.getRequiredColumns())) {
newFilters.add(filter);
continue;
}
JoinFilterAnalysis rewritten = null;
if (doesRequiredColumnSetSupportDirectJoinFilterRewrite(
filter.getRequiredColumns(),
joinFilterPreAnalysis.getEquiconditions()
)) {
if (joinFilterPreAnalysis.getEquiconditions()
.doesFilterSupportDirectJoinFilterRewrite(filter)
) {
rewritten = rewriteFilterDirect(
filter,
joinFilterPreAnalysis,
@ -598,7 +536,7 @@ public class JoinFilterAnalyzer
return JoinFilterAnalysis.createNoPushdownFilterAnalysis(selectorFilter);
}
if (!areSomeColumnsFromJoin(joinFilterPreAnalysis.getJoinableClauses(), selectorFilter.getRequiredColumns())) {
if (!joinFilterPreAnalysis.getJoinableClauses().areSomeColumnsFromJoin(selectorFilter.getRequiredColumns())) {
return new JoinFilterAnalysis(
false,
selectorFilter,
@ -741,10 +679,10 @@ public class JoinFilterAnalyzer
* the tablePrefix
*/
private static Optional<Map<String, JoinFilterColumnCorrelationAnalysis>> findCorrelatedBaseTableColumns(
List<JoinableClause> joinableClauses,
JoinableClauses joinableClauses,
String tablePrefix,
RhsRewriteCandidate rhsRewriteCandidate,
Map<String, Set<Expr>> equiConditions
Equiconditions equiConditions
)
{
JoinableClause clauseForTablePrefix = rhsRewriteCandidate.getJoinableClause();
@ -807,15 +745,15 @@ public class JoinFilterAnalyzer
* modified.
*/
private static void getCorrelationForRHSColumn(
List<JoinableClause> joinableClauses,
Map<String, Set<Expr>> equiConditions,
JoinableClauses joinableClauses,
Equiconditions equiConditions,
String rhsColumn,
Set<String> correlatedBaseColumns,
Set<Expr> correlatedBaseExpressions
)
{
String findMappingFor = rhsColumn;
Set<Expr> lhsExprs = equiConditions.get(findMappingFor);
Set<Expr> lhsExprs = equiConditions.getLhsExprs(findMappingFor);
if (lhsExprs == null) {
return;
}
@ -827,14 +765,14 @@ public class JoinFilterAnalyzer
Expr.BindingDetails bindingDetails = lhsExpr.analyzeInputs();
Set<String> requiredBindings = bindingDetails.getRequiredBindings();
if (areSomeColumnsFromJoin(joinableClauses, requiredBindings)) {
if (joinableClauses.areSomeColumnsFromJoin(requiredBindings)) {
break;
}
correlatedBaseExpressions.add(lhsExpr);
} else {
// simple identifier, see if we can correlate it with a column on the base table
findMappingFor = identifier;
if (isColumnFromJoin(joinableClauses, identifier) == null) {
if (joinableClauses.getColumnFromJoinIfExists(identifier) == null) {
correlatedBaseColumns.add(findMappingFor);
} else {
getCorrelationForRHSColumn(
@ -887,27 +825,6 @@ public class JoinFilterAnalyzer
return new ArrayList<>(uniquesMap.values());
}
private static boolean filterMatchesNull(Filter filter)
{
ValueMatcher valueMatcher = filter.makeMatcher(ALL_NULL_COLUMN_SELECTOR_FACTORY);
return valueMatcher.matches();
}
@Nullable
private static JoinableClause isColumnFromJoin(
List<JoinableClause> joinableClauses,
String column
)
{
for (JoinableClause joinableClause : joinableClauses) {
if (joinableClause.includesColumn(column)) {
return joinableClause;
}
}
return null;
}
private static boolean isColumnFromPostJoinVirtualColumns(
List<VirtualColumn> postJoinVirtualColumns,
String column
@ -921,19 +838,6 @@ public class JoinFilterAnalyzer
return false;
}
private static boolean areSomeColumnsFromJoin(
List<JoinableClause> joinableClauses,
Collection<String> columns
)
{
for (String column : columns) {
if (isColumnFromJoin(joinableClauses, column) != null) {
return true;
}
}
return false;
}
private static boolean areSomeColumnsFromPostJoinVirtualColumns(
List<VirtualColumn> postJoinVirtualColumns,
Collection<String> columns
@ -946,118 +850,4 @@ public class JoinFilterAnalyzer
}
return false;
}
private static void splitVirtualColumns(
List<JoinableClause> joinableClauses,
final VirtualColumns virtualColumns,
final List<VirtualColumn> preJoinVirtualColumns,
final List<VirtualColumn> postJoinVirtualColumns
)
{
for (VirtualColumn virtualColumn : virtualColumns.getVirtualColumns()) {
if (areSomeColumnsFromJoin(joinableClauses, virtualColumn.requiredColumns())) {
postJoinVirtualColumns.add(virtualColumn);
} else {
preJoinVirtualColumns.add(virtualColumn);
}
}
}
/**
* Determine candidates for filter rewrites.
* A candidate is an RHS column that appears in a filter, along with the value being filtered on, plus
* the joinable clause associated with the table that the RHS column is from.
*
* These candidates are redued to filter rewrite correlations.
*
* @param normalizedJoinTableClauses
* @param equiconditions
* @param joinableClauses
* @return A set of candidates for filter rewrites.
*/
private static Set<RhsRewriteCandidate> getRhsRewriteCandidates(
List<Filter> normalizedJoinTableClauses,
Map<String, Set<Expr>> equiconditions,
List<JoinableClause> joinableClauses)
{
Set<RhsRewriteCandidate> rhsRewriteCandidates = new LinkedHashSet<>();
for (Filter orClause : normalizedJoinTableClauses) {
if (filterMatchesNull(orClause)) {
continue;
}
if (orClause instanceof OrFilter) {
for (Filter subFilter : ((OrFilter) orClause).getFilters()) {
Optional<RhsRewriteCandidate> rhsRewriteCandidate = determineRhsRewriteCandidatesForSingleFilter(
subFilter,
equiconditions,
joinableClauses
);
rhsRewriteCandidate.ifPresent(rhsRewriteCandidates::add);
}
continue;
}
Optional<RhsRewriteCandidate> rhsRewriteCandidate = determineRhsRewriteCandidatesForSingleFilter(
orClause,
equiconditions,
joinableClauses
);
rhsRewriteCandidate.ifPresent(rhsRewriteCandidates::add);
}
return rhsRewriteCandidates;
}
/**
* A candidate is an RHS column that appears in a filter, along with the value being filtered on, plus
* the joinable clause associated with the table that the RHS column is from.
*/
private static class RhsRewriteCandidate
{
private final boolean isDirectRewrite;
private final JoinableClause joinableClause;
private final String rhsColumn;
private final String valueForRewrite;
public RhsRewriteCandidate(
JoinableClause joinableClause,
String rhsColumn,
String valueForRewrite,
boolean isDirectRewrite
)
{
this.joinableClause = joinableClause;
this.rhsColumn = rhsColumn;
this.valueForRewrite = valueForRewrite;
this.isDirectRewrite = isDirectRewrite;
}
public JoinableClause getJoinableClause()
{
return joinableClause;
}
public String getRhsColumn()
{
return rhsColumn;
}
public String getValueForRewrite()
{
return valueForRewrite;
}
/**
* A direct rewrite occurs when we filter on an RHS column that is also part of a join equicondition.
*
* For example, if we have the filter (j.x = 'hello') and the join condition is (y = j.x), we can directly
* rewrite the j.x filter to (y = 'hello').
*/
public boolean isDirectRewrite()
{
return isDirectRewrite;
}
}
}

View File

@ -22,10 +22,12 @@ package org.apache.druid.segment.join.filter;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.math.expr.Expr;
import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
@ -39,9 +41,9 @@ import java.util.Set;
public class JoinFilterColumnCorrelationAnalysis
{
private final String joinColumn;
private final List<String> baseColumns;
private final List<Expr> baseExpressions;
private Map<Pair<String, String>, Optional<Set<String>>> correlatedValuesMap;
@Nonnull private final List<String> baseColumns;
@Nonnull private final List<Expr> baseExpressions;
private final Map<Pair<String, String>, Optional<Set<String>>> correlatedValuesMap;
public JoinFilterColumnCorrelationAnalysis(
String joinColumn,
@ -61,11 +63,13 @@ public class JoinFilterColumnCorrelationAnalysis
return joinColumn;
}
@Nonnull
public List<String> getBaseColumns()
{
return baseColumns;
}
@Nonnull
public List<Expr> getBaseExpressions()
{
return baseExpressions;
@ -80,4 +84,26 @@ public class JoinFilterColumnCorrelationAnalysis
{
return !baseColumns.isEmpty() || !baseExpressions.isEmpty();
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
JoinFilterColumnCorrelationAnalysis that = (JoinFilterColumnCorrelationAnalysis) o;
return Objects.equals(joinColumn, that.joinColumn) &&
baseColumns.equals(that.baseColumns) &&
baseExpressions.equals(that.baseExpressions) &&
Objects.equals(correlatedValuesMap, that.correlatedValuesMap);
}
@Override
public int hashCode()
{
return Objects.hash(joinColumn, baseColumns, baseExpressions, correlatedValuesMap);
}
}

View File

@ -48,7 +48,7 @@ import java.util.Set;
*/
public class JoinFilterPreAnalysis
{
private final List<JoinableClause> joinableClauses;
private final JoinableClauses joinableClauses;
private final Filter originalFilter;
private final List<Filter> normalizedBaseTableClauses;
private final List<Filter> normalizedJoinTableClauses;
@ -57,10 +57,10 @@ public class JoinFilterPreAnalysis
private final boolean enableFilterPushDown;
private final boolean enableFilterRewrite;
private final List<VirtualColumn> postJoinVirtualColumns;
private final Map<String, Set<Expr>> equiconditions;
private final Equiconditions equiconditions;
private JoinFilterPreAnalysis(
final List<JoinableClause> joinableClauses,
final JoinableClauses joinableClauses,
final Filter originalFilter,
final List<VirtualColumn> postJoinVirtualColumns,
final List<Filter> normalizedBaseTableClauses,
@ -69,7 +69,7 @@ public class JoinFilterPreAnalysis
final Map<String, List<JoinFilterColumnCorrelationAnalysis>> correlationsByDirectFilteringColumn,
final boolean enableFilterPushDown,
final boolean enableFilterRewrite,
final Map<String, Set<Expr>> equiconditions
final Equiconditions equiconditions
)
{
this.joinableClauses = joinableClauses;
@ -84,7 +84,7 @@ public class JoinFilterPreAnalysis
this.equiconditions = equiconditions;
}
public List<JoinableClause> getJoinableClauses()
public JoinableClauses getJoinableClauses()
{
return joinableClauses;
}
@ -129,7 +129,7 @@ public class JoinFilterPreAnalysis
return enableFilterRewrite;
}
public Map<String, Set<Expr>> getEquiconditions()
public Equiconditions getEquiconditions()
{
return equiconditions;
}
@ -139,7 +139,7 @@ public class JoinFilterPreAnalysis
*/
public static class Builder
{
@Nonnull private final List<JoinableClause> joinableClauses;
@Nonnull private final JoinableClauses joinableClauses;
@Nullable private final Filter originalFilter;
@Nullable private List<Filter> normalizedBaseTableClauses;
@Nullable private List<Filter> normalizedJoinTableClauses;
@ -148,10 +148,10 @@ public class JoinFilterPreAnalysis
private boolean enableFilterPushDown = false;
private boolean enableFilterRewrite = false;
@Nonnull private final List<VirtualColumn> postJoinVirtualColumns;
@Nonnull private Map<String, Set<Expr>> equiconditions = Collections.emptyMap();
@Nonnull private Equiconditions equiconditions = new Equiconditions(Collections.emptyMap());
public Builder(
@Nonnull List<JoinableClause> joinableClauses,
@Nonnull JoinableClauses joinableClauses,
@Nullable Filter originalFilter,
@Nonnull List<VirtualColumn> postJoinVirtualColumns
)
@ -201,18 +201,19 @@ public class JoinFilterPreAnalysis
return this;
}
public Map<String, Set<Expr>> computeEquiconditionsFromJoinableClauses()
public Equiconditions computeEquiconditionsFromJoinableClauses()
{
this.equiconditions = new HashMap<>();
for (JoinableClause clause : joinableClauses) {
Map<String, Set<Expr>> equiconditionsMap = new HashMap<>();
for (JoinableClause clause : joinableClauses.getJoinableClauses()) {
for (Equality equality : clause.getCondition().getEquiConditions()) {
Set<Expr> exprsForRhs = equiconditions.computeIfAbsent(
Set<Expr> exprsForRhs = equiconditionsMap.computeIfAbsent(
clause.getPrefix() + equality.getRightColumn(),
(rhs) -> new HashSet<>()
);
exprsForRhs.add(equality.getLeftExpr());
}
}
this.equiconditions = new Equiconditions(equiconditionsMap);
return equiconditions;
}

View File

@ -0,0 +1,136 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.segment.join.filter;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.query.planning.PreJoinableClause;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.VirtualColumns;
import org.apache.druid.segment.join.Joinable;
import org.apache.druid.segment.join.JoinableClause;
import org.apache.druid.segment.join.JoinableFactory;
import org.apache.druid.segment.join.Joinables;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
public class JoinableClauses
{
@Nonnull private final List<JoinableClause> joinableClauses;
/**
* Builds a list of {@link JoinableClause} corresponding to a list of {@link PreJoinableClause}. This will call
* {@link JoinableFactory#build} on each one and therefore may be an expensive operation.
*/
public static JoinableClauses createClauses(final List<PreJoinableClause> clauses, final JoinableFactory joinableFactory)
{
// Since building a JoinableClause can be expensive, check for prefix conflicts before building
checkPreJoinableClausesForDuplicatesAndShadowing(clauses);
List<JoinableClause> joinableClauses = clauses.stream().map(preJoinableClause -> {
final Optional<Joinable> joinable = joinableFactory.build(
preJoinableClause.getDataSource(),
preJoinableClause.getCondition()
);
return new JoinableClause(
preJoinableClause.getPrefix(),
joinable.orElseThrow(() -> new ISE("dataSource is not joinable: %s", preJoinableClause.getDataSource())),
preJoinableClause.getJoinType(),
preJoinableClause.getCondition()
);
}).collect(Collectors.toList());
return new JoinableClauses(joinableClauses);
}
private JoinableClauses(@Nonnull List<JoinableClause> joinableClauses)
{
this.joinableClauses = joinableClauses;
}
@Nonnull
public List<JoinableClause> getJoinableClauses()
{
return joinableClauses;
}
public void splitVirtualColumns(
final VirtualColumns virtualColumns,
final List<VirtualColumn> preJoinVirtualColumns,
final List<VirtualColumn> postJoinVirtualColumns
)
{
for (VirtualColumn virtualColumn : virtualColumns.getVirtualColumns()) {
if (areSomeColumnsFromJoin(virtualColumn.requiredColumns())) {
postJoinVirtualColumns.add(virtualColumn);
} else {
preJoinVirtualColumns.add(virtualColumn);
}
}
}
public boolean areSomeColumnsFromJoin(
Collection<String> columns
)
{
for (String column : columns) {
if (getColumnFromJoinIfExists(column) != null) {
return true;
}
}
return false;
}
@Nullable
public JoinableClause getColumnFromJoinIfExists(
String column
)
{
for (JoinableClause joinableClause : joinableClauses) {
if (joinableClause.includesColumn(column)) {
return joinableClause;
}
}
return null;
}
private static void checkPreJoinableClausesForDuplicatesAndShadowing(
final List<PreJoinableClause> preJoinableClauses
)
{
List<String> prefixes = new ArrayList<>();
for (PreJoinableClause clause : preJoinableClauses) {
prefixes.add(clause.getPrefix());
}
Joinables.checkPrefixesForDuplicatesAndShadowing(prefixes);
}
public static JoinableClauses fromList(List<JoinableClause> clauses)
{
return new JoinableClauses(clauses);
}
}

View File

@ -0,0 +1,101 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.segment.join.filter.rewrite;
import org.apache.druid.segment.join.JoinableClause;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Objects;
/**
* A candidate is an RHS column that appears in a filter, along with the value being filtered on, plus
* the joinable clause associated with the table that the RHS column is from.
*/
public class RhsRewriteCandidate
{
private final boolean isDirectRewrite;
@Nonnull private final JoinableClause joinableClause;
private final String rhsColumn;
@Nullable private final String valueForRewrite;
public RhsRewriteCandidate(
@Nonnull JoinableClause joinableClause,
String rhsColumn,
@Nullable String valueForRewrite,
boolean isDirectRewrite
)
{
this.joinableClause = joinableClause;
this.rhsColumn = rhsColumn;
this.valueForRewrite = valueForRewrite;
this.isDirectRewrite = isDirectRewrite;
}
@Nonnull
public JoinableClause getJoinableClause()
{
return joinableClause;
}
public String getRhsColumn()
{
return rhsColumn;
}
@Nullable
public String getValueForRewrite()
{
return valueForRewrite;
}
/**
* A direct rewrite occurs when we filter on an RHS column that is also part of a join equicondition.
*
* For example, if we have the filter (j.x = 'hello') and the join condition is (y = j.x), we can directly
* rewrite the j.x filter to (y = 'hello').
*/
public boolean isDirectRewrite()
{
return isDirectRewrite;
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
RhsRewriteCandidate that = (RhsRewriteCandidate) o;
return isDirectRewrite == that.isDirectRewrite &&
joinableClause.equals(that.joinableClause) &&
Objects.equals(rhsColumn, that.rhsColumn) &&
Objects.equals(valueForRewrite, that.valueForRewrite);
}
@Override
public int hashCode()
{
return Objects.hash(isDirectRewrite, joinableClause, rhsColumn, valueForRewrite);
}
}

View File

@ -0,0 +1,157 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.segment.join.filter.rewrite;
import org.apache.druid.query.filter.Filter;
import org.apache.druid.segment.filter.Filters;
import org.apache.druid.segment.filter.OrFilter;
import org.apache.druid.segment.filter.SelectorFilter;
import org.apache.druid.segment.join.JoinableClause;
import org.apache.druid.segment.join.filter.Equiconditions;
import org.apache.druid.segment.join.filter.JoinableClauses;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
public class RhsRewriteCandidates
{
private final Set<RhsRewriteCandidate> rhsRewriteCandidates;
private RhsRewriteCandidates(Set<RhsRewriteCandidate> rhsRewriteCandidates)
{
this.rhsRewriteCandidates = rhsRewriteCandidates;
}
public Set<RhsRewriteCandidate> getRhsRewriteCandidates()
{
return rhsRewriteCandidates;
}
/**
* Determine candidates for filter rewrites.
* A candidate is an RHS column that appears in a filter, along with the value being filtered on, plus
* the joinable clause associated with the table that the RHS column is from.
*
* These candidates are reduced to filter rewrite correlations.
*
* @param normalizedJoinTableClauses
* @param equiconditions
* @param joinableClauses
* @return A set of candidates for filter rewrites.
*/
public static RhsRewriteCandidates getRhsRewriteCandidates(
List<Filter> normalizedJoinTableClauses,
Equiconditions equiconditions,
JoinableClauses joinableClauses)
{
Set<RhsRewriteCandidate> rhsRewriteCandidates = new LinkedHashSet<>();
for (Filter orClause : normalizedJoinTableClauses) {
if (Filters.filterMatchesNull(orClause)) {
continue;
}
if (orClause instanceof OrFilter) {
for (Filter subFilter : ((OrFilter) orClause).getFilters()) {
Optional<RhsRewriteCandidate> rhsRewriteCandidate = determineRhsRewriteCandidatesForSingleFilter(
subFilter,
equiconditions,
joinableClauses
);
rhsRewriteCandidate.ifPresent(rhsRewriteCandidates::add);
}
continue;
}
Optional<RhsRewriteCandidate> rhsRewriteCandidate = determineRhsRewriteCandidatesForSingleFilter(
orClause,
equiconditions,
joinableClauses
);
rhsRewriteCandidate.ifPresent(rhsRewriteCandidates::add);
}
return new RhsRewriteCandidates(rhsRewriteCandidates);
}
private static Optional<RhsRewriteCandidate> determineRhsRewriteCandidatesForSingleFilter(
Filter orClause,
Equiconditions equiconditions,
JoinableClauses joinableClauses
)
{
// Check if the filter clause is on the RHS join column. If so, we can rewrite the clause to filter on the
// LHS join column instead.
// Currently, we only support rewrites of filters that operate on a single column for simplicity.
if (equiconditions.doesFilterSupportDirectJoinFilterRewrite(orClause)) {
String reqColumn = orClause.getRequiredColumns().iterator().next();
JoinableClause joinableClause = joinableClauses.getColumnFromJoinIfExists(reqColumn);
if (joinableClause != null) {
return Optional.of(
new RhsRewriteCandidate(
joinableClause,
reqColumn,
null,
true
)
);
}
} else if (orClause instanceof SelectorFilter) {
// this is a candidate for RHS filter rewrite, determine column correlations and correlated values
String reqColumn = ((SelectorFilter) orClause).getDimension();
String reqValue = ((SelectorFilter) orClause).getValue();
JoinableClause joinableClause = joinableClauses.getColumnFromJoinIfExists(reqColumn);
if (joinableClause != null) {
return Optional.of(
new RhsRewriteCandidate(
joinableClause,
reqColumn,
reqValue,
false
)
);
}
}
return Optional.empty();
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
RhsRewriteCandidates that = (RhsRewriteCandidates) o;
return Objects.equals(rhsRewriteCandidates, that.rhsRewriteCandidates);
}
@Override
public int hashCode()
{
return Objects.hash(rhsRewriteCandidates);
}
}

View File

@ -29,6 +29,7 @@ import org.apache.druid.segment.QueryableIndexSegment;
import org.apache.druid.segment.VirtualColumns;
import org.apache.druid.segment.join.filter.JoinFilterAnalyzer;
import org.apache.druid.segment.join.filter.JoinFilterPreAnalysis;
import org.apache.druid.segment.join.filter.JoinableClauses;
import org.apache.druid.segment.join.lookup.LookupJoinable;
import org.apache.druid.segment.join.table.IndexedTable;
import org.apache.druid.segment.join.table.IndexedTableJoinable;
@ -187,7 +188,7 @@ public class BaseHashJoinSegmentStorageAdapterTest
protected HashJoinSegmentStorageAdapter makeFactToCountrySegment()
{
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
ImmutableList.of(factToCountryOnIsoCode(JoinType.LEFT)),
JoinableClauses.fromList(ImmutableList.of(factToCountryOnIsoCode(JoinType.LEFT))),
VirtualColumns.EMPTY,
null,
true,

View File

@ -39,6 +39,7 @@ import org.apache.druid.segment.column.ValueType;
import org.apache.druid.segment.filter.SelectorFilter;
import org.apache.druid.segment.join.filter.JoinFilterAnalyzer;
import org.apache.druid.segment.join.filter.JoinFilterPreAnalysis;
import org.apache.druid.segment.join.filter.JoinableClauses;
import org.apache.druid.segment.join.lookup.LookupJoinable;
import org.apache.druid.segment.join.table.IndexedTableJoinable;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
@ -302,7 +303,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
List<JoinableClause> joinableClauses = ImmutableList.of(factToCountryOnIsoCode(JoinType.LEFT));
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
null,
true,
@ -371,7 +372,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
List<JoinableClause> joinableClauses = ImmutableList.of(factToCountryNameUsingIsoCodeLookup(JoinType.LEFT));
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
null,
true,
@ -438,7 +439,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
{
List<JoinableClause> joinableClauses = ImmutableList.of(factToCountryOnIsoCode(JoinType.INNER));
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
null,
true,
@ -500,7 +501,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
{
List<JoinableClause> joinableClauses = ImmutableList.of(factToCountryNameUsingIsoCodeLookup(JoinType.INNER));
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
null,
true,
@ -564,7 +565,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
List<JoinableClause> joinableClauses = ImmutableList.of(factToCountryOnNumber(JoinType.INNER));
Filter filter = new SelectorDimFilter("channel", "#en.wikipedia", null).toFilter();
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
filter,
true,
@ -634,7 +635,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
List<JoinableClause> joinableClauses = ImmutableList.of(factToCountryNameUsingNumberLookup(JoinType.INNER));
Filter filter = new SelectorDimFilter("channel", "#en.wikipedia", null).toFilter();
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
filter,
true,
@ -700,7 +701,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
List<JoinableClause> joinableClauses = ImmutableList.of(factToCountryOnIsoCode(JoinType.LEFT));
Filter filter = new SelectorDimFilter("channel", "#de.wikipedia", null).toFilter();
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
filter,
true,
@ -741,7 +742,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
List<JoinableClause> joinableClauses = ImmutableList.of(factToCountryNameUsingIsoCodeLookup(JoinType.LEFT));
Filter filter = new SelectorDimFilter("channel", "#de.wikipedia", null).toFilter();
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
filter,
true,
@ -781,7 +782,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
List<JoinableClause> joinableClauses = ImmutableList.of(factToCountryOnIsoCode(JoinType.RIGHT));
Filter filter = new SelectorDimFilter("channel", null, null).toFilter();
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
filter,
true,
@ -824,7 +825,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
List<JoinableClause> joinableClauses = ImmutableList.of(factToCountryNameUsingIsoCodeLookup(JoinType.RIGHT));
Filter filter = new SelectorDimFilter("channel", null, null).toFilter();
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
filter,
true,
@ -866,7 +867,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
List<JoinableClause> joinableClauses = ImmutableList.of(factToCountryOnIsoCode(JoinType.FULL));
Filter filter = new SelectorDimFilter("channel", null, null).toFilter();
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
filter,
true,
@ -909,7 +910,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
List<JoinableClause> joinableClauses = ImmutableList.of(factToCountryNameUsingIsoCodeLookup(JoinType.FULL));
Filter filter = new SelectorDimFilter("channel", null, null).toFilter();
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
filter,
true,
@ -956,7 +957,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
).toFilter();
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
filter,
true,
@ -1003,7 +1004,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
).toFilter();
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
filter,
true,
@ -1050,7 +1051,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
).toFilter();
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
filter,
true,
@ -1097,7 +1098,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
).toFilter();
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
filter,
true,
@ -1157,7 +1158,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
).toFilter();
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
filter,
true,
@ -1239,7 +1240,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
).toFilter();
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
filter,
true,
@ -1304,7 +1305,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
);
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
null,
true,
@ -1383,7 +1384,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
Filter filter = new SelectorDimFilter("channel", "#de.wikipedia", null).toFilter();
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
filter,
true,
@ -1450,7 +1451,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
Filter filter = new SelectorDimFilter("channel", "#de.wikipedia", null).toFilter();
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
filter,
true,
@ -1501,7 +1502,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
Filter filter = new SelectorDimFilter("channel", "#de.wikipedia", null).toFilter();
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
filter,
true,
@ -1569,7 +1570,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
Filter filter = new SelectorDimFilter("channel", "#de.wikipedia", null).toFilter();
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
filter,
true,
@ -1629,7 +1630,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
);
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
virtualColumns,
null,
true,
@ -1695,7 +1696,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
);
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
virtualColumns,
null,
true,
@ -1753,7 +1754,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
);
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
null,
true,
@ -1810,7 +1811,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
);
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
null,
true,
@ -1870,7 +1871,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
Filter filter = new SelectorDimFilter("regionIsoCode", "VA", null).toFilter();
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
filter,
true,
@ -1930,7 +1931,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
);
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
null,
true,
@ -1976,7 +1977,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
);
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
null,
true,
@ -2022,7 +2023,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
);
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
null,
true,
@ -2067,7 +2068,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
);
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
null,
true,
@ -2100,7 +2101,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
List<JoinableClause> joinableClauses = ImmutableList.of(factToCountryOnIsoCode(JoinType.LEFT));
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
originalFilter,
true,
@ -2139,7 +2140,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
Filter originalFilter = new SelectorFilter("page", "this matches nothing");
List<JoinableClause> joinableClauses = ImmutableList.of(factToCountryNameUsingIsoCodeLookup(JoinType.LEFT));
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
originalFilter,
true,
@ -2177,7 +2178,7 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
List<JoinableClause> joinableClauses = ImmutableList.of(factToCountryOnIsoCode(JoinType.LEFT));
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
null,
true,

View File

@ -27,6 +27,7 @@ import org.apache.druid.segment.QueryableIndexSegment;
import org.apache.druid.segment.VirtualColumns;
import org.apache.druid.segment.join.filter.JoinFilterAnalyzer;
import org.apache.druid.segment.join.filter.JoinFilterPreAnalysis;
import org.apache.druid.segment.join.filter.JoinableClauses;
import org.apache.druid.segment.join.table.IndexedTableJoinable;
import org.apache.druid.timeline.SegmentId;
import org.hamcrest.CoreMatchers;
@ -82,7 +83,7 @@ public class HashJoinSegmentTest
);
JoinFilterPreAnalysis joinFilterPreAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
null,
true,
@ -107,7 +108,7 @@ public class HashJoinSegmentTest
List<JoinableClause> joinableClauses = ImmutableList.of();
JoinFilterPreAnalysis joinFilterPreAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
null,
true,

View File

@ -44,6 +44,7 @@ import org.apache.druid.segment.filter.SelectorFilter;
import org.apache.druid.segment.join.filter.JoinFilterAnalyzer;
import org.apache.druid.segment.join.filter.JoinFilterPreAnalysis;
import org.apache.druid.segment.join.filter.JoinFilterSplit;
import org.apache.druid.segment.join.filter.JoinableClauses;
import org.apache.druid.segment.join.lookup.LookupJoinable;
import org.apache.druid.segment.join.table.IndexedTableJoinable;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
@ -466,9 +467,9 @@ public class JoinFilterAnalyzerTest extends BaseHashJoinSegmentStorageAdapterTes
)
);
List<JoinableClause> joinableClauses = ImmutableList.of(
JoinableClauses joinableClauses = JoinableClauses.fromList(ImmutableList.of(
factToRegion(JoinType.LEFT)
);
));
JoinFilterPreAnalysis joinFilterPreAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
@ -482,7 +483,7 @@ public class JoinFilterAnalyzerTest extends BaseHashJoinSegmentStorageAdapterTes
HashJoinSegmentStorageAdapter adapter = new HashJoinSegmentStorageAdapter(
factSegment.asStorageAdapter(),
joinableClauses,
joinableClauses.getJoinableClauses(),
joinFilterPreAnalysis
);
@ -1974,10 +1975,10 @@ public class JoinFilterAnalyzerTest extends BaseHashJoinSegmentStorageAdapterTes
@Test
public void test_filterPushDown_factToRegionToCountryLeftFilterOnPageDisablePushDown()
{
List<JoinableClause> joinableClauses = ImmutableList.of(
JoinableClauses joinableClauses = JoinableClauses.fromList(ImmutableList.of(
factToRegion(JoinType.LEFT),
regionToCountry(JoinType.LEFT)
);
));
Filter originalFilter = new SelectorFilter("page", "Peremptory norm");
JoinFilterPreAnalysis joinFilterPreAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
@ -1991,7 +1992,7 @@ public class JoinFilterAnalyzerTest extends BaseHashJoinSegmentStorageAdapterTes
);
HashJoinSegmentStorageAdapter adapter = new HashJoinSegmentStorageAdapter(
factSegment.asStorageAdapter(),
joinableClauses,
joinableClauses.getJoinableClauses(),
joinFilterPreAnalysis
);
@ -2026,10 +2027,10 @@ public class JoinFilterAnalyzerTest extends BaseHashJoinSegmentStorageAdapterTes
@Test
public void test_filterPushDown_factToRegionToCountryLeftEnablePushDownDisableRewrite()
{
List<JoinableClause> joinableClauses = ImmutableList.of(
JoinableClauses joinableClauses = JoinableClauses.fromList(ImmutableList.of(
factToRegion(JoinType.LEFT),
regionToCountry(JoinType.LEFT)
);
));
Filter originalFilter = new AndFilter(
ImmutableList.of(
new SelectorFilter("channel", "#en.wikipedia"),
@ -2063,7 +2064,7 @@ public class JoinFilterAnalyzerTest extends BaseHashJoinSegmentStorageAdapterTes
);
HashJoinSegmentStorageAdapter adapter = new HashJoinSegmentStorageAdapter(
factSegment.asStorageAdapter(),
joinableClauses,
joinableClauses.getJoinableClauses(),
joinFilterPreAnalysis
);
@ -2450,7 +2451,7 @@ public class JoinFilterAnalyzerTest extends BaseHashJoinSegmentStorageAdapterTes
)
{
return JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
JoinableClauses.fromList(joinableClauses),
VirtualColumns.EMPTY,
originalFilter,
true,

View File

@ -0,0 +1,32 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.segment.join.filter;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.junit.Test;
public class JoinFilterColumnCorrelationAnalysisTest
{
@Test
public void testEqualsContract()
{
EqualsVerifier.forClass(JoinFilterColumnCorrelationAnalysis.class).usingGetClass().verify();
}
}

View File

@ -0,0 +1,32 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.segment.join.filter.rewrite;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.junit.Test;
public class RhsRewriteCandidateTest
{
@Test
public void testEqualsContract()
{
EqualsVerifier.forClass(RhsRewriteCandidate.class).usingGetClass().verify();
}
}

View File

@ -0,0 +1,32 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.segment.join.filter.rewrite;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.junit.Test;
public class RhsRewriteCandidatesTest
{
@Test
public void testEqualsContract()
{
EqualsVerifier.forClass(RhsRewriteCandidates.class).usingGetClass().verify();
}
}