Fail incorrectly constructed join queries (#9830)

* Fail incorrectly constructed join queries

* wip annotation for equals implementations

* Add equals tests

* fix tests

* Actually fix the tests

* Address review comments

* prohibit Pattern.hashCode()
Suneet Saldanha 2020-05-13 14:23:04 -07:00 committed by GitHub
parent 6bc1d1b33f
commit b0167295d7
64 changed files with 2204 additions and 399 deletions
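Background on the last bullet, added here for context (not part of the commit): java.util.regex.Pattern does not override equals() or hashCode(), so two patterns compiled from the same regex compare by identity, and using a Pattern field directly in equals()/hashCode() silently breaks value semantics. A minimal, hypothetical illustration of the pitfall the new inspections guard against:

import java.util.Objects;
import java.util.regex.Pattern;

public class PatternEqualityDemo
{
  public static void main(String[] args)
  {
    Pattern a = Pattern.compile("foo.*");
    Pattern b = Pattern.compile("foo.*");

    // Pattern inherits Object.equals()/hashCode(), so these are identity-based.
    System.out.println(a.equals(b));                  // false
    System.out.println(a.hashCode() == b.hashCode()); // false in practice

    // Comparing the underlying regex strings restores value semantics.
    System.out.println(Objects.equals(a.pattern(), b.pattern())); // true
  }
}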


@@ -69,7 +69,6 @@
</option>
<option name="IGNORE_FIELDS_USED_IN_MULTIPLE_METHODS" value="true" />
</inspection_tool>
<inspection_tool class="FieldMayBeFinal" enabled="true" level="WARNING" enabled_by_default="true" />
<inspection_tool class="FinalStaticMethod" enabled="true" level="ERROR" enabled_by_default="true" />
<inspection_tool class="FlowJSError" enabled="false" level="Non-TeamCity Error" enabled_by_default="false" />
<inspection_tool class="ForCanBeForeach" enabled="true" level="WARNING" enabled_by_default="true">
@@ -181,22 +180,22 @@
<constraint name="y" nameOfExprType="IndexedInts" exprTypeWithinHierarchy="true" within="" contains="" />
</searchConfiguration>
<searchConfiguration name="Lists.newArrayList() with a single argument. Use Collections.singletonList() instead" created="1532737126203" text="Lists.newArrayList($x$)" recursive="false" caseInsensitive="true" type="JAVA">
<constraint name="x" nameOfExprType="java\.lang\.Iterable|java\.util\.Iterator|Object\[\]" exprTypeWithinHierarchy="true" negateName="true" negateExprType="true" within="" contains="" />
<constraint name="__context__" target="true" within="" contains="" />
</searchConfiguration>
<searchConfiguration name="Math.abs(rnd.nextInt()) doensn't guarantee positive result. Use nextInt() &amp; Integer.MAX_VALUE or nextInt(Integer.MAX_VALUE)" created="1535067616084" text="$Math$.abs($x$.nextInt())" recursive="false" caseInsensitive="true" type="JAVA">
<constraint name="__context__" target="true" within="" contains="" />
<constraint name="x" nameOfFormalType="java\.util\.Random" exprTypeWithinHierarchy="true" formalTypeWithinHierarchy="true" within="" contains="" />
<constraint name="Math" within="" contains="" />
</searchConfiguration>
<searchConfiguration name="Math.abs(rnd.nextLong()) doesn't guarantee positive result. Use nextLong() &amp; Long.MAX_VALUE" created="1535067616084" text="$Math$.abs($x$.nextLong())" recursive="false" caseInsensitive="true" type="JAVA">
<constraint name="__context__" target="true" within="" contains="" />
<constraint name="x" nameOfFormalType="java\.util\.Random" exprTypeWithinHierarchy="true" formalTypeWithinHierarchy="true" within="" contains="" />
<constraint name="Math" within="" contains="" />
</searchConfiguration>
<searchConfiguration name="Use nextInt(bound) instead" created="1535068047572" text="$x$.nextInt() % $a$" recursive="false" caseInsensitive="true" type="JAVA">
<constraint name="__context__" target="true" within="" contains="" />
<constraint name="x" nameOfFormalType="java\.util\.Random" exprTypeWithinHierarchy="true" formalTypeWithinHierarchy="true" within="" contains="" />
<constraint name="a" within="" contains="" />
</searchConfiguration>
<searchConfiguration name="Use ZKPaths.makePath() with many arguments" created="1537504371779" text="org.apache.curator.utils.ZKPaths.makePath(org.apache.curator.utils.ZKPaths.makePath($x$, $y$), $z$)" recursive="false" caseInsensitive="true" type="JAVA">
@@ -272,29 +271,29 @@
<constraint name="K" within="" contains="" />
<constraint name="V" within="" contains="" />
<constraint name="x" within="" contains="" />
<constraint name="y" nameOfExprType="java\.util\.concurrent\.ConcurrentMap" exprTypeWithinHierarchy="true" within="" contains="" />
</searchConfiguration>
<searchConfiguration name="A ConcurrentHashMap on which compute() is called should be assinged into variables of ConcurrentHashMap type, not ConcurrentMap" text="$x$.compute($y$, $z$)" recursive="true" caseInsensitive="true" type="JAVA">
<constraint name="__context__" target="true" within="" contains="" />
<constraint name="x" nameOfExprType="java\.util\.concurrent\.ConcurrentMap" within="" contains="" />
<constraint name="y" within="" contains="" />
<constraint name="z" within="" contains="" />
</searchConfiguration>
<searchConfiguration name="A ConcurrentHashMap on which computeIfAbsent() is called should be assigned into variables of ConcurrentHashMap type, not ConcurrentMap" text="$x$.computeIfAbsent($y$, $z$)" recursive="true" caseInsensitive="true" type="JAVA">
<constraint name="__context__" target="true" within="" contains="" />
<constraint name="x" nameOfExprType="java\.util\.concurrent\.ConcurrentMap" within="" contains="" />
<constraint name="y" within="" contains="" />
<constraint name="z" within="" contains="" />
</searchConfiguration>
<searchConfiguration name="A ConcurrentHashMap on which computeIfPresent() is called should be assigned into variables of ConcurrentHashMap type, not ConcurrentMap" text="$x$.computeIfPresent($y$, $z$)" recursive="true" caseInsensitive="true" type="JAVA">
<constraint name="__context__" target="true" within="" contains="" />
<constraint name="x" nameOfExprType="java\.util\.concurrent\.ConcurrentMap" within="" contains="" />
<constraint name="y" within="" contains="" />
<constraint name="z" within="" contains="" />
</searchConfiguration>
<searchConfiguration name="A ConcurrentHashMap on which merge() is called should be assigned into variables of ConcurrentHashMap type, not ConcurrentMap" text="$x$.merge($a$, $b$, $c$)" recursive="true" caseInsensitive="true" type="JAVA">
<constraint name="__context__" target="true" within="" contains="" />
<constraint name="x" nameOfExprType="java\.util\.concurrent\.ConcurrentMap" within="" contains="" />
<constraint name="a" within="" contains="" />
<constraint name="b" within="" contains="" />
<constraint name="c" within="" contains="" />
@@ -373,8 +372,27 @@
</searchConfiguration>
<searchConfiguration name="Create a simple ExecutorService (not scheduled)" text="$x$ = $y$;" recursive="true" caseInsensitive="true" type="JAVA" pattern_context="default">
<constraint name="__context__" within="" contains="" />
<constraint name="x" nameOfExprType="java\.util\.concurrent\.ExecutorService" within="" contains="" />
<constraint name="y" nameOfExprType="java\.util\.concurrent\.ScheduledExecutorService" exprTypeWithinHierarchy="true" within="" contains="" />
</searchConfiguration>
<searchConfiguration name="Use equals on Pattern.toString()" text="$a$.equals($b$)" recursive="true" caseInsensitive="true" type="JAVA" pattern_context="default">
<constraint name="__context__" within="" contains="" />
<constraint name="a" nameOfExprType="Pattern" within="" contains="" />
<constraint name="b" within="" contains="" />
</searchConfiguration>
<searchConfiguration name="Use Objects.equals() on Pattern.toString()" text="java.util.Objects.equals($a$, $b$)" recursive="true" caseInsensitive="true" type="JAVA" pattern_context="default">
<constraint name="__context__" within="" contains="" />
<constraint name="a" nameOfExprType="Pattern" within="" contains="" />
<constraint name="b" within="" contains="" />
</searchConfiguration>
<searchConfiguration name="Use hashCode of Pattern.toString() instead" text="$a$.hashCode()" recursive="true" caseInsensitive="true" type="JAVA" pattern_context="default">
<constraint name="__context__" nameOfExprType="java\.util\.regex\.Pattern" within="" contains="" />
<constraint name="a" within="" contains="" />
</searchConfiguration>
<searchConfiguration name="Use Objects.hash() on Pattern.toString() instead" text="java.util.Objects.hash($b$,$a$)" recursive="true" caseInsensitive="true" type="JAVA" pattern_context="default">
<constraint name="__context__" within="" contains="" />
<constraint name="a" nameOfExprType="java\.util\.regex\.Pattern" within="" contains="" />
<constraint name="b" minCount="0" maxCount="2147483647" within="" contains="" />
</searchConfiguration>
</inspection_tool>
<inspection_tool class="SimplifyStreamApiCallChains" enabled="true" level="ERROR" enabled_by_default="true" />


@@ -0,0 +1,41 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.annotations;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* An annotation that tells all subclasses of the annotated class to not use the default implementation of hashCode
* and equals that is provided by {@link Object}.
*
* This annotation is useful on classes that you expect will be used in equals checks in other parts of the codebase.
*/
@Documented
@Inherited
@Retention(RetentionPolicy.CLASS)
@Target(ElementType.TYPE)
public @interface SubclassesMustOverrideEqualsAndHashCode
{
}
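As a usage illustration (not part of the diff): placing the annotation on an extension-point type signals that every implementation must supply value-based equals() and hashCode(), which is how the types annotated later in this commit (Expr, Bound, DimensionSpec, ExtractionFn, DruidPredicateFactory, Filter, FilterTuning, SearchQuerySpec) use it. A hypothetical example:

import java.util.Objects;
import org.apache.druid.annotations.SubclassesMustOverrideEqualsAndHashCode;

@SubclassesMustOverrideEqualsAndHashCode
interface ExampleSpec
{
  String getName();
}

// A conforming implementation: equality is defined by the spec's value, not object identity.
class NamedSpec implements ExampleSpec
{
  private final String name;

  NamedSpec(String name)
  {
    this.name = name;
  }

  @Override
  public String getName()
  {
    return name;
  }

  @Override
  public boolean equals(Object o)
  {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    return Objects.equals(name, ((NamedSpec) o).name);
  }

  @Override
  public int hashCode()
  {
    return Objects.hash(name);
  }
}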


@@ -27,6 +27,7 @@ import com.google.common.collect.Sets;
import com.google.common.math.LongMath;
import com.google.common.primitives.Ints;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.druid.annotations.SubclassesMustOverrideEqualsAndHashCode;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.ISE;
@@ -46,6 +47,7 @@ import java.util.stream.Collectors;
* Base interface of Druid expression language abstract syntax tree nodes. All {@link Expr} implementations are
* immutable.
*/
@SubclassesMustOverrideEqualsAndHashCode
public interface Expr
{
String NULL_LITERAL = "null";
@@ -522,6 +524,25 @@ class LongExpr extends ConstantExpr
{
return ExprEval.ofLong(value);
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
LongExpr longExpr = (LongExpr) o;
return Objects.equals(value, longExpr.value);
}
@Override
public int hashCode()
{
return Objects.hash(value);
}
}
class NullLongExpr extends NullNumericConstantExpr
@@ -531,6 +552,18 @@ class NullLongExpr extends NullNumericConstantExpr
{
return ExprEval.ofLong(null);
}
@Override
public final int hashCode()
{
return NullLongExpr.class.hashCode();
}
@Override
public final boolean equals(Object obj)
{
return obj instanceof NullLongExpr;
}
}
@@ -569,6 +602,25 @@ class LongArrayExpr extends ConstantExpr
}
return StringUtils.format("<LONG>%s", toString());
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
LongArrayExpr that = (LongArrayExpr) o;
return Arrays.equals(value, that.value);
}
@Override
public int hashCode()
{
return Arrays.hashCode(value);
}
}
class StringExpr extends ConstantExpr
@@ -606,6 +658,25 @@ class StringExpr extends ConstantExpr
// escape as javascript string since string literals are wrapped in single quotes
return value == null ? NULL_LITERAL : StringUtils.format("'%s'", StringEscapeUtils.escapeJavaScript(value));
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
StringExpr that = (StringExpr) o;
return Objects.equals(value, that.value);
}
@Override
public int hashCode()
{
return Objects.hash(value);
}
}
class StringArrayExpr extends ConstantExpr
@@ -655,6 +726,25 @@ class StringArrayExpr extends ConstantExpr
)
);
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
StringArrayExpr that = (StringArrayExpr) o;
return Arrays.equals(value, that.value);
}
@Override
public int hashCode()
{
return Arrays.hashCode(value);
}
}
class DoubleExpr extends ConstantExpr
@@ -683,6 +773,25 @@ class DoubleExpr extends ConstantExpr
{
return ExprEval.ofDouble(value);
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
DoubleExpr that = (DoubleExpr) o;
return Objects.equals(value, that.value);
}
@Override
public int hashCode()
{
return Objects.hash(value);
}
}
class NullDoubleExpr extends NullNumericConstantExpr
@@ -692,6 +801,18 @@ class NullDoubleExpr extends NullNumericConstantExpr
{
return ExprEval.ofDouble(null);
}
@Override
public final int hashCode()
{
return NullDoubleExpr.class.hashCode();
}
@Override
public final boolean equals(Object obj)
{
return obj instanceof NullDoubleExpr;
}
}
class DoubleArrayExpr extends ConstantExpr
@@ -729,6 +850,25 @@ class DoubleArrayExpr extends ConstantExpr
}
return StringUtils.format("<DOUBLE>%s", toString());
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
DoubleArrayExpr that = (DoubleArrayExpr) o;
return Arrays.equals(value, that.value);
}
@Override
public int hashCode()
{
return Arrays.hashCode(value);
}
}
/**
@@ -837,6 +977,25 @@ class IdentifierExpr implements Expr
{
return shuttle.visit(this);
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
IdentifierExpr that = (IdentifierExpr) o;
return Objects.equals(identifier, that.identifier);
}
@Override
public int hashCode()
{
return Objects.hash(identifier);
}
}
class LambdaExpr implements Expr
@@ -921,6 +1080,26 @@ class LambdaExpr implements Expr
BindingDetails bodyDetails = expr.analyzeInputs();
return bodyDetails.removeLambdaArguments(lambdaArgs);
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
LambdaExpr that = (LambdaExpr) o;
return Objects.equals(args, that.args) &&
Objects.equals(expr, that.expr);
}
@Override
public int hashCode()
{
return Objects.hash(args, expr);
}
}
/**
@@ -989,6 +1168,26 @@ class FunctionExpr implements Expr
.withArrayInputs(function.hasArrayInputs())
.withArrayOutput(function.hasArrayOutput());
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
FunctionExpr that = (FunctionExpr) o;
return args.equals(that.args) &&
name.equals(that.name);
}
@Override
public int hashCode()
{
return Objects.hash(args, name);
}
}
/**
@@ -1080,6 +1279,27 @@ class ApplyFunctionExpr implements Expr
{
return bindingDetails;
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ApplyFunctionExpr that = (ApplyFunctionExpr) o;
return name.equals(that.name) &&
lambdaExpr.equals(that.lambdaExpr) &&
argsExpr.equals(that.argsExpr);
}
@Override
public int hashCode()
{
return Objects.hash(name, lambdaExpr, argsExpr);
}
}
/**
@@ -1120,6 +1340,25 @@ abstract class UnaryExpr implements Expr
// currently all unary operators only operate on scalar inputs
return expr.analyzeInputs().withScalarArguments(ImmutableSet.of(expr));
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
UnaryExpr unaryExpr = (UnaryExpr) o;
return Objects.equals(expr, unaryExpr.expr);
}
@Override
public int hashCode()
{
return Objects.hash(expr);
}
}
class UnaryMinusExpr extends UnaryExpr
@@ -1262,6 +1501,27 @@ abstract class BinaryOpExprBase implements Expr
// currently all binary operators operate on scalar inputs
return left.analyzeInputs().with(right).withScalarArguments(ImmutableSet.of(left, right));
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
BinaryOpExprBase that = (BinaryOpExprBase) o;
return Objects.equals(op, that.op) &&
Objects.equals(left, that.left) &&
Objects.equals(right, that.right);
}
@Override
public int hashCode()
{
return Objects.hash(op, left, right);
}
}
/**
@@ -1711,5 +1971,6 @@ class BinOrExpr extends BinaryOpExprBase
ExprEval leftVal = left.eval(bindings);
return leftVal.asBoolean() ? leftVal : right.eval(bindings);
}
}


@@ -30,6 +30,7 @@ import javax.annotation.Nullable;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
@@ -129,6 +130,26 @@ public class ExprMacroTable
return StringUtils.format("%s(%s)", name, arg.stringify());
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
BaseScalarUnivariateMacroFunctionExpr that = (BaseScalarUnivariateMacroFunctionExpr) o;
return Objects.equals(name, that.name) &&
Objects.equals(arg, that.arg);
}
@Override
public int hashCode()
{
return Objects.hash(name, arg);
}
private BindingDetails supplyAnalyzeInputs()
{
return arg.analyzeInputs().withScalarArguments(ImmutableSet.of(arg));
@@ -178,6 +199,26 @@ public class ExprMacroTable
return analyzeInputsSupplier.get();
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
BaseScalarMacroFunctionExpr that = (BaseScalarMacroFunctionExpr) o;
return Objects.equals(name, that.name) &&
Objects.equals(args, that.args);
}
@Override
public int hashCode()
{
return Objects.hash(name, args);
}
private BindingDetails supplyAnalyzeInputs()
{
final Set<Expr> argSet = Sets.newHashSetWithExpectedSize(args.size());


@@ -0,0 +1,190 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.math.expr;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.junit.Test;
public class ExprTest
{
@Test
public void testEqualsContractForBinOrExpr()
{
EqualsVerifier.forClass(BinOrExpr.class).usingGetClass().verify();
}
@Test
public void testEqualsContractForBinGtExpr()
{
EqualsVerifier.forClass(BinGtExpr.class).usingGetClass().verify();
}
@Test
public void testEqualsContractForBinMinusExpr()
{
EqualsVerifier.forClass(BinMinusExpr.class).usingGetClass().verify();
}
@Test
public void testEqualsContractForBinPowExpr()
{
EqualsVerifier.forClass(BinPowExpr.class).usingGetClass().verify();
}
@Test
public void testEqualsContractForBinMulExpr()
{
EqualsVerifier.forClass(BinMulExpr.class).usingGetClass().verify();
}
@Test
public void testEqualsContractForBinDivExpr()
{
EqualsVerifier.forClass(BinDivExpr.class).usingGetClass().verify();
}
@Test
public void testEqualsContractForBinModuloExpr()
{
EqualsVerifier.forClass(BinModuloExpr.class).usingGetClass().verify();
}
@Test
public void testEqualsContractForBinPlusExpr()
{
EqualsVerifier.forClass(BinPlusExpr.class).usingGetClass().verify();
}
@Test
public void testEqualsContractForBinLtExpr()
{
EqualsVerifier.forClass(BinLtExpr.class).usingGetClass().verify();
}
@Test
public void testEqualsContractForBinGeqExpr()
{
EqualsVerifier.forClass(BinGeqExpr.class).usingGetClass().verify();
}
@Test
public void testEqualsContractForBinEqExpr()
{
EqualsVerifier.forClass(BinEqExpr.class).usingGetClass().verify();
}
@Test
public void testEqualsContractForBinNeqExpr()
{
EqualsVerifier.forClass(BinNeqExpr.class).usingGetClass().verify();
}
@Test
public void testEqualsContractForBinAndExpr()
{
EqualsVerifier.forClass(BinAndExpr.class).usingGetClass().verify();
}
@Test
public void testEqualsContractForFunctionExpr()
{
EqualsVerifier.forClass(FunctionExpr.class).usingGetClass().withIgnoredFields("function").verify();
}
@Test
public void testEqualsContractForApplyFunctionExpr()
{
EqualsVerifier.forClass(ApplyFunctionExpr.class)
.usingGetClass()
.withIgnoredFields("function", "bindingDetails", "lambdaBindingDetails", "argsBindingDetails")
.verify();
}
@Test
public void testEqualsContractForUnaryNotExpr()
{
EqualsVerifier.forClass(UnaryNotExpr.class).usingGetClass().verify();
}
@Test
public void testEqualsContractForUnaryMinusExpr()
{
EqualsVerifier.forClass(UnaryMinusExpr.class).usingGetClass().verify();
}
@Test
public void testEqualsContractForStringExpr()
{
EqualsVerifier.forClass(StringExpr.class).usingGetClass().verify();
}
@Test
public void testEqualsContractForDoubleExpr()
{
EqualsVerifier.forClass(DoubleExpr.class).usingGetClass().verify();
}
@Test
public void testEqualsContractForLongExpr()
{
EqualsVerifier.forClass(LongExpr.class).usingGetClass().verify();
}
@Test
public void testEqualsContractForStringArrayExpr()
{
EqualsVerifier.forClass(StringArrayExpr.class).usingGetClass().verify();
}
@Test
public void testEqualsContractForLongArrayExpr()
{
EqualsVerifier.forClass(LongArrayExpr.class).usingGetClass().verify();
}
@Test
public void testEqualsContractForDoubleArrayExpr()
{
EqualsVerifier.forClass(DoubleArrayExpr.class).usingGetClass().verify();
}
@Test
public void testEqualsContractForIdentifierExpr()
{
EqualsVerifier.forClass(IdentifierExpr.class).usingGetClass().withIgnoredFields("binding").verify();
}
@Test
public void testEqualsContractForLambdaExpr()
{
EqualsVerifier.forClass(LambdaExpr.class).usingGetClass().verify();
}
@Test
public void testEqualsContractForNullLongExpr()
{
EqualsVerifier.forClass(NullLongExpr.class).verify();
}
@Test
public void testEqualsContractForNullDoubleExpr()
{
EqualsVerifier.forClass(NullDoubleExpr.class).verify();
}
}


@@ -204,6 +204,12 @@
<artifactId>equalsverifier</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.reflections</groupId>
<artifactId>reflections</artifactId>
<version>0.9.12</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>pl.pragmatists</groupId>
<artifactId>JUnitParams</artifactId>
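The org.reflections test dependency is new here; presumably it lets a test scan the classpath for types carrying @SubclassesMustOverrideEqualsAndHashCode (the annotation has CLASS retention, which bytecode scanners can still read) and assert that every concrete implementation overrides equals(). The actual test is not shown in this excerpt; the following is only a rough sketch of such a scan:

import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Set;
import org.apache.druid.annotations.SubclassesMustOverrideEqualsAndHashCode;
import org.reflections.Reflections;

public class EqualsOverrideScan
{
  public static void main(String[] args) throws Exception
  {
    // Scan the Druid packages for annotated types (assumed package prefix).
    Reflections reflections = new Reflections("org.apache.druid");
    Set<Class<?>> annotated =
        reflections.getTypesAnnotatedWith(SubclassesMustOverrideEqualsAndHashCode.class);

    for (Class<?> clazz : annotated) {
      if (clazz.isInterface() || Modifier.isAbstract(clazz.getModifiers())) {
        continue;
      }
      // getMethod resolves the most specific public override; if it is still
      // Object.equals, the class never overrode it.
      Method equals = clazz.getMethod("equals", Object.class);
      if (equals.getDeclaringClass() == Object.class) {
        System.out.println("Missing equals() override: " + clazz.getName());
      }
    }
  }
}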


@@ -21,6 +21,7 @@ package org.apache.druid.collections.spatial.search;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import org.apache.druid.annotations.SubclassesMustOverrideEqualsAndHashCode;
import org.apache.druid.collections.spatial.ImmutableNode;
import org.apache.druid.collections.spatial.ImmutablePoint;
@@ -32,6 +33,7 @@ import org.apache.druid.collections.spatial.ImmutablePoint;
@JsonSubTypes.Type(name = "radius", value = RadiusBound.class),
@JsonSubTypes.Type(name = "polygon", value = PolygonBound.class)
})
@SubclassesMustOverrideEqualsAndHashCode
public interface Bound
{
int getLimit();


@@ -27,6 +27,7 @@ import com.google.common.collect.Iterables;
import org.apache.druid.collections.spatial.ImmutablePoint;
import java.nio.ByteBuffer;
import java.util.Arrays;
/**
*/
@@ -195,4 +196,30 @@ public class PolygonBound extends RectangularBound
return cacheKey.array();
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
if (!super.equals(o)) {
return false;
}
PolygonBound that = (PolygonBound) o;
return Arrays.equals(abscissa, that.abscissa) &&
Arrays.equals(ordinate, that.ordinate);
}
@Override
public int hashCode()
{
int result = super.hashCode();
result = 31 * result + Arrays.hashCode(abscissa);
result = 31 * result + Arrays.hashCode(ordinate);
return result;
}
}


@@ -28,6 +28,8 @@ import org.apache.druid.collections.spatial.ImmutableNode;
import org.apache.druid.collections.spatial.ImmutablePoint;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Objects;
/**
*/
@@ -151,4 +153,29 @@ public class RectangularBound implements Bound
.put(CACHE_TYPE_ID);
return cacheKey.array();
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
RectangularBound that = (RectangularBound) o;
return limit == that.limit &&
numDims == that.numDims &&
Arrays.equals(minCoords, that.minCoords) &&
Arrays.equals(maxCoords, that.maxCoords);
}
@Override
public int hashCode()
{
int result = Objects.hash(limit, numDims);
result = 31 * result + Arrays.hashCode(minCoords);
result = 31 * result + Arrays.hashCode(maxCoords);
return result;
}
}


@@ -21,6 +21,7 @@ package org.apache.druid.query.dimension;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import org.apache.druid.annotations.SubclassesMustOverrideEqualsAndHashCode;
import org.apache.druid.java.util.common.Cacheable;
import org.apache.druid.java.util.common.UOE;
import org.apache.druid.query.extraction.ExtractionFn;
@@ -43,6 +44,7 @@ import javax.annotation.Nullable;
@JsonSubTypes.Type(name = "listFiltered", value = ListFilteredDimensionSpec.class),
@JsonSubTypes.Type(name = "prefixFiltered", value = PrefixFilteredDimensionSpec.class)
})
@SubclassesMustOverrideEqualsAndHashCode
public interface DimensionSpec extends Cacheable
{
String getDimension();


@@ -136,7 +136,6 @@ public class RegexFilteredDimensionSpec extends BaseFilteredDimensionSpec
return false;
}
return pattern.equals(that.pattern);
}
@Override


@@ -19,6 +19,7 @@
package org.apache.druid.query.expression;
import com.google.common.annotations.VisibleForTesting;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.granularity.Granularity;
@@ -30,6 +31,7 @@ import org.joda.time.DateTime;
import javax.annotation.Nonnull;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
public class TimestampCeilExprMacro implements ExprMacroTable.ExprMacro
@@ -56,7 +58,8 @@ public class TimestampCeilExprMacro implements ExprMacroTable.ExprMacro
}
}
@VisibleForTesting
static class TimestampCeilExpr extends ExprMacroTable.BaseScalarMacroFunctionExpr
{
private final Granularity granularity;
@@ -89,6 +92,28 @@ public class TimestampCeilExprMacro implements ExprMacroTable.ExprMacro
List<Expr> newArgs = args.stream().map(x -> x.visit(shuttle)).collect(Collectors.toList());
return shuttle.visit(new TimestampCeilExpr(newArgs));
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
if (!super.equals(o)) {
return false;
}
TimestampCeilExpr that = (TimestampCeilExpr) o;
return Objects.equals(granularity, that.granularity);
}
@Override
public int hashCode()
{
return Objects.hash(super.hashCode(), granularity);
}
}
private static PeriodGranularity getGranularity(final List<Expr> args, final Expr.ObjectBinding bindings)
@@ -101,7 +126,8 @@ public class TimestampCeilExprMacro implements ExprMacroTable.ExprMacro
);
}
@VisibleForTesting
static class TimestampCeilDynamicExpr extends ExprMacroTable.BaseScalarMacroFunctionExpr
{
TimestampCeilDynamicExpr(final List<Expr> args)
{


@@ -28,6 +28,7 @@ import org.apache.druid.math.expr.ExprMacroTable;
import javax.annotation.Nonnull;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
public class TimestampFloorExprMacro implements ExprMacroTable.ExprMacro
@@ -109,6 +110,28 @@ public class TimestampFloorExprMacro implements ExprMacroTable.ExprMacro
return shuttle.visit(new TimestampFloorExpr(newArgs));
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
if (!super.equals(o)) {
return false;
}
TimestampFloorExpr that = (TimestampFloorExpr) o;
return Objects.equals(granularity, that.granularity);
}
@Override
public int hashCode()
{
return Objects.hash(super.hashCode(), granularity);
}
}
public static class TimestampFloorDynamicExpr extends ExprMacroTable.BaseScalarMacroFunctionExpr


@@ -19,6 +19,7 @@
package org.apache.druid.query.expression;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableSet;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.StringUtils;
@@ -27,7 +28,9 @@ import org.apache.druid.math.expr.ExprEval;
import org.apache.druid.math.expr.ExprMacroTable;
import javax.annotation.Nonnull;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
public abstract class TrimExprMacro implements ExprMacroTable.ExprMacro
{
@@ -101,7 +104,8 @@ public abstract class TrimExprMacro implements ExprMacroTable.ExprMacro
}
}
@VisibleForTesting
static class TrimStaticCharsExpr extends ExprMacroTable.BaseScalarUnivariateMacroFunctionExpr
{
private final TrimMode mode;
private final char[] chars;
@@ -172,9 +176,36 @@ public abstract class TrimExprMacro implements ExprMacroTable.ExprMacro
}
return super.stringify();
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
if (!super.equals(o)) {
return false;
}
TrimStaticCharsExpr that = (TrimStaticCharsExpr) o;
return mode == that.mode &&
Arrays.equals(chars, that.chars) &&
Objects.equals(charsExpr, that.charsExpr);
}
@Override
public int hashCode()
{
int result = Objects.hash(super.hashCode(), mode, charsExpr);
result = 31 * result + Arrays.hashCode(chars);
return result;
}
}
@VisibleForTesting
static class TrimDynamicCharsExpr implements Expr
{
private final TrimMode mode;
private final Expr stringExpr;
@@ -265,6 +296,27 @@ public abstract class TrimExprMacro implements ExprMacroTable.ExprMacro
.with(charsExpr)
.withScalarArguments(ImmutableSet.of(stringExpr, charsExpr));
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
TrimDynamicCharsExpr that = (TrimDynamicCharsExpr) o;
return mode == that.mode &&
Objects.equals(stringExpr, that.stringExpr) &&
Objects.equals(charsExpr, that.charsExpr);
}
@Override
public int hashCode()
{
return Objects.hash(mode, stringExpr, charsExpr);
}
}
private static boolean arrayContains(char[] array, char c)


@@ -21,20 +21,22 @@ package org.apache.druid.query.extraction;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.primitives.Bytes;
import javax.annotation.Nullable;
import java.util.Arrays;
import java.util.Objects;
public class CascadeExtractionFn implements ExtractionFn
{
@VisibleForTesting
static final ChainedExtractionFn DEFAULT_CHAINED_EXTRACTION_FN = new ChainedExtractionFn(
new ExtractionFn()
{
private static final String NAME = "nullExtractionFn{}";
@Override
public byte[] getCacheKey()
{
@@ -76,12 +78,28 @@ public class CascadeExtractionFn implements ExtractionFn
@Override
public String toString()
{
return NAME;
}
@Override
public int hashCode()
{
return NAME.hashCode();
}
@Override
public boolean equals(Object obj)
{
return obj != null
&& getClass().equals(obj.getClass());
} }
},
null
);
private final ExtractionFn[] extractionFns;
private final ChainedExtractionFn chainedExtractionFn;
@JsonCreator
public CascadeExtractionFn(
@JsonProperty("extractionFns") ExtractionFn[] extractionFn
@@ -172,7 +190,9 @@ public class CascadeExtractionFn implements ExtractionFn
@Override
public int hashCode()
{
int result = Objects.hash(chainedExtractionFn);
result = 31 * result + Arrays.hashCode(extractionFns);
return result;
}
@Override
@@ -181,7 +201,8 @@
return "CascadeExtractionFn{extractionFns=[" + chainedExtractionFn + "]}";
}
@VisibleForTesting
static class ChainedExtractionFn
{
private final ExtractionFn fn;
private final ChainedExtractionFn child;
@@ -240,27 +261,15 @@
if (o == null || getClass() != o.getClass()) {
return false;
}
ChainedExtractionFn that = (ChainedExtractionFn) o;
return Objects.equals(fn, that.fn) &&
Objects.equals(child, that.child);
if (!fn.equals(that.fn)) {
return false;
}
if (child != null && !child.equals(that.child)) {
return false;
}
return true;
}
@Override
public int hashCode()
{
return Objects.hash(fn, child);
int result = fn.hashCode();
if (child != null) {
result = 31 * result + child.hashCode();
}
return result;
}
@Override


@@ -21,6 +21,7 @@ package org.apache.druid.query.extraction;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import org.apache.druid.annotations.SubclassesMustOverrideEqualsAndHashCode;
import org.apache.druid.guice.annotations.ExtensionPoint;
import org.apache.druid.java.util.common.Cacheable;
import org.apache.druid.query.lookup.LookupExtractionFn;
@@ -54,6 +55,7 @@ import javax.annotation.Nullable;
@JsonSubTypes.Type(name = "bucket", value = BucketExtractionFn.class),
@JsonSubTypes.Type(name = "strlen", value = StrlenExtractionFn.class)
})
@SubclassesMustOverrideEqualsAndHashCode
public interface ExtractionFn extends Cacheable
{
/**


@@ -27,6 +27,7 @@ import org.apache.druid.java.util.common.StringUtils;
import javax.annotation.Nullable;
import java.nio.ByteBuffer;
import java.util.Locale;
import java.util.Objects;
@JsonTypeName("lower")
public class LowerExtractionFn extends DimExtractionFn
@@ -80,4 +81,24 @@ public class LowerExtractionFn extends DimExtractionFn
.put(localeBytes)
.array();
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
LowerExtractionFn that = (LowerExtractionFn) o;
return Objects.equals(locale, that.locale) &&
Objects.equals(localeString, that.localeString);
}
@Override
public int hashCode()
{
return Objects.hash(locale, localeString);
}
}


@@ -65,4 +65,16 @@ public class StrlenExtractionFn extends DimExtractionFn
{
return new byte[]{ExtractionCacheHelper.CACHE_TYPE_ID_STRLEN};
}
@Override
public final int hashCode()
{
return StrlenExtractionFn.class.hashCode();
}
@Override
public final boolean equals(Object obj)
{
return obj instanceof StrlenExtractionFn;
}
}


@@ -27,6 +27,7 @@ import org.apache.druid.java.util.common.StringUtils;
import javax.annotation.Nullable;
import java.nio.ByteBuffer;
import java.util.Locale;
import java.util.Objects;
@JsonTypeName("upper")
public class UpperExtractionFn extends DimExtractionFn
@@ -79,4 +80,24 @@ public class UpperExtractionFn extends DimExtractionFn
.put(localeBytes)
.array();
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
UpperExtractionFn that = (UpperExtractionFn) o;
return Objects.equals(locale, that.locale) &&
Objects.equals(localeString, that.localeString);
}
@Override
public int hashCode()
{
return Objects.hash(locale, localeString);
}
}


@@ -20,7 +20,9 @@
package org.apache.druid.query.filter;
import com.google.common.base.Predicate;
import org.apache.druid.annotations.SubclassesMustOverrideEqualsAndHashCode;
@SubclassesMustOverrideEqualsAndHashCode
public interface DruidPredicateFactory
{
Predicate<String> makeStringPredicate();


@@ -19,6 +19,7 @@
package org.apache.druid.query.filter;
import org.apache.druid.annotations.SubclassesMustOverrideEqualsAndHashCode;
import org.apache.druid.collections.bitmap.ImmutableBitmap;
import org.apache.druid.java.util.common.UOE;
import org.apache.druid.query.BitmapResultFactory;
@@ -31,6 +32,7 @@ import org.apache.druid.segment.vector.VectorColumnSelectorFactory;
import java.util.Map;
import java.util.Set;
@SubclassesMustOverrideEqualsAndHashCode
public interface Filter
{
/**

View File

@@ -21,6 +21,7 @@ package org.apache.druid.query.filter;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.druid.annotations.SubclassesMustOverrideEqualsAndHashCode;
import javax.annotation.Nullable;
import java.util.Objects;
@@ -45,6 +46,7 @@ import java.util.Objects;
* As such, it is currently undocumented in user facing documentation on purpose, but whatever this turns into once more
* automatic usage of this is in place, should be documented in a future release.
*/
@SubclassesMustOverrideEqualsAndHashCode
public class FilterTuning
{
public static FilterTuning createDefault(Filter filter, BitmapIndexSelector selector)

View File

@@ -284,6 +284,11 @@ public class LikeDimFilter implements DimFilter
}
public boolean matches(@Nullable final String s)
{
return matches(s, pattern);
}
private static boolean matches(@Nullable final String s, Pattern pattern)
{
String val = NullHandling.nullToEmptyIfNeeded(s);
return val != null && pattern.matcher(val).matches();
@@ -310,48 +315,7 @@ public class LikeDimFilter implements DimFilter
public DruidPredicateFactory predicateFactory(final ExtractionFn extractionFn)
{
return new PatternDruidPredicateFactory(extractionFn, pattern);
return new DruidPredicateFactory()
{
@Override
public Predicate<String> makeStringPredicate()
{
if (extractionFn != null) {
return input -> matches(extractionFn.apply(input));
} else {
return input -> matches(input);
}
}
@Override
public DruidLongPredicate makeLongPredicate()
{
if (extractionFn != null) {
return input -> matches(extractionFn.apply(input));
} else {
return input -> matches(String.valueOf(input));
}
}
@Override
public DruidFloatPredicate makeFloatPredicate()
{
if (extractionFn != null) {
return input -> matches(extractionFn.apply(input));
} else {
return input -> matches(String.valueOf(input));
}
}
@Override
public DruidDoublePredicate makeDoublePredicate()
{
if (extractionFn != null) {
return input -> matches(extractionFn.apply(input));
} else {
return input -> matches(String.valueOf(input));
}
}
};
}
public String getPrefix()
@@ -364,6 +328,79 @@
return suffixMatch;
}
@VisibleForTesting
static class PatternDruidPredicateFactory implements DruidPredicateFactory
{
private final ExtractionFn extractionFn;
private final Pattern pattern;
PatternDruidPredicateFactory(ExtractionFn extractionFn, Pattern pattern)
{
this.extractionFn = extractionFn;
this.pattern = pattern;
}
@Override
public Predicate<String> makeStringPredicate()
{
if (extractionFn != null) {
return input -> matches(extractionFn.apply(input), pattern);
} else {
return input -> matches(input, pattern);
}
}
@Override
public DruidLongPredicate makeLongPredicate()
{
if (extractionFn != null) {
return input -> matches(extractionFn.apply(input), pattern);
} else {
return input -> matches(String.valueOf(input), pattern);
}
}
@Override
public DruidFloatPredicate makeFloatPredicate()
{
if (extractionFn != null) {
return input -> matches(extractionFn.apply(input), pattern);
} else {
return input -> matches(String.valueOf(input), pattern);
}
}
@Override
public DruidDoublePredicate makeDoublePredicate()
{
if (extractionFn != null) {
return input -> matches(extractionFn.apply(input), pattern);
} else {
return input -> matches(String.valueOf(input), pattern);
}
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
PatternDruidPredicateFactory that = (PatternDruidPredicateFactory) o;
return Objects.equals(extractionFn, that.extractionFn) &&
Objects.equals(pattern.toString(), that.pattern.toString());
}
@Override
public int hashCode()
{
return Objects.hash(extractionFn, pattern.toString());
}
}
@Override
public boolean equals(Object o)
{


@@ -24,6 +24,7 @@ import com.google.common.base.Predicates;
import org.apache.druid.segment.DimensionHandlerUtils;
import javax.annotation.Nullable;
import java.util.Objects;
/**
* A {@link DruidPredicateFactory} that checks if input values equal a specific, provided value. Initialization work
@@ -147,4 +148,23 @@ public class SelectorPredicateFactory implements DruidPredicateFactory
}
}
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
SelectorPredicateFactory that = (SelectorPredicateFactory) o;
return Objects.equals(value, that.value);
}
@Override
public int hashCode()
{
return Objects.hash(value);
}
} }

View File

@ -21,6 +21,7 @@ package org.apache.druid.query.search;
import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeInfo;
import org.apache.druid.annotations.SubclassesMustOverrideEqualsAndHashCode;
import javax.annotation.Nullable; import javax.annotation.Nullable;
@ -34,6 +35,7 @@ import javax.annotation.Nullable;
@JsonSubTypes.Type(name = "regex", value = RegexSearchQuerySpec.class), @JsonSubTypes.Type(name = "regex", value = RegexSearchQuerySpec.class),
@JsonSubTypes.Type(name = "all", value = AllSearchQuerySpec.class) @JsonSubTypes.Type(name = "all", value = AllSearchQuerySpec.class)
}) })
@SubclassesMustOverrideEqualsAndHashCode
public interface SearchQuerySpec public interface SearchQuerySpec
{ {
boolean accept(@Nullable String dimVal); boolean accept(@Nullable String dimVal);
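The SubclassesMustOverrideEqualsAndHashCode annotation imported above is defined elsewhere in this patch and is not shown in this excerpt. A plausible minimal form is sketched below; the retention and target are assumptions here, though runtime retention is implied by the Reflections-based enforcement test that appears later in the diff:

package org.apache.druid.annotations;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Marker annotation: concrete implementations under the annotated type are expected
 * to override equals() and hashCode(). Enforcement happens via classpath scanning in
 * SubclassesMustOverrideEqualsAndHashCodeTest rather than at compile time.
 */
@Retention(RetentionPolicy.RUNTIME) // must be visible at runtime for the Reflections scan
@Target(ElementType.TYPE)
public @interface SubclassesMustOverrideEqualsAndHashCode
{
}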

View File

@ -19,6 +19,7 @@
package org.apache.druid.segment.filter; package org.apache.druid.segment.filter;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Predicate; import com.google.common.base.Predicate;
import com.google.common.base.Supplier; import com.google.common.base.Supplier;
import it.unimi.dsi.fastutil.ints.IntList; import it.unimi.dsi.fastutil.ints.IntList;
@ -47,7 +48,6 @@ import org.apache.druid.segment.IntListUtils;
import org.apache.druid.segment.column.BitmapIndex; import org.apache.druid.segment.column.BitmapIndex;
import org.apache.druid.segment.vector.VectorColumnSelectorFactory; import org.apache.druid.segment.vector.VectorColumnSelectorFactory;
import java.util.Comparator;
import java.util.Map; import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.Set; import java.util.Set;
@ -55,22 +55,13 @@ import java.util.Set;
public class BoundFilter implements Filter public class BoundFilter implements Filter
{ {
private final BoundDimFilter boundDimFilter; private final BoundDimFilter boundDimFilter;
private final Comparator<String> comparator;
private final ExtractionFn extractionFn; private final ExtractionFn extractionFn;
private final FilterTuning filterTuning; private final FilterTuning filterTuning;
private final Supplier<DruidLongPredicate> longPredicateSupplier;
private final Supplier<DruidFloatPredicate> floatPredicateSupplier;
private final Supplier<DruidDoublePredicate> doublePredicateSupplier;
public BoundFilter(final BoundDimFilter boundDimFilter) public BoundFilter(final BoundDimFilter boundDimFilter)
{ {
this.boundDimFilter = boundDimFilter; this.boundDimFilter = boundDimFilter;
this.comparator = boundDimFilter.getOrdering();
this.extractionFn = boundDimFilter.getExtractionFn(); this.extractionFn = boundDimFilter.getExtractionFn();
this.longPredicateSupplier = boundDimFilter.getLongPredicateSupplier();
this.floatPredicateSupplier = boundDimFilter.getFloatPredicateSupplier();
this.doublePredicateSupplier = boundDimFilter.getDoublePredicateSupplier();
this.filterTuning = boundDimFilter.getFilterTuning(); this.filterTuning = boundDimFilter.getFilterTuning();
} }
@ -81,7 +72,7 @@ public class BoundFilter implements Filter
final BitmapIndex bitmapIndex = selector.getBitmapIndex(boundDimFilter.getDimension()); final BitmapIndex bitmapIndex = selector.getBitmapIndex(boundDimFilter.getDimension());
if (bitmapIndex == null || bitmapIndex.getCardinality() == 0) { if (bitmapIndex == null || bitmapIndex.getCardinality() == 0) {
if (doesMatch(null)) { if (doesMatchNull()) {
return bitmapResultFactory.wrapAllTrue(Filters.allTrue(selector)); return bitmapResultFactory.wrapAllTrue(Filters.allTrue(selector));
} else { } else {
return bitmapResultFactory.wrapAllFalse(Filters.allFalse(selector)); return bitmapResultFactory.wrapAllFalse(Filters.allFalse(selector));
@ -106,7 +97,7 @@ public class BoundFilter implements Filter
final BitmapIndex bitmapIndex = indexSelector.getBitmapIndex(boundDimFilter.getDimension()); final BitmapIndex bitmapIndex = indexSelector.getBitmapIndex(boundDimFilter.getDimension());
if (bitmapIndex == null || bitmapIndex.getCardinality() == 0) { if (bitmapIndex == null || bitmapIndex.getCardinality() == 0) {
return doesMatch(null) ? 1. : 0.; return doesMatchNull() ? 1. : 0.;
} }
return Filters.estimateSelectivity( return Filters.estimateSelectivity(
@ -265,57 +256,15 @@ public class BoundFilter implements Filter
private DruidPredicateFactory getPredicateFactory() private DruidPredicateFactory getPredicateFactory()
{ {
return new DruidPredicateFactory() return new BoundDimFilterDruidPredicateFactory(extractionFn, boundDimFilter);
{
@Override
public Predicate<String> makeStringPredicate()
{
if (extractionFn != null) {
return input -> doesMatch(extractionFn.apply(input));
}
return input -> doesMatch(input);
} }
@Override private boolean doesMatchNull()
public DruidLongPredicate makeLongPredicate()
{ {
if (extractionFn != null) { return doesMatch(null, boundDimFilter);
return input -> doesMatch(extractionFn.apply(input));
}
if (boundDimFilter.getOrdering().equals(StringComparators.NUMERIC)) {
return longPredicateSupplier.get();
}
return input -> doesMatch(String.valueOf(input));
} }
@Override private static boolean doesMatch(String input, BoundDimFilter boundDimFilter)
public DruidFloatPredicate makeFloatPredicate()
{
if (extractionFn != null) {
return input -> doesMatch(extractionFn.apply(input));
}
if (boundDimFilter.getOrdering().equals(StringComparators.NUMERIC)) {
return floatPredicateSupplier.get();
}
return input -> doesMatch(String.valueOf(input));
}
@Override
public DruidDoublePredicate makeDoublePredicate()
{
if (extractionFn != null) {
return input -> doesMatch(extractionFn.apply(input));
}
if (boundDimFilter.getOrdering().equals(StringComparators.NUMERIC)) {
return doublePredicateSupplier.get();
}
return input -> doesMatch(String.valueOf(input));
}
};
}
private boolean doesMatch(String input)
{ {
if (input == null) { if (input == null) {
return (!boundDimFilter.hasLowerBound() return (!boundDimFilter.hasLowerBound()
@ -328,10 +277,10 @@ public class BoundFilter implements Filter
int lowerComparing = 1; int lowerComparing = 1;
int upperComparing = 1; int upperComparing = 1;
if (boundDimFilter.hasLowerBound()) { if (boundDimFilter.hasLowerBound()) {
lowerComparing = comparator.compare(input, boundDimFilter.getLower()); lowerComparing = boundDimFilter.getOrdering().compare(input, boundDimFilter.getLower());
} }
if (boundDimFilter.hasUpperBound()) { if (boundDimFilter.hasUpperBound()) {
upperComparing = comparator.compare(boundDimFilter.getUpper(), input); upperComparing = boundDimFilter.getOrdering().compare(boundDimFilter.getUpper(), input);
} }
if (boundDimFilter.isLowerStrict() && boundDimFilter.isUpperStrict()) { if (boundDimFilter.isLowerStrict() && boundDimFilter.isUpperStrict()) {
return ((lowerComparing > 0)) && (upperComparing > 0); return ((lowerComparing > 0)) && (upperComparing > 0);
@ -354,7 +303,6 @@ public class BoundFilter implements Filter
} }
BoundFilter that = (BoundFilter) o; BoundFilter that = (BoundFilter) o;
return Objects.equals(boundDimFilter, that.boundDimFilter) && return Objects.equals(boundDimFilter, that.boundDimFilter) &&
Objects.equals(comparator, that.comparator) &&
Objects.equals(extractionFn, that.extractionFn) && Objects.equals(extractionFn, that.extractionFn) &&
Objects.equals(filterTuning, that.filterTuning); Objects.equals(filterTuning, that.filterTuning);
} }
@ -362,6 +310,91 @@ public class BoundFilter implements Filter
@Override @Override
public int hashCode() public int hashCode()
{ {
return Objects.hash(boundDimFilter, comparator, extractionFn, filterTuning); return Objects.hash(boundDimFilter, extractionFn, filterTuning);
}
@VisibleForTesting
static class BoundDimFilterDruidPredicateFactory implements DruidPredicateFactory
{
private final ExtractionFn extractionFn;
private final BoundDimFilter boundDimFilter;
private final Supplier<DruidLongPredicate> longPredicateSupplier;
private final Supplier<DruidFloatPredicate> floatPredicateSupplier;
private final Supplier<DruidDoublePredicate> doublePredicateSupplier;
BoundDimFilterDruidPredicateFactory(ExtractionFn extractionFn, BoundDimFilter boundDimFilter)
{
this.extractionFn = extractionFn;
this.boundDimFilter = boundDimFilter;
this.longPredicateSupplier = boundDimFilter.getLongPredicateSupplier();
this.floatPredicateSupplier = boundDimFilter.getFloatPredicateSupplier();
this.doublePredicateSupplier = boundDimFilter.getDoublePredicateSupplier();
}
@Override
public Predicate<String> makeStringPredicate()
{
if (extractionFn != null) {
return input -> doesMatch(extractionFn.apply(input), boundDimFilter);
}
return input -> doesMatch(input, boundDimFilter);
}
@Override
public DruidLongPredicate makeLongPredicate()
{
if (extractionFn != null) {
return input -> doesMatch(extractionFn.apply(input), boundDimFilter);
}
if (boundDimFilter.getOrdering().equals(StringComparators.NUMERIC)) {
return longPredicateSupplier.get();
}
return input -> doesMatch(String.valueOf(input), boundDimFilter);
}
@Override
public DruidFloatPredicate makeFloatPredicate()
{
if (extractionFn != null) {
return input -> doesMatch(extractionFn.apply(input), boundDimFilter);
}
if (boundDimFilter.getOrdering().equals(StringComparators.NUMERIC)) {
return floatPredicateSupplier.get();
}
return input -> doesMatch(String.valueOf(input), boundDimFilter);
}
@Override
public DruidDoublePredicate makeDoublePredicate()
{
if (extractionFn != null) {
return input -> doesMatch(extractionFn.apply(input), boundDimFilter);
}
if (boundDimFilter.getOrdering().equals(StringComparators.NUMERIC)) {
return doublePredicateSupplier.get();
}
return input -> doesMatch(String.valueOf(input), boundDimFilter);
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
BoundDimFilterDruidPredicateFactory that = (BoundDimFilterDruidPredicateFactory) o;
return Objects.equals(extractionFn, that.extractionFn) &&
Objects.equals(boundDimFilter, that.boundDimFilter);
}
@Override
public int hashCode()
{
return Objects.hash(extractionFn, boundDimFilter);
}
} }
} }
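Note that the three predicate suppliers above are derived entirely from boundDimFilter in the constructor, so they carry no independent state; that is presumably why they stay out of equals()/hashCode() here and are ignored by the EqualsVerifier test further down. A tiny sketch of the derived-field pattern, all names invented for the example:

import java.util.Objects;
import java.util.function.Supplier;

class DerivedFieldExample
{
  private final String source;                    // real state, participates in equality
  private final Supplier<Integer> lengthSupplier; // derived from 'source', excluded from equality

  DerivedFieldExample(String source)
  {
    this.source = source;
    this.lengthSupplier = source::length; // always recomputable, never diverges from 'source'
  }

  @Override
  public boolean equals(Object o)
  {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    return Objects.equals(source, ((DerivedFieldExample) o).source);
  }

  @Override
  public int hashCode()
  {
    return Objects.hash(source);
  }
}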

View File

@ -163,6 +163,26 @@ public class ColumnComparisonFilter implements Filter
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
} }
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ColumnComparisonFilter that = (ColumnComparisonFilter) o;
return Objects.equals(dimensions, that.dimensions);
}
@Override
public int hashCode()
{
return Objects.hash(dimensions);
}
private static class ColumnComparisonReaderFactory implements ColumnProcessorFactory<Supplier<String[]>> private static class ColumnComparisonReaderFactory implements ColumnProcessorFactory<Supplier<String[]>>
{ {
private static final ColumnComparisonReaderFactory INSTANCE = new ColumnComparisonReaderFactory(); private static final ColumnComparisonReaderFactory INSTANCE = new ColumnComparisonReaderFactory();

View File

@ -19,6 +19,7 @@
package org.apache.druid.segment.filter; package org.apache.druid.segment.filter;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions; import com.google.common.base.Preconditions;
import com.google.common.base.Predicate; import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSet;
@ -78,73 +79,7 @@ public class DimensionPredicateFilter implements Filter
if (extractionFn == null) { if (extractionFn == null) {
this.predicateFactory = predicateFactory; this.predicateFactory = predicateFactory;
} else { } else {
this.predicateFactory = new DruidPredicateFactory() this.predicateFactory = new DelegatingStringPredicateFactory(predicateFactory, extractionFn);
{
final Predicate<String> baseStringPredicate = predicateFactory.makeStringPredicate();
@Override
public Predicate<String> makeStringPredicate()
{
return input -> baseStringPredicate.apply(extractionFn.apply(input));
}
@Override
public DruidLongPredicate makeLongPredicate()
{
return new DruidLongPredicate()
{
@Override
public boolean applyLong(long input)
{
return baseStringPredicate.apply(extractionFn.apply(input));
}
@Override
public boolean applyNull()
{
return baseStringPredicate.apply(extractionFn.apply(null));
}
};
}
@Override
public DruidFloatPredicate makeFloatPredicate()
{
return new DruidFloatPredicate()
{
@Override
public boolean applyFloat(float input)
{
return baseStringPredicate.apply(extractionFn.apply(input));
}
@Override
public boolean applyNull()
{
return baseStringPredicate.apply(extractionFn.apply(null));
}
};
}
@Override
public DruidDoublePredicate makeDoublePredicate()
{
return new DruidDoublePredicate()
{
@Override
public boolean applyDouble(double input)
{
return baseStringPredicate.apply(extractionFn.apply(input));
}
@Override
public boolean applyNull()
{
return baseStringPredicate.apply(extractionFn.apply(null));
}
};
}
};
} }
} }
@ -220,6 +155,104 @@ public class DimensionPredicateFilter implements Filter
} }
} }
@VisibleForTesting
static class DelegatingStringPredicateFactory implements DruidPredicateFactory
{
private final Predicate<String> baseStringPredicate;
private final DruidPredicateFactory predicateFactory;
private final ExtractionFn extractionFn;
DelegatingStringPredicateFactory(DruidPredicateFactory predicateFactory, ExtractionFn extractionFn)
{
this.predicateFactory = predicateFactory;
this.baseStringPredicate = predicateFactory.makeStringPredicate();
this.extractionFn = extractionFn;
}
@Override
public Predicate<String> makeStringPredicate()
{
return input -> baseStringPredicate.apply(extractionFn.apply(input));
}
@Override
public DruidLongPredicate makeLongPredicate()
{
return new DruidLongPredicate()
{
@Override
public boolean applyLong(long input)
{
return baseStringPredicate.apply(extractionFn.apply(input));
}
@Override
public boolean applyNull()
{
return baseStringPredicate.apply(extractionFn.apply(null));
}
};
}
@Override
public DruidFloatPredicate makeFloatPredicate()
{
return new DruidFloatPredicate()
{
@Override
public boolean applyFloat(float input)
{
return baseStringPredicate.apply(extractionFn.apply(input));
}
@Override
public boolean applyNull()
{
return baseStringPredicate.apply(extractionFn.apply(null));
}
};
}
@Override
public DruidDoublePredicate makeDoublePredicate()
{
return new DruidDoublePredicate()
{
@Override
public boolean applyDouble(double input)
{
return baseStringPredicate.apply(extractionFn.apply(input));
}
@Override
public boolean applyNull()
{
return baseStringPredicate.apply(extractionFn.apply(null));
}
};
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
DelegatingStringPredicateFactory that = (DelegatingStringPredicateFactory) o;
return Objects.equals(predicateFactory, that.predicateFactory) &&
Objects.equals(extractionFn, that.extractionFn);
}
@Override
public int hashCode()
{
return Objects.hash(predicateFactory, extractionFn);
}
}
@Override @Override
public boolean equals(Object o) public boolean equals(Object o)
{ {

View File

@ -39,6 +39,7 @@ import org.apache.druid.segment.ColumnValueSelector;
import org.apache.druid.segment.virtual.ExpressionSelectors; import org.apache.druid.segment.virtual.ExpressionSelectors;
import java.util.Arrays; import java.util.Arrays;
import java.util.Objects;
import java.util.Set; import java.util.Set;
public class ExpressionFilter implements Filter public class ExpressionFilter implements Filter
@ -183,4 +184,34 @@ public class ExpressionFilter implements Filter
// We could support this, but need a good approach to rewriting the identifiers within an expression. // We could support this, but need a good approach to rewriting the identifiers within an expression.
return false; return false;
} }
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ExpressionFilter that = (ExpressionFilter) o;
return Objects.equals(expr, that.expr) &&
Objects.equals(filterTuning, that.filterTuning);
}
@Override
public int hashCode()
{
return Objects.hash(expr, filterTuning);
}
@Override
public String toString()
{
return "ExpressionFilter{" +
"expr=" + expr +
", requiredBindings=" + requiredBindings +
", filterTuning=" + filterTuning +
'}';
}
} }

View File

@ -117,4 +117,16 @@ public class FalseFilter implements Filter
{ {
return "false"; return "false";
} }
@Override
public final int hashCode()
{
return FalseFilter.class.hashCode();
}
@Override
public final boolean equals(Object obj)
{
return obj instanceof FalseFilter;
}
} }

View File

@ -19,6 +19,7 @@
package org.apache.druid.segment.filter; package org.apache.druid.segment.filter;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Predicate; import com.google.common.base.Predicate;
import com.google.common.base.Supplier; import com.google.common.base.Supplier;
import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSet;
@ -133,14 +134,9 @@ public class InFilter implements Filter
private IntIterable getBitmapIndexIterable(final BitmapIndex bitmapIndex) private IntIterable getBitmapIndexIterable(final BitmapIndex bitmapIndex)
{ {
return new IntIterable() return () -> new IntIterator()
{ {
@Override final Iterator<String> iterator = values.iterator();
public IntIterator iterator()
{
return new IntIterator()
{
Iterator<String> iterator = values.iterator();
@Override @Override
public boolean hasNext() public boolean hasNext()
@ -155,8 +151,6 @@ public class InFilter implements Filter
} }
}; };
} }
};
}
@Override @Override
public ValueMatcher makeMatcher(ColumnSelectorFactory factory) public ValueMatcher makeMatcher(ColumnSelectorFactory factory)
@ -231,8 +225,55 @@ public class InFilter implements Filter
private DruidPredicateFactory getPredicateFactory() private DruidPredicateFactory getPredicateFactory()
{ {
return new DruidPredicateFactory() return new InFilterDruidPredicateFactory(extractionFn, values, longPredicateSupplier, floatPredicateSupplier, doublePredicateSupplier);
}
@Override
public boolean equals(Object o)
{ {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
InFilter inFilter = (InFilter) o;
return Objects.equals(dimension, inFilter.dimension) &&
Objects.equals(values, inFilter.values) &&
Objects.equals(extractionFn, inFilter.extractionFn) &&
Objects.equals(filterTuning, inFilter.filterTuning);
}
@Override
public int hashCode()
{
return Objects.hash(dimension, values, extractionFn, filterTuning);
}
@VisibleForTesting
static class InFilterDruidPredicateFactory implements DruidPredicateFactory
{
private final ExtractionFn extractionFn;
private final Set<String> values;
private final Supplier<DruidLongPredicate> longPredicateSupplier;
private final Supplier<DruidFloatPredicate> floatPredicateSupplier;
private final Supplier<DruidDoublePredicate> doublePredicateSupplier;
InFilterDruidPredicateFactory(
ExtractionFn extractionFn,
Set<String> values,
Supplier<DruidLongPredicate> longPredicateSupplier,
Supplier<DruidFloatPredicate> floatPredicateSupplier,
Supplier<DruidDoublePredicate> doublePredicateSupplier
)
{
this.extractionFn = extractionFn;
this.values = values;
this.longPredicateSupplier = longPredicateSupplier;
this.floatPredicateSupplier = floatPredicateSupplier;
this.doublePredicateSupplier = doublePredicateSupplier;
}
@Override @Override
public Predicate<String> makeStringPredicate() public Predicate<String> makeStringPredicate()
{ {
@ -271,8 +312,6 @@ public class InFilter implements Filter
} }
return input -> doublePredicateSupplier.get().applyDouble(input); return input -> doublePredicateSupplier.get().applyDouble(input);
} }
};
}
@Override @Override
public boolean equals(Object o) public boolean equals(Object o)
@ -283,16 +322,15 @@ public class InFilter implements Filter
if (o == null || getClass() != o.getClass()) { if (o == null || getClass() != o.getClass()) {
return false; return false;
} }
InFilter inFilter = (InFilter) o; InFilterDruidPredicateFactory that = (InFilterDruidPredicateFactory) o;
return Objects.equals(dimension, inFilter.dimension) && return Objects.equals(extractionFn, that.extractionFn) &&
Objects.equals(values, inFilter.values) && Objects.equals(values, that.values);
Objects.equals(extractionFn, inFilter.extractionFn) &&
Objects.equals(filterTuning, inFilter.filterTuning);
} }
@Override @Override
public int hashCode() public int hashCode()
{ {
return Objects.hash(dimension, values, extractionFn, filterTuning); return Objects.hash(extractionFn, values);
}
} }
} }

View File

@ -31,6 +31,7 @@ import org.apache.druid.segment.ColumnSelector;
import org.apache.druid.segment.ColumnSelectorFactory; import org.apache.druid.segment.ColumnSelectorFactory;
import org.mozilla.javascript.Context; import org.mozilla.javascript.Context;
import java.util.Objects;
import java.util.Set; import java.util.Set;
public class JavaScriptFilter implements Filter public class JavaScriptFilter implements Filter
@ -116,4 +117,25 @@ public class JavaScriptFilter implements Filter
{ {
return ImmutableSet.of(dimension); return ImmutableSet.of(dimension);
} }
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
JavaScriptFilter that = (JavaScriptFilter) o;
return Objects.equals(dimension, that.dimension) &&
Objects.equals(predicateFactory, that.predicateFactory) &&
Objects.equals(filterTuning, that.filterTuning);
}
@Override
public int hashCode()
{
return Objects.hash(dimension, predicateFactory, filterTuning);
}
} }

View File

@ -19,6 +19,7 @@
package org.apache.druid.segment.filter; package org.apache.druid.segment.filter;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Predicate; import com.google.common.base.Predicate;
import org.apache.druid.java.util.common.IAE; import org.apache.druid.java.util.common.IAE;
import org.apache.druid.query.extraction.ExtractionFn; import org.apache.druid.query.extraction.ExtractionFn;
@ -48,8 +49,23 @@ public class RegexFilter extends DimensionPredicateFilter
{ {
super( super(
dimension, dimension,
new DruidPredicateFactory() new PatternDruidPredicateFactory(pattern),
extractionFn,
filterTuning
);
this.pattern = pattern;
}
@VisibleForTesting
static class PatternDruidPredicateFactory implements DruidPredicateFactory
{ {
private final Pattern pattern;
PatternDruidPredicateFactory(Pattern pattern)
{
this.pattern = pattern;
}
@Override @Override
public Predicate<String> makeStringPredicate() public Predicate<String> makeStringPredicate()
{ {
@ -74,6 +90,34 @@ public class RegexFilter extends DimensionPredicateFilter
return input -> pattern.matcher(String.valueOf(input)).find(); return input -> pattern.matcher(String.valueOf(input)).find();
} }
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
PatternDruidPredicateFactory that = (PatternDruidPredicateFactory) o;
return Objects.equals(pattern.toString(), that.pattern.toString());
}
@Override
public int hashCode()
{
return Objects.hash(pattern.toString());
}
@Override
public String toString()
{
return "RegexFilter$PatternDruidPredicateFactory{" +
"pattern='" + pattern + '\'' +
'}';
}
}
@Override @Override
public String toString() public String toString()
{ {
@ -81,12 +125,6 @@ public class RegexFilter extends DimensionPredicateFilter
"pattern='" + pattern + '\'' + "pattern='" + pattern + '\'' +
'}'; '}';
} }
},
extractionFn,
filterTuning
);
this.pattern = pattern;
}
@Override @Override
public boolean supportsRequiredColumnRewrite() public boolean supportsRequiredColumnRewrite()

View File

@ -21,6 +21,7 @@ package org.apache.druid.segment.filter;
import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Predicate; import com.google.common.base.Predicate;
import org.apache.druid.java.util.common.IAE; import org.apache.druid.java.util.common.IAE;
import org.apache.druid.query.extraction.ExtractionFn; import org.apache.druid.query.extraction.ExtractionFn;
@ -51,40 +52,7 @@ public class SearchQueryFilter extends DimensionPredicateFilter
{ {
super( super(
dimension, dimension,
new DruidPredicateFactory() new SearchQueryDruidPredicateFactory(query),
{
@Override
public Predicate<String> makeStringPredicate()
{
return input -> query.accept(input);
}
@Override
public DruidLongPredicate makeLongPredicate()
{
return input -> query.accept(String.valueOf(input));
}
@Override
public DruidFloatPredicate makeFloatPredicate()
{
return input -> query.accept(String.valueOf(input));
}
@Override
public DruidDoublePredicate makeDoublePredicate()
{
return input -> query.accept(String.valueOf(input));
}
@Override
public String toString()
{
return "SearchFilter{" +
"query='" + query + '\'' +
'}';
}
},
extractionFn, extractionFn,
filterTuning filterTuning
); );
@ -119,6 +87,14 @@ public class SearchQueryFilter extends DimensionPredicateFilter
); );
} }
@Override
public String toString()
{
return "SearchFilter{" +
"query='" + query + '\'' +
'}';
}
@Override @Override
public boolean equals(Object o) public boolean equals(Object o)
{ {
@ -140,4 +116,58 @@ public class SearchQueryFilter extends DimensionPredicateFilter
{ {
return Objects.hash(super.hashCode(), query); return Objects.hash(super.hashCode(), query);
} }
@VisibleForTesting
static class SearchQueryDruidPredicateFactory implements DruidPredicateFactory
{
private final SearchQuerySpec query;
SearchQueryDruidPredicateFactory(SearchQuerySpec query)
{
this.query = query;
}
@Override
public Predicate<String> makeStringPredicate()
{
return input -> query.accept(input);
}
@Override
public DruidLongPredicate makeLongPredicate()
{
return input -> query.accept(String.valueOf(input));
}
@Override
public DruidFloatPredicate makeFloatPredicate()
{
return input -> query.accept(String.valueOf(input));
}
@Override
public DruidDoublePredicate makeDoublePredicate()
{
return input -> query.accept(String.valueOf(input));
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
SearchQueryDruidPredicateFactory that = (SearchQueryDruidPredicateFactory) o;
return Objects.equals(query, that.query);
}
@Override
public int hashCode()
{
return Objects.hash(query);
}
}
} }

View File

@ -19,6 +19,7 @@
package org.apache.druid.segment.filter; package org.apache.druid.segment.filter;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions; import com.google.common.base.Preconditions;
import com.google.common.base.Predicate; import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSet;
@ -37,6 +38,7 @@ import org.apache.druid.segment.ColumnSelector;
import org.apache.druid.segment.ColumnSelectorFactory; import org.apache.druid.segment.ColumnSelectorFactory;
import org.apache.druid.segment.incremental.SpatialDimensionRowTransformer; import org.apache.druid.segment.incremental.SpatialDimensionRowTransformer;
import java.util.Objects;
import java.util.Set; import java.util.Set;
/** /**
@ -71,41 +73,8 @@ public class SpatialFilter implements Filter
return Filters.makeValueMatcher( return Filters.makeValueMatcher(
factory, factory,
dimension, dimension,
new DruidPredicateFactory() new BoundDruidPredicateFactory(bound)
{
@Override
public Predicate<String> makeStringPredicate()
{
return input -> {
if (input == null) {
return false;
}
final float[] coordinate = SpatialDimensionRowTransformer.decode(input);
return bound.contains(coordinate);
};
}
@Override
public DruidLongPredicate makeLongPredicate()
{
// SpatialFilter does not currently support longs
return DruidLongPredicate.ALWAYS_FALSE;
}
@Override
public DruidFloatPredicate makeFloatPredicate()
{
// SpatialFilter does not currently support floats
return DruidFloatPredicate.ALWAYS_FALSE;
}
@Override
public DruidDoublePredicate makeDoublePredicate()
{
// SpatialFilter does not currently support doubles
return DruidDoublePredicate.ALWAYS_FALSE;
}
}
); );
} }
@ -139,4 +108,88 @@ public class SpatialFilter implements Filter
// selectivity estimation for multi-value columns is not implemented yet. // selectivity estimation for multi-value columns is not implemented yet.
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
} }
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
SpatialFilter that = (SpatialFilter) o;
return Objects.equals(dimension, that.dimension) &&
Objects.equals(bound, that.bound) &&
Objects.equals(filterTuning, that.filterTuning);
}
@Override
public int hashCode()
{
return Objects.hash(dimension, bound, filterTuning);
}
@VisibleForTesting
static class BoundDruidPredicateFactory implements DruidPredicateFactory
{
private final Bound bound;
BoundDruidPredicateFactory(Bound bound)
{
this.bound = bound;
}
@Override
public Predicate<String> makeStringPredicate()
{
return input -> {
if (input == null) {
return false;
}
final float[] coordinate = SpatialDimensionRowTransformer.decode(input);
return bound.contains(coordinate);
};
}
@Override
public DruidLongPredicate makeLongPredicate()
{
// SpatialFilter does not currently support longs
return DruidLongPredicate.ALWAYS_FALSE;
}
@Override
public DruidFloatPredicate makeFloatPredicate()
{
// SpatialFilter does not currently support floats
return DruidFloatPredicate.ALWAYS_FALSE;
}
@Override
public DruidDoublePredicate makeDoublePredicate()
{
// SpatialFilter does not currently support doubles
return DruidDoublePredicate.ALWAYS_FALSE;
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
BoundDruidPredicateFactory that = (BoundDruidPredicateFactory) o;
return Objects.equals(bound, that.bound);
}
@Override
public int hashCode()
{
return Objects.hash(bound);
}
}
} }

View File

@ -119,4 +119,16 @@ public class TrueFilter implements Filter
{ {
return "true"; return "true";
} }
@Override
public final int hashCode()
{
return TrueFilter.class.hashCode();
}
@Override
public final boolean equals(Object obj)
{
return obj instanceof TrueFilter;
}
} }

View File

@ -21,6 +21,7 @@ package org.apache.druid.segment.join;
import com.google.common.collect.Iterables; import com.google.common.collect.Iterables;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.granularity.Granularity; import org.apache.druid.java.util.common.granularity.Granularity;
import org.apache.druid.java.util.common.guava.Sequence; import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Sequences; import org.apache.druid.java.util.common.guava.Sequences;
@ -46,6 +47,7 @@ import java.util.ArrayList;
import java.util.HashSet; import java.util.HashSet;
import java.util.LinkedHashSet; import java.util.LinkedHashSet;
import java.util.List; import java.util.List;
import java.util.Objects;
import java.util.Optional; import java.util.Optional;
import java.util.Set; import java.util.Set;
@ -207,6 +209,13 @@ public class HashJoinSegmentStorageAdapter implements StorageAdapter
@Nullable final QueryMetrics<?> queryMetrics @Nullable final QueryMetrics<?> queryMetrics
) )
{ {
if (!Objects.equals(joinFilterPreAnalysis.getOriginalFilter(), filter)) {
throw new ISE(
"Filter provided to cursor [%s] does not match join pre-analysis filter [%s]",
filter,
joinFilterPreAnalysis.getOriginalFilter()
);
}
final List<VirtualColumn> preJoinVirtualColumns = new ArrayList<>(); final List<VirtualColumn> preJoinVirtualColumns = new ArrayList<>();
final List<VirtualColumn> postJoinVirtualColumns = new ArrayList<>(); final List<VirtualColumn> postJoinVirtualColumns = new ArrayList<>();
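This guard is the behavioral core of the change: the adapter now fails fast when the filter handed to the cursor differs from the one the join pre-analysis was computed from, rather than silently producing results for the wrong filter. The comparison uses Objects.equals, so a request with no filter still matches a pre-analysis built without one, and it is also why so many Filter implementations in this diff gain value-based equals()/hashCode(). A tiny self-contained illustration of the null handling, with String standing in for Filter:

import java.util.Objects;

public class NullSafeEqualsDemo
{
  public static void main(String[] args)
  {
    String preAnalysisFilter = null; // stand-in for joinFilterPreAnalysis.getOriginalFilter()
    String cursorFilter = null;      // stand-in for the filter argument handed to the cursor

    // Both null: treated as equal, so unfiltered cursors keep working.
    System.out.println(Objects.equals(preAnalysisFilter, cursorFilter)); // true

    // Any mismatch, including null vs. non-null, now surfaces as an ISE in the adapter.
    System.out.println(Objects.equals("x = 1", cursorFilter));           // false
  }
}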

View File

@ -0,0 +1,66 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.annotations;
import org.junit.Assert;
import org.junit.Test;
import org.reflections.Reflections;
import org.reflections.util.ClasspathHelper;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.net.URL;
import java.util.HashSet;
import java.util.Set;
import java.util.stream.Collectors;
public class SubclassesMustOverrideEqualsAndHashCodeTest
{
@Test
public void testEqualsAndHashCode() throws NoSuchMethodException
{
// Exclude test classes
Set<URL> urls = ClasspathHelper.forPackage("org.apache.druid")
.stream()
.filter(url -> !url.toString().contains("/target/test-classes"))
.collect(Collectors.toSet());
Reflections reflections = new Reflections(urls);
Set<Class<?>> classes = reflections.getTypesAnnotatedWith(SubclassesMustOverrideEqualsAndHashCode.class);
Set<String> failed = new HashSet<>();
for (Class<?> clazz : classes) {
if (clazz.isInterface() || Modifier.isAbstract(clazz.getModifiers())) {
continue;
}
Method m = clazz.getMethod("hashCode");
String className = clazz.getName();
try {
Assert.assertNotSame(className + " does not implement hashCode", Object.class, m.getDeclaringClass());
}
catch (AssertionError e) {
failed.add(className);
}
}
if (!failed.isEmpty()) {
System.err.println("failed classes [" + failed.size() + "] : ");
failed.forEach(c -> System.err.println("\t" + c));
Assert.fail();
}
}
}
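A hedged illustration of what this test flags; both classes below are invented for the example and are not part of the patch. The scan skips interfaces and abstract classes, so it is the concrete types reached through the annotated interface (assuming the Reflections scan includes implementors, which that skip logic suggests) that must declare hashCode() themselves:

@SubclassesMustOverrideEqualsAndHashCode
interface ExampleSpec
{
}

// Would be reported: concrete, reached through the annotated interface,
// and its hashCode() is still the one declared on Object.
class BrokenSpec implements ExampleSpec
{
}

// Passes: equals() and hashCode() are declared on the class itself.
class OkSpec implements ExampleSpec
{
  @Override
  public boolean equals(Object o)
  {
    return o instanceof OkSpec;
  }

  @Override
  public int hashCode()
  {
    return OkSpec.class.hashCode();
  }
}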

View File

@ -19,6 +19,7 @@
package org.apache.druid.collections.spatial.search; package org.apache.druid.collections.spatial.search;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
@ -77,4 +78,12 @@ public class PolygonBoundTest
Assert.assertTrue(rightTriangle.contains(new float[]{3f, 3f - delta})); Assert.assertTrue(rightTriangle.contains(new float[]{3f, 3f - delta}));
Assert.assertFalse(rightTriangle.contains(new float[]{3f, 3f + delta})); Assert.assertFalse(rightTriangle.contains(new float[]{3f, 3f + delta}));
} }
@Test
public void testEqualsContract()
{
EqualsVerifier.forClass(PolygonBound.class)
.usingGetClass()
.verify();
}
} }
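For readers new to EqualsVerifier: usingGetClass() tells the verifier that the class under test compares types with getClass() rather than instanceof, which matches the equals() style added throughout this patch. A minimal value class in that style, invented for illustration; EqualsVerifier.forClass(Point.class).usingGetClass().verify() would be expected to pass on it:

import java.util.Objects;

class Point
{
  private final int x;
  private final int y;

  Point(int x, int y)
  {
    this.x = x;
    this.y = y;
  }

  @Override
  public boolean equals(Object o)
  {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) { // getClass(), not instanceof
      return false;
    }
    Point that = (Point) o;
    return x == that.x && y == that.y;
  }

  @Override
  public int hashCode()
  {
    return Objects.hash(x, y);
  }
}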

View File

@ -19,6 +19,7 @@
package org.apache.druid.collections.spatial.search; package org.apache.druid.collections.spatial.search;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
@ -46,4 +47,12 @@ public class RectangularBoundTest
new RectangularBound(new float[]{1F, 1F}, new float[]{2F, 2F}, 2).getCacheKey() new RectangularBound(new float[]{1F, 1F}, new float[]{2F, 2F}, 2).getCacheKey()
)); ));
} }
@Test
public void testEqualsContract()
{
EqualsVerifier.forClass(RectangularBound.class)
.usingGetClass()
.verify();
}
} }

View File

@ -23,6 +23,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.segment.DimensionSelector; import org.apache.druid.segment.DimensionSelector;
import org.apache.druid.segment.TestHelper; import org.apache.druid.segment.TestHelper;
import org.apache.druid.segment.data.IndexedInts; import org.apache.druid.segment.data.IndexedInts;
import org.apache.druid.testing.InitializedNullHandlingTest;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
@ -30,7 +31,7 @@ import java.util.Arrays;
/** /**
*/ */
public class RegexFilteredDimensionSpecTest public class RegexFilteredDimensionSpecTest extends InitializedNullHandlingTest
{ {
@Test @Test

View File

@ -0,0 +1,44 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.query.expression;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.junit.Test;
public class TimestampCeilExprMacroTest
{
@Test
public void testEqualsContractForTimestampCeilExpr()
{
EqualsVerifier.forClass(TimestampCeilExprMacro.TimestampCeilExpr.class)
.withIgnoredFields("analyzeInputsSupplier")
.usingGetClass()
.verify();
}
@Test
public void testEqualsContractForTimestampCeilDynamicExpr()
{
EqualsVerifier.forClass(TimestampCeilExprMacro.TimestampCeilDynamicExpr.class)
.withIgnoredFields("analyzeInputsSupplier")
.usingGetClass()
.verify();
}
}

View File

@ -0,0 +1,44 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.query.expression;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.junit.Test;
public class TimestampFloorExprMacroTest
{
@Test
public void testEqualsContractForTimestampFloorExpr()
{
EqualsVerifier.forClass(TimestampFloorExprMacro.TimestampFloorExpr.class)
.usingGetClass()
.withIgnoredFields("analyzeInputsSupplier")
.verify();
}
@Test
public void testEqualsContractForTimestampFloorDynamicExpr()
{
EqualsVerifier.forClass(TimestampFloorExprMacro.TimestampFloorDynamicExpr.class)
.withIgnoredFields("analyzeInputsSupplier")
.usingGetClass()
.verify();
}
}

View File

@ -0,0 +1,43 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.query.expression;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.junit.Test;
public class TrimExprMacroTest
{
@Test
public void testEqualsContractForTrimStaticCharsExpr()
{
EqualsVerifier.forClass(TrimExprMacro.TrimStaticCharsExpr.class)
.withIgnoredFields("analyzeInputsSupplier")
.usingGetClass()
.verify();
}
@Test
public void testEqualsContractForTrimDynamicCharsExpr()
{
EqualsVerifier.forClass(TrimExprMacro.TrimDynamicCharsExpr.class)
.usingGetClass()
.verify();
}
}

View File

@ -23,8 +23,10 @@ import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.js.JavaScriptConfig; import org.apache.druid.js.JavaScriptConfig;
import org.apache.druid.testing.InitializedNullHandlingTest;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
@ -32,7 +34,7 @@ import java.util.Arrays;
import java.util.LinkedHashSet; import java.util.LinkedHashSet;
import java.util.Set; import java.util.Set;
public class CascadeExtractionFnTest public class CascadeExtractionFnTest extends InitializedNullHandlingTest
{ {
private static final String[] PATHS = { private static final String[] PATHS = {
"/druid/prod/historical", "/druid/prod/historical",
@ -205,4 +207,38 @@ public class CascadeExtractionFnTest
) )
); );
} }
@Test
public void testEqualsContract()
{
EqualsVerifier.forClass(CascadeExtractionFn.class)
.withPrefabValues(
CascadeExtractionFn.ChainedExtractionFn.class,
CascadeExtractionFn.DEFAULT_CHAINED_EXTRACTION_FN,
new CascadeExtractionFn.ChainedExtractionFn(
StrlenExtractionFn.instance(),
CascadeExtractionFn.DEFAULT_CHAINED_EXTRACTION_FN
)
)
.withNonnullFields("chainedExtractionFn")
.usingGetClass()
.verify();
}
@Test
public void testEqualsContractForChainedExtractionFn()
{
EqualsVerifier.forClass(CascadeExtractionFn.ChainedExtractionFn.class)
.withPrefabValues(
CascadeExtractionFn.ChainedExtractionFn.class,
CascadeExtractionFn.DEFAULT_CHAINED_EXTRACTION_FN,
new CascadeExtractionFn.ChainedExtractionFn(
StrlenExtractionFn.instance(),
CascadeExtractionFn.DEFAULT_CHAINED_EXTRACTION_FN
)
)
.withNonnullFields("fn")
.usingGetClass()
.verify();
}
} }
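withPrefabValues is used here because ChainedExtractionFn evidently holds a reference of its own type (the second constructor argument above), and EqualsVerifier cannot construct such a recursive type on its own, so it needs two distinct, pre-built instances to break the cycle. A stripped-down sketch of that recursive shape, names invented for illustration:

// A type that refers to itself: EqualsVerifier needs prefab values for fields like 'next',
// e.g. .withPrefabValues(ChainLink.class, new ChainLink("a", null), new ChainLink("b", null)).
class ChainLink
{
  final String fn;      // stands in for the wrapped ExtractionFn
  final ChainLink next; // recursive reference; null at the end of the chain

  ChainLink(String fn, ChainLink next)
  {
    this.fn = fn;
    this.next = next;
  }
}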

View File

@ -19,6 +19,7 @@
package org.apache.druid.query.extraction; package org.apache.druid.query.extraction;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.apache.druid.common.config.NullHandling; import org.apache.druid.common.config.NullHandling;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
@ -45,4 +46,12 @@ public class LowerExtractionFnTest
Assert.assertArrayEquals(extractionFn.getCacheKey(), extractionFn.getCacheKey()); Assert.assertArrayEquals(extractionFn.getCacheKey(), extractionFn.getCacheKey());
Assert.assertFalse(Arrays.equals(extractionFn.getCacheKey(), new UpperExtractionFn(null).getCacheKey())); Assert.assertFalse(Arrays.equals(extractionFn.getCacheKey(), new UpperExtractionFn(null).getCacheKey()));
} }
@Test
public void testEqualsContract()
{
EqualsVerifier.forClass(LowerExtractionFn.class)
.usingGetClass()
.verify();
}
} }

View File

@ -20,6 +20,7 @@
package org.apache.druid.query.extraction; package org.apache.druid.query.extraction;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.apache.druid.common.config.NullHandling; import org.apache.druid.common.config.NullHandling;
import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.jackson.DefaultObjectMapper;
import org.junit.Assert; import org.junit.Assert;
@ -63,4 +64,10 @@ public class StrlenExtractionFnTest
Assert.assertTrue(extractionFn == extractionFnRoundTrip); Assert.assertTrue(extractionFn == extractionFnRoundTrip);
Assert.assertTrue(extractionFn == StrlenExtractionFn.instance()); Assert.assertTrue(extractionFn == StrlenExtractionFn.instance());
} }
@Test
public void testEqualsContract()
{
EqualsVerifier.forClass(StrlenExtractionFn.class).verify();
}
} }

View File

@ -19,6 +19,7 @@
package org.apache.druid.query.extraction; package org.apache.druid.query.extraction;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.apache.druid.common.config.NullHandling; import org.apache.druid.common.config.NullHandling;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
@ -45,4 +46,12 @@ public class UpperExtractionFnTest
Assert.assertArrayEquals(extractionFn.getCacheKey(), extractionFn.getCacheKey()); Assert.assertArrayEquals(extractionFn.getCacheKey(), extractionFn.getCacheKey());
Assert.assertFalse(Arrays.equals(extractionFn.getCacheKey(), new LowerExtractionFn(null).getCacheKey())); Assert.assertFalse(Arrays.equals(extractionFn.getCacheKey(), new LowerExtractionFn(null).getCacheKey()));
} }
@Test
public void testEqualsContract()
{
EqualsVerifier.forClass(UpperExtractionFn.class)
.usingGetClass()
.verify();
}
} }

View File

@ -24,13 +24,14 @@ import com.google.common.collect.Sets;
import nl.jqno.equalsverifier.EqualsVerifier; import nl.jqno.equalsverifier.EqualsVerifier;
import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.query.extraction.SubstringDimExtractionFn; import org.apache.druid.query.extraction.SubstringDimExtractionFn;
import org.apache.druid.testing.InitializedNullHandlingTest;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
import java.io.IOException; import java.io.IOException;
import java.util.Arrays; import java.util.Arrays;
public class LikeDimFilterTest public class LikeDimFilterTest extends InitializedNullHandlingTest
{ {
@Test @Test
public void testSerde() throws IOException public void testSerde() throws IOException
@ -70,6 +71,15 @@ public class LikeDimFilterTest
Assert.assertEquals(filter.getRequiredColumns(), Sets.newHashSet("foo")); Assert.assertEquals(filter.getRequiredColumns(), Sets.newHashSet("foo"));
} }
@Test
public void testEqualsContractForExtractionFnDruidPredicateFactory()
{
EqualsVerifier.forClass(LikeDimFilter.LikeMatcher.PatternDruidPredicateFactory.class)
.withNonnullFields("pattern")
.usingGetClass()
.verify();
}
@Test @Test
public void test_LikeMatcher_equals() public void test_LikeMatcher_equals()
{ {

View File

@ -0,0 +1,35 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.query.filter;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.junit.Test;
public class SelectorPredicateFactoryTest
{
@Test
public void testEqualsContract()
{
EqualsVerifier.forClass(SelectorPredicateFactory.class)
.withIgnoredFields("initLock", "longPredicate", "floatPredicate", "doublePredicate")
.usingGetClass()
.verify();
}
}

View File

@ -757,7 +757,15 @@ public class BoundFilterTest extends BaseFilterTest
{ {
EqualsVerifier.forClass(BoundFilter.class) EqualsVerifier.forClass(BoundFilter.class)
.usingGetClass() .usingGetClass()
.withNonnullFields("boundDimFilter", "comparator") .withNonnullFields("boundDimFilter")
.verify();
}
@Test
public void test_equals_boundDimFilterDruidPredicateFactory()
{
EqualsVerifier.forClass(BoundFilter.BoundDimFilterDruidPredicateFactory.class)
.usingGetClass()
.withIgnoredFields("longPredicateSupplier", "floatPredicateSupplier", "doublePredicateSupplier") .withIgnoredFields("longPredicateSupplier", "floatPredicateSupplier", "doublePredicateSupplier")
.verify(); .verify();
} }

View File

@ -22,6 +22,7 @@ package org.apache.druid.segment.filter;
import com.google.common.base.Function; import com.google.common.base.Function;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.apache.druid.common.config.NullHandling; import org.apache.druid.common.config.NullHandling;
import org.apache.druid.data.input.InputRow; import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.impl.DimensionsSpec; import org.apache.druid.data.input.impl.DimensionsSpec;
@ -192,4 +193,12 @@ public class ColumnComparisonFilterTest extends BaseFilterTest
new ExtractionDimensionSpec("dim1", "dim1", lookupFn) new ExtractionDimensionSpec("dim1", "dim1", lookupFn)
)), ImmutableList.of("2", "5", "7", "8")); )), ImmutableList.of("2", "5", "7", "8"));
} }
@Test
public void testEqualsContract()
{
EqualsVerifier.forClass(ColumnComparisonFilter.class)
.usingGetClass()
.verify();
}
} }

View File

@ -0,0 +1,44 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.segment.filter;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.junit.Test;
public class DimensionPredicateFilterTest
{
@Test
public void testEqualsContract()
{
EqualsVerifier.forClass(DimensionPredicateFilter.class)
.withIgnoredFields("predicateFactory")
.usingGetClass()
.verify();
}
@Test
public void testEqualsContractForDelegatingStringPredicateFactory()
{
EqualsVerifier.forClass(DimensionPredicateFilter.DelegatingStringPredicateFactory.class)
.withIgnoredFields("baseStringPredicate")
.usingGetClass()
.verify();
}
}

View File

@ -23,6 +23,7 @@ import com.google.common.base.Function;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.apache.druid.common.config.NullHandling; import org.apache.druid.common.config.NullHandling;
import org.apache.druid.data.input.InputRow; import org.apache.druid.data.input.InputRow;
import org.apache.druid.data.input.impl.DimensionsSpec; import org.apache.druid.data.input.impl.DimensionsSpec;
@ -279,6 +280,15 @@ public class ExpressionFilterTest extends BaseFilterTest
Assert.assertEquals(edf("missing == ''").getRequiredColumns(), Sets.newHashSet("missing")); Assert.assertEquals(edf("missing == ''").getRequiredColumns(), Sets.newHashSet("missing"));
} }
@Test
public void testEqualsContract()
{
EqualsVerifier.forClass(ExpressionFilter.class)
.withIgnoredFields("requiredBindings")
.usingGetClass()
.verify();
}
@Test @Test
public void testRequiredColumnRewrite() public void testRequiredColumnRewrite()
{ {

View File

@ -0,0 +1,33 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.segment.filter;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.junit.Test;
public class FalseFilterTest
{
@Test
public void testEqualsContract()
{
EqualsVerifier.forClass(FalseFilter.class)
.verify();
}
}

View File

@ -388,6 +388,16 @@ public class InFilterTest extends BaseFilterTest
.verify(); .verify();
} }
@Test
public void test_equals_forInFilterDruidPredicateFactory()
{
EqualsVerifier.forClass(InFilter.InFilterDruidPredicateFactory.class)
.usingGetClass()
.withNonnullFields("values")
.withIgnoredFields("longPredicateSupplier", "floatPredicateSupplier", "doublePredicateSupplier")
.verify();
}
private DimFilter toInFilter(String dim) private DimFilter toInFilter(String dim)
{ {
List<String> emptyList = new ArrayList<>(); List<String> emptyList = new ArrayList<>();

View File

@ -22,6 +22,7 @@ package org.apache.druid.segment.filter;
import com.google.common.base.Function; import com.google.common.base.Function;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.apache.druid.common.config.NullHandling; import org.apache.druid.common.config.NullHandling;
import org.apache.druid.java.util.common.Pair; import org.apache.druid.java.util.common.Pair;
import org.apache.druid.js.JavaScriptConfig; import org.apache.druid.js.JavaScriptConfig;
@ -228,6 +229,14 @@ public class JavaScriptFilterTest extends BaseFilterTest
assertFilterMatchesSkipVectorize(newJavaScriptDimFilter("l0", jsNumericValueFilter("9001"), null), ImmutableList.of("4")); assertFilterMatchesSkipVectorize(newJavaScriptDimFilter("l0", jsNumericValueFilter("9001"), null), ImmutableList.of("4"));
} }
@Test
public void testEqualsContract()
{
EqualsVerifier.forClass(JavaScriptFilter.class)
.usingGetClass()
.verify();
}
@Test @Test
public void testRequiredColumnRewrite() public void testRequiredColumnRewrite()
{ {

View File

@ -146,6 +146,25 @@ public class RegexFilterTest extends BaseFilterTest
assertFilterMatches(new RegexDimFilter("dim4", "a.*", changeNullFn), ImmutableList.of()); assertFilterMatches(new RegexDimFilter("dim4", "a.*", changeNullFn), ImmutableList.of());
} }
@Test
public void testEqualsContract()
{
EqualsVerifier.forClass(RegexFilter.class)
.withNonnullFields("pattern")
.withIgnoredFields("predicateFactory")
.usingGetClass()
.verify();
}
@Test
public void testEqualsContractForPatternDruidPredicateFactory()
{
EqualsVerifier.forClass(RegexFilter.PatternDruidPredicateFactory.class)
.withNonnullFields("pattern")
.usingGetClass()
.verify();
}
@Test @Test
public void testRequiredColumnRewrite() public void testRequiredColumnRewrite()
{ {
@@ -162,14 +181,4 @@ public class RegexFilterTest extends BaseFilterTest
expectedException.expectMessage("Received a non-applicable rewrite: {invalidName=dim1}, filter's dimension: dim0");
filter.rewriteRequiredColumns(ImmutableMap.of("invalidName", "dim1"));
}
@Test
public void test_equals()
{
EqualsVerifier.forClass(RegexFilter.class)
.usingGetClass()
.withNonnullFields("dimension", "pattern")
.withIgnoredFields("predicateFactory")
.verify();
}
}

View File

@@ -183,6 +183,23 @@ public class SearchQueryFilterTest extends BaseFilterTest
assertFilterMatches(new SearchQueryDimFilter("dim4", specForValue("a"), changeNullFn), ImmutableList.of());
}
@Test
public void testEqualsContract()
{
EqualsVerifier.forClass(SearchQueryFilter.class)
.withIgnoredFields("predicateFactory")
.usingGetClass()
.verify();
}
@Test
public void testEqualsContractForSearchQueryDruidPredicateFactory()
{
EqualsVerifier.forClass(SearchQueryFilter.SearchQueryDruidPredicateFactory.class)
.usingGetClass()
.verify();
}
@Test
public void testRequiredColumnRewrite()
{
@@ -199,14 +216,4 @@ public class SearchQueryFilterTest extends BaseFilterTest
expectedException.expectMessage("Received a non-applicable rewrite: {invalidName=dim1}, filter's dimension: dim0");
filter.rewriteRequiredColumns(ImmutableMap.of("invalidName", "dim1"));
}
@Test
public void test_equals()
{
EqualsVerifier.forClass(SearchQueryFilter.class)
.usingGetClass()
.withNonnullFields("dimension", "query")
.withIgnoredFields("predicateFactory")
.verify();
}
}

View File

@@ -21,6 +21,7 @@ package org.apache.druid.segment.filter;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.apache.druid.collections.spatial.search.RadiusBound;
import org.apache.druid.collections.spatial.search.RectangularBound;
import org.apache.druid.data.input.MapBasedInputRow;
@@ -55,6 +56,7 @@ import org.apache.druid.segment.TestHelper;
import org.apache.druid.segment.incremental.IncrementalIndex;
import org.apache.druid.segment.incremental.IncrementalIndexSchema;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.testing.InitializedNullHandlingTest;
import org.joda.time.Interval;
import org.junit.Test;
import org.junit.runner.RunWith;
@@ -72,7 +74,7 @@ import java.util.concurrent.ThreadLocalRandom;
/**
*/
@RunWith(Parameterized.class)
public class SpatialFilterTest extends InitializedNullHandlingTest
{
private static IndexMerger INDEX_MERGER = TestHelper.getTestIndexMergerV9(OffHeapMemorySegmentWriteOutMediumFactory.instance());
private static IndexIO INDEX_IO = TestHelper.getTestIndexIO();
@@ -716,4 +718,16 @@ public class SpatialFilterTest
throw new RuntimeException(e);
}
}
@Test
public void testEqualsContract()
{
EqualsVerifier.forClass(SpatialFilter.class).usingGetClass().verify();
}
@Test
public void testEqualsContractForBoundDruidPredicateFactory()
{
EqualsVerifier.forClass(SpatialFilter.BoundDruidPredicateFactory.class).usingGetClass().verify();
}
}

View File

@@ -0,0 +1,33 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.segment.filter;
import nl.jqno.equalsverifier.EqualsVerifier;
import org.junit.Test;
public class TrueFilterTest
{
@Test
public void testEqualsContract()
{
EqualsVerifier.forClass(TrueFilter.class)
.verify();
}
}

View File

@@ -713,6 +713,13 @@ public class IncrementalIndexStorageAdapterTest extends InitializedNullHandlingT
return Collections.emptySet();
}
@Override
public int hashCode()
{
// Test code; hashCode and equals aren't important here
return super.hashCode();
}
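This override is presumably here to satisfy the stricter expectations this change introduces around equals()/hashCode() implementations: a class that overrides equals() should override hashCode() as well, even in test code where the values don't matter. The reason is the usual one; a small illustrative sketch (hypothetical classes, not from this change):

import java.util.HashSet;
import java.util.Set;

// Overrides equals() but not hashCode(): hashing falls back to object identity.
final class Broken
{
  private final String id;

  Broken(String id)
  {
    this.id = id;
  }

  @Override
  public boolean equals(Object o)
  {
    return o instanceof Broken && id.equals(((Broken) o).id);
  }
}

class Demo
{
  public static void main(String[] args)
  {
    Set<Broken> set = new HashSet<>();
    set.add(new Broken("a"));
    // Very likely prints false even though the two objects are equal,
    // because their identity hash codes almost certainly differ.
    System.out.println(set.contains(new Broken("a")));
  }
}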
private class DictionaryRaceTestFilterDruidPredicateFactory implements DruidPredicateFactory
{
@Override

View File

@@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.granularity.Granularities;
@@ -2137,7 +2138,6 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
{
Filter originalFilter = new SelectorFilter("page", "this matches nothing");
List<JoinableClause> joinableClauses = ImmutableList.of(factToCountryNameUsingIsoCodeLookup(JoinType.LEFT));
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
VirtualColumns.EMPTY,
@@ -2170,4 +2170,40 @@ public class HashJoinSegmentStorageAdapterTest extends BaseHashJoinSegmentStorag
ImmutableList.of()
);
}
@Test
public void test_makeCursors_originalFilterDoesNotMatchPreAnalysis_shouldThrowISE()
{
List<JoinableClause> joinableClauses = ImmutableList.of(factToCountryOnIsoCode(JoinType.LEFT));
JoinFilterPreAnalysis preAnalysis = JoinFilterAnalyzer.computeJoinFilterPreAnalysis(
joinableClauses,
VirtualColumns.EMPTY,
null,
true,
true,
true,
QueryContexts.DEFAULT_ENABLE_JOIN_FILTER_REWRITE_MAX_SIZE
);
Filter filter = new SelectorFilter("page", "this matches nothing");
try {
new HashJoinSegmentStorageAdapter(
factSegment.asStorageAdapter(),
joinableClauses,
preAnalysis
).makeCursors(
filter,
Intervals.ETERNITY,
VirtualColumns.EMPTY,
Granularities.ALL,
false,
null
);
Assert.fail();
}
catch (ISE e) {
Assert.assertTrue(e.getMessage().startsWith("Filter provided to cursor ["));
}
}
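This test asserts that makeCursors() rejects a filter that differs from the one the JoinFilterPreAnalysis was computed for. The actual check lives in HashJoinSegmentStorageAdapter; conceptually it amounts to a comparison along these lines (a hypothetical sketch, with the getter name invented for illustration; only the "Filter provided to cursor [" message prefix is taken from the test):

// Hypothetical sketch of the validation this test exercises; not the real method body.
// Uses java.util.Objects and org.apache.druid.java.util.common.ISE.
private static void validateFilterMatchesPreAnalysis(Filter filter, JoinFilterPreAnalysis preAnalysis)
{
  if (!Objects.equals(filter, preAnalysis.getOriginalFilter())) {
    throw new ISE(
        "Filter provided to cursor [%s] does not match join pre-analysis filter [%s]",
        filter,
        preAnalysis.getOriginalFilter()
    );
  }
}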
}

View File

@@ -27,6 +27,7 @@ import org.apache.calcite.tools.ValidationException;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.JodaUtils;
import org.apache.druid.java.util.common.StringUtils;
@@ -10966,6 +10967,83 @@ public class CalciteQueryTest extends BaseCalciteQueryTest
testQuery("SELECT TIME_EXTRACT(__time) FROM druid.foo", ImmutableList.of(), ImmutableList.of());
}
@Test
public void testNestedGroupByOnInlineDataSourceWithFilterIsNotSupported() throws Exception
{
try {
testQuery(
"with abc as"
+ "("
+ " SELECT dim1, m2 from druid.foo where \"__time\" >= '2001-01-02'"
+ ")"
+ ", def as"
+ "("
+ " SELECT t1.dim1, SUM(t2.m2) as \"metricSum\" "
+ " from abc as t1 inner join abc as t2 on t1.dim1 = t2.dim1"
+ " where t1.dim1='def'"
+ " group by 1"
+ ")"
+ "SELECT count(*) from def",
ImmutableList.of(
GroupByQuery
.builder()
.setDataSource(
GroupByQuery
.builder()
.setDataSource(
join(
new QueryDataSource(
newScanQueryBuilder()
.dataSource(CalciteTests.DATASOURCE1)
.intervals(querySegmentSpec(Intervals.of("2001-01-02T00:00:00.000Z/146140482-04-24T15:36:27.903Z")))
.columns("dim1", "m2")
.resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST)
.context(QUERY_CONTEXT_DEFAULT)
.build()
),
new QueryDataSource(
newScanQueryBuilder()
.dataSource(CalciteTests.DATASOURCE1)
.intervals(querySegmentSpec(Intervals.of("2001-01-02T00:00:00.000Z/146140482-04-24T15:36:27.903Z")))
.columns("dim1", "m2")
.resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST)
.context(QUERY_CONTEXT_DEFAULT)
.build()
),
"j0",
equalsCondition(
DruidExpression.fromColumn("dim1"),
DruidExpression.fromColumn("j0.dim1")
),
JoinType.INNER
)
)
.setGranularity(Granularities.ALL)
.setInterval(querySegmentSpec(Filtration.eternity()))
.build()
)
.setGranularity(Granularities.ALL)
.setInterval(querySegmentSpec(Filtration.eternity()))
.build()
),
ImmutableList.of(new Object[] {1})
);
Assert.fail("Expected an ISE to be thrown");
}
catch (RuntimeException e) {
Throwable cause = e.getCause();
boolean foundISE = false;
while (cause != null) {
if (cause instanceof ISE) {
foundISE = true;
break;
}
cause = cause.getCause();
}
Assert.assertTrue(foundISE);
}
}
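The catch block above walks the cause chain by hand looking for an ISE. Since Guava is already on the test classpath, the same check could be expressed more compactly; a possible alternative, not part of this change:

import com.google.common.base.Throwables;

// Possible replacement for the manual cause-walking loop above.
private static boolean causalChainContainsISE(Throwable t)
{
  return Throwables.getCausalChain(t).stream().anyMatch(cause -> cause instanceof ISE);
}

// Usage inside the catch block:
//   Assert.assertTrue(causalChainContainsISE(e));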
@Test
public void testUsingSubqueryAsFilterOnTwoColumns() throws Exception
{