Math expression parameters for aggregators (#2783)

* Supports expression-parameterized aggregators (squashed and rebased on master); also includes a math expression post-aggregator (was #2820)

* Addressed comments

* addressed comments
Navis Ryu 2016-10-20 03:58:35 +09:00 committed by Himanshu
parent b113a34355
commit 8b7ff4409a
40 changed files with 1419 additions and 368 deletions

View File

@@ -23,7 +23,7 @@ package io.druid.common.utils;
*/
public class StringUtils extends com.metamx.common.StringUtils
{
-public static final String EMPTY = "";
+private static final byte[] EMPTY_BYTES = new byte[0];
// should be used only for estimation
// returns the same result with StringUtils.fromUtf8(value).length for valid string values
@@ -46,4 +46,9 @@ public class StringUtils extends com.metamx.common.StringUtils
}
return length;
}
+public static byte[] toUtf8WithNullToEmpty(final String string)
+{
+return string == null ? EMPTY_BYTES : toUtf8(string);
+}
}
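A minimal sketch of how the null-safe helper behaves (the class and string literal below are illustrative, not part of the patch): toUtf8WithNullToEmpty returns an empty byte array for null input, which the aggregator cache keys later in this commit rely on.

import io.druid.common.utils.StringUtils;

public class ToUtf8WithNullToEmptyExample
{
  public static void main(String[] args)
  {
    // null maps to a zero-length array instead of throwing a NullPointerException
    byte[] empty = StringUtils.toUtf8WithNullToEmpty(null);
    // non-null strings are encoded as regular UTF-8
    byte[] bytes = StringUtils.toUtf8WithNullToEmpty("fieldName");

    System.out.println(empty.length); // 0
    System.out.println(bytes.length); // 9
  }
}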

View File

@@ -0,0 +1,43 @@
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.math.expr;
import com.google.common.primitives.Longs;
/**
*/
public class Evals
{
public static Number toNumber(Object value)
{
if (value == null) {
return 0L;
}
if (value instanceof Number) {
return (Number) value;
}
String stringValue = String.valueOf(value);
Long longValue = Longs.tryParse(stringValue);
if (longValue == null) {
return Double.valueOf(stringValue);
}
return longValue;
}
}
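A quick sketch of the coercion rules in Evals.toNumber (the wrapper class is illustrative): null becomes 0L, numbers pass through unchanged, and strings are parsed as a long first, falling back to double.

import io.druid.math.expr.Evals;

public class EvalsExample
{
  public static void main(String[] args)
  {
    System.out.println(Evals.toNumber(null));  // 0   (null coerced to a Long zero)
    System.out.println(Evals.toNumber(3.5d));  // 3.5 (Number instances pass through)
    System.out.println(Evals.toNumber("42"));  // 42  (parsed as a Long)
    System.out.println(Evals.toNumber("4.2")); // 4.2 (falls back to Double)
  }
}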

View File

@@ -22,16 +22,36 @@ package io.druid.math.expr;
import com.google.common.math.LongMath;
import java.util.List;
-import java.util.Map;
/**
*/
public interface Expr
{
-Number eval(Map<String, Number> bindings);
+Number eval(ObjectBinding bindings);
+interface ObjectBinding
+{
+Number get(String name);
+}
+void visit(Visitor visitor);
+interface Visitor
+{
+void visit(Expr expr);
+}
}
-class LongExpr implements Expr
+abstract class ConstantExpr implements Expr
+{
+@Override
+public void visit(Visitor visitor)
+{
+visitor.visit(this);
+}
+}
+class LongExpr extends ConstantExpr
{
private final long value;
@@ -47,13 +67,13 @@ class LongExpr implements Expr
}
@Override
-public Number eval(Map<String, Number> bindings)
+public Number eval(ObjectBinding bindings)
{
return value;
}
}
-class DoubleExpr implements Expr
+class DoubleExpr extends ConstantExpr
{
private final double value;
@@ -69,13 +89,13 @@ class DoubleExpr implements Expr
}
@Override
-public Number eval(Map<String, Number> bindings)
+public Number eval(ObjectBinding bindings)
{
return value;
}
}
-class IdentifierExpr implements Expr
+class IdentifierExpr extends ConstantExpr
{
private final String value;
@@ -91,7 +111,7 @@ class IdentifierExpr implements Expr
}
@Override
-public Number eval(Map<String, Number> bindings)
+public Number eval(ObjectBinding bindings)
{
Number val = bindings.get(value);
if (val == null) {
@@ -104,8 +124,8 @@ class IdentifierExpr implements Expr
class FunctionExpr implements Expr
{
-private final String name;
-private final List<Expr> args;
+final String name;
+final List<Expr> args;
public FunctionExpr(String name, List<Expr> args)
{
@@ -120,23 +140,47 @@ class FunctionExpr implements Expr
}
@Override
-public Number eval(Map<String, Number> bindings)
+public Number eval(ObjectBinding bindings)
{
return Parser.func.get(name.toLowerCase()).apply(args, bindings);
}
+@Override
+public void visit(Visitor visitor)
+{
+for (Expr child : args) {
+child.visit(visitor);
+}
+visitor.visit(this);
+}
}
-class UnaryMinusExpr implements Expr
+abstract class UnaryExpr implements Expr
{
-private final Expr expr;
+final Expr expr;
-UnaryMinusExpr(Expr expr)
+UnaryExpr(Expr expr)
{
this.expr = expr;
}
@Override
-public Number eval(Map<String, Number> bindings)
+public void visit(Visitor visitor)
+{
+expr.visit(visitor);
+visitor.visit(this);
+}
+}
+class UnaryMinusExpr extends UnaryExpr
+{
+UnaryMinusExpr(Expr expr)
+{
+super(expr);
+}
+@Override
+public Number eval(ObjectBinding bindings)
{
Number valObj = expr.eval(bindings);
if (valObj instanceof Long) {
@@ -146,6 +190,13 @@ class UnaryMinusExpr implements Expr
}
}
+@Override
+public void visit(Visitor visitor)
+{
+expr.visit(visitor);
+visitor.visit(this);
+}
@Override
public String toString()
{
@@ -153,17 +204,15 @@ class UnaryMinusExpr implements Expr
}
}
-class UnaryNotExpr implements Expr
+class UnaryNotExpr extends UnaryExpr
{
-private final Expr expr;
UnaryNotExpr(Expr expr)
{
-this.expr = expr;
+super(expr);
}
@Override
-public Number eval(Map<String, Number> bindings)
+public Number eval(ObjectBinding bindings)
{
Number valObj = expr.eval(bindings);
return valObj.doubleValue() > 0 ? 0.0d : 1.0d;
@@ -194,6 +243,14 @@ abstract class BinaryOpExprBase implements Expr
return left instanceof Long && right instanceof Long;
}
+@Override
+public void visit(Visitor visitor)
+{
+left.visit(visitor);
+right.visit(visitor);
+visitor.visit(this);
+}
@Override
public String toString()
{
@@ -210,7 +267,7 @@ class BinMinusExpr extends BinaryOpExprBase
}
@Override
-public Number eval(Map<String, Number> bindings)
+public Number eval(ObjectBinding bindings)
{
Number leftVal = left.eval(bindings);
Number rightVal = right.eval(bindings);
@@ -231,7 +288,7 @@ class BinPowExpr extends BinaryOpExprBase
}
@Override
-public Number eval(Map<String, Number> bindings)
+public Number eval(ObjectBinding bindings)
{
Number leftVal = left.eval(bindings);
Number rightVal = right.eval(bindings);
@@ -252,7 +309,7 @@ class BinMulExpr extends BinaryOpExprBase
}
@Override
-public Number eval(Map<String, Number> bindings)
+public Number eval(ObjectBinding bindings)
{
Number leftVal = left.eval(bindings);
Number rightVal = right.eval(bindings);
@@ -273,7 +330,7 @@ class BinDivExpr extends BinaryOpExprBase
}
@Override
-public Number eval(Map<String, Number> bindings)
+public Number eval(ObjectBinding bindings)
{
Number leftVal = left.eval(bindings);
Number rightVal = right.eval(bindings);
@@ -294,7 +351,7 @@ class BinModuloExpr extends BinaryOpExprBase
}
@Override
-public Number eval(Map<String, Number> bindings)
+public Number eval(ObjectBinding bindings)
{
Number leftVal = left.eval(bindings);
Number rightVal = right.eval(bindings);
@@ -315,7 +372,7 @@ class BinPlusExpr extends BinaryOpExprBase
}
@Override
-public Number eval(Map<String, Number> bindings)
+public Number eval(ObjectBinding bindings)
{
Number leftVal = left.eval(bindings);
Number rightVal = right.eval(bindings);
@@ -336,7 +393,7 @@ class BinLtExpr extends BinaryOpExprBase
}
@Override
-public Number eval(Map<String, Number> bindings)
+public Number eval(ObjectBinding bindings)
{
Number leftVal = left.eval(bindings);
Number rightVal = right.eval(bindings);
@@ -357,7 +414,7 @@ class BinLeqExpr extends BinaryOpExprBase
}
@Override
-public Number eval(Map<String, Number> bindings)
+public Number eval(ObjectBinding bindings)
{
Number leftVal = left.eval(bindings);
Number rightVal = right.eval(bindings);
@@ -378,7 +435,7 @@ class BinGtExpr extends BinaryOpExprBase
}
@Override
-public Number eval(Map<String, Number> bindings)
+public Number eval(ObjectBinding bindings)
{
Number leftVal = left.eval(bindings);
Number rightVal = right.eval(bindings);
@@ -399,7 +456,7 @@ class BinGeqExpr extends BinaryOpExprBase
}
@Override
-public Number eval(Map<String, Number> bindings)
+public Number eval(ObjectBinding bindings)
{
Number leftVal = left.eval(bindings);
Number rightVal = right.eval(bindings);
@@ -420,7 +477,7 @@ class BinEqExpr extends BinaryOpExprBase
}
@Override
-public Number eval(Map<String, Number> bindings)
+public Number eval(ObjectBinding bindings)
{
Number leftVal = left.eval(bindings);
Number rightVal = right.eval(bindings);
@@ -441,7 +498,7 @@ class BinNeqExpr extends BinaryOpExprBase
}
@Override
-public Number eval(Map<String, Number> bindings)
+public Number eval(ObjectBinding bindings)
{
Number leftVal = left.eval(bindings);
Number rightVal = right.eval(bindings);
@@ -462,7 +519,7 @@ class BinAndExpr extends BinaryOpExprBase
}
@Override
-public Number eval(Map<String, Number> bindings)
+public Number eval(ObjectBinding bindings)
{
Number leftVal = left.eval(bindings);
Number rightVal = right.eval(bindings);
@@ -495,7 +552,7 @@ class BinOrExpr extends BinaryOpExprBase
}
@Override
-public Number eval(Map<String, Number> bindings)
+public Number eval(ObjectBinding bindings)
{
Number leftVal = left.eval(bindings);
Number rightVal = right.eval(bindings);
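A minimal sketch of the new Expr.Visitor hook added above (the counter class and the expression string are illustrative): the visitor walks children before parents, so it can be used to inspect or count every node of a parsed expression tree.

import io.druid.math.expr.Expr;
import io.druid.math.expr.Parser;

public class ExprVisitorExample
{
  public static void main(String[] args)
  {
    Expr expr = Parser.parse("x + y * 2");

    // Count every node in the parsed tree using the visitor hook.
    final int[] nodes = {0};
    expr.visit(
        new Expr.Visitor()
        {
          @Override
          public void visit(Expr expr)
          {
            nodes[0]++;
          }
        }
    );
    System.out.println("nodes: " + nodes[0]); // 5: x, y, 2, (* y 2), (+ x (* y 2))
  }
}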

View File

@@ -20,7 +20,6 @@
package io.druid.math.expr;
import java.util.List;
-import java.util.Map;
/**
*/
@@ -28,12 +27,12 @@ interface Function
{
String name();
-Number apply(List<Expr> args, Map<String, Number> bindings);
+Number apply(List<Expr> args, Expr.ObjectBinding bindings);
abstract class SingleParam implements Function
{
@Override
-public Number apply(List<Expr> args, Map<String, Number> bindings)
+public Number apply(List<Expr> args, Expr.ObjectBinding bindings)
{
if (args.size() != 1) {
throw new RuntimeException("function '" + name() + "' needs 1 argument");
@@ -48,7 +47,7 @@ interface Function
abstract class DoubleParam implements Function
{
@Override
-public Number apply(List<Expr> args, Map<String, Number> bindings)
+public Number apply(List<Expr> args, Expr.ObjectBinding bindings)
{
if (args.size() != 2) {
throw new RuntimeException("function '" + name() + "' needs 1 argument");
@@ -616,7 +615,7 @@ interface Function
}
@Override
-public Number apply(List<Expr> args, Map<String, Number> bindings)
+public Number apply(List<Expr> args, Expr.ObjectBinding bindings)
{
if (args.size() != 3) {
throw new RuntimeException("function 'if' needs 3 argument");

View File

@@ -20,9 +20,12 @@
package io.druid.math.expr;
+import com.google.common.base.Supplier;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
import com.metamx.common.logger.Logger;
+import com.google.common.collect.Lists;
import io.druid.math.expr.antlr.ExprLexer;
import io.druid.math.expr.antlr.ExprParser;
import org.antlr.v4.runtime.ANTLRInputStream;
@@ -31,7 +34,9 @@ import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.ParseTreeWalker;
import java.lang.reflect.Modifier;
+import java.util.List;
import java.util.Map;
+import java.util.Set;
public class Parser
{
@@ -66,4 +71,52 @@ public class Parser
walker.walk(listener, parseTree);
return listener.getAST();
}
+public static List<String> findRequiredBindings(String in)
+{
+return findRequiredBindings(parse(in));
+}
+public static List<String> findRequiredBindings(Expr expr)
+{
+final Set<String> found = Sets.newLinkedHashSet();
+expr.visit(
+new Expr.Visitor()
+{
+@Override
+public void visit(Expr expr)
+{
+if (expr instanceof IdentifierExpr) {
+found.add(expr.toString());
+}
+}
+}
+);
+return Lists.newArrayList(found);
+}
+public static Expr.ObjectBinding withMap(final Map<String, ?> bindings)
+{
+return new Expr.ObjectBinding()
+{
+@Override
+public Number get(String name)
+{
+return (Number)bindings.get(name);
+}
+};
+}
+public static Expr.ObjectBinding withSuppliers(final Map<String, Supplier<Number>> bindings)
+{
+return new Expr.ObjectBinding()
+{
+@Override
+public Number get(String name)
+{
+Supplier<Number> supplier = bindings.get(name);
+return supplier == null ? null : supplier.get();
+}
+};
+}
}
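A minimal usage sketch of the new parser helpers (the column names "rows" and "delta" are illustrative): findRequiredBindings lists the identifiers an expression needs, and withMap adapts a plain map into the Expr.ObjectBinding the expression is evaluated against.

import com.google.common.collect.ImmutableMap;
import io.druid.math.expr.Expr;
import io.druid.math.expr.Parser;

public class ParserUsageExample
{
  public static void main(String[] args)
  {
    Expr expr = Parser.parse("rows * 2 + if(delta > 0, delta, 0)");

    // Which columns does the expression need? -> [rows, delta]
    System.out.println(Parser.findRequiredBindings(expr));

    // Bind the identifiers to concrete values and evaluate.
    Expr.ObjectBinding bindings = Parser.withMap(
        ImmutableMap.of("rows", 10L, "delta", -3L)
    );
    System.out.println(expr.eval(bindings)); // 20
  }
}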

View File

@@ -19,6 +19,7 @@
package io.druid.math.expr;
+import com.google.common.base.Supplier;
import org.junit.Assert;
import org.junit.Test;
@@ -29,81 +30,103 @@ import java.util.Map;
*/
public class EvalTest
{
+private Supplier<Number> constantSupplier(final Number number)
+{
+return new Supplier<Number>()
+{
+@Override
+public Number get()
+{
+return number;
+}
+};
+}
@Test
public void testDoubleEval()
{
-Map<String, Number> bindings = new HashMap<>();
+Map<String, Supplier<Number>> bindings = new HashMap<>();
-bindings.put("x", 2.0d);
+bindings.put( "x", constantSupplier(2.0d));
-Assert.assertEquals(2.0, Parser.parse("x").eval(bindings).doubleValue(), 0.0001);
+Assert.assertEquals(2.0, evaluate("x", bindings).doubleValue(), 0.0001);
-Assert.assertFalse(Parser.parse("1.0 && 0.0").eval(bindings).doubleValue() > 0.0);
+Assert.assertFalse(evaluate("1.0 && 0.0", bindings).doubleValue() > 0.0);
-Assert.assertTrue(Parser.parse("1.0 && 2.0").eval(bindings).doubleValue() > 0.0);
+Assert.assertTrue(evaluate("1.0 && 2.0", bindings).doubleValue() > 0.0);
-Assert.assertTrue(Parser.parse("1.0 || 0.0").eval(bindings).doubleValue() > 0.0);
+Assert.assertTrue(evaluate("1.0 || 0.0", bindings).doubleValue() > 0.0);
-Assert.assertFalse(Parser.parse("0.0 || 0.0").eval(bindings).doubleValue() > 0.0);
+Assert.assertFalse(evaluate("0.0 || 0.0", bindings).doubleValue() > 0.0);
-Assert.assertTrue(Parser.parse("2.0 > 1.0").eval(bindings).doubleValue() > 0.0);
+Assert.assertTrue(evaluate("2.0 > 1.0", bindings).doubleValue() > 0.0);
-Assert.assertTrue(Parser.parse("2.0 >= 2.0").eval(bindings).doubleValue() > 0.0);
+Assert.assertTrue(evaluate("2.0 >= 2.0", bindings).doubleValue() > 0.0);
-Assert.assertTrue(Parser.parse("1.0 < 2.0").eval(bindings).doubleValue() > 0.0);
+Assert.assertTrue(evaluate("1.0 < 2.0", bindings).doubleValue() > 0.0);
-Assert.assertTrue(Parser.parse("2.0 <= 2.0").eval(bindings).doubleValue() > 0.0);
+Assert.assertTrue(evaluate("2.0 <= 2.0", bindings).doubleValue() > 0.0);
-Assert.assertTrue(Parser.parse("2.0 == 2.0").eval(bindings).doubleValue() > 0.0);
+Assert.assertTrue(evaluate("2.0 == 2.0", bindings).doubleValue() > 0.0);
-Assert.assertTrue(Parser.parse("2.0 != 1.0").eval(bindings).doubleValue() > 0.0);
+Assert.assertTrue(evaluate("2.0 != 1.0", bindings).doubleValue() > 0.0);
-Assert.assertEquals(3.5, Parser.parse("2.0 + 1.5").eval(bindings).doubleValue(), 0.0001);
+Assert.assertEquals(3.5, evaluate("2.0 + 1.5", bindings).doubleValue(), 0.0001);
-Assert.assertEquals(0.5, Parser.parse("2.0 - 1.5").eval(bindings).doubleValue(), 0.0001);
+Assert.assertEquals(0.5, evaluate("2.0 - 1.5", bindings).doubleValue(), 0.0001);
-Assert.assertEquals(3.0, Parser.parse("2.0 * 1.5").eval(bindings).doubleValue(), 0.0001);
+Assert.assertEquals(3.0, evaluate("2.0 * 1.5", bindings).doubleValue(), 0.0001);
-Assert.assertEquals(4.0, Parser.parse("2.0 / 0.5").eval(bindings).doubleValue(), 0.0001);
+Assert.assertEquals(4.0, evaluate("2.0 / 0.5", bindings).doubleValue(), 0.0001);
-Assert.assertEquals(0.2, Parser.parse("2.0 % 0.3").eval(bindings).doubleValue(), 0.0001);
+Assert.assertEquals(0.2, evaluate("2.0 % 0.3", bindings).doubleValue(), 0.0001);
-Assert.assertEquals(8.0, Parser.parse("2.0 ^ 3.0").eval(bindings).doubleValue(), 0.0001);
+Assert.assertEquals(8.0, evaluate("2.0 ^ 3.0", bindings).doubleValue(), 0.0001);
-Assert.assertEquals(-1.5, Parser.parse("-1.5").eval(bindings).doubleValue(), 0.0001);
+Assert.assertEquals(-1.5, evaluate("-1.5", bindings).doubleValue(), 0.0001);
-Assert.assertTrue(Parser.parse("!-1.0").eval(bindings).doubleValue() > 0.0);
+Assert.assertTrue(evaluate("!-1.0", bindings).doubleValue() > 0.0);
-Assert.assertTrue(Parser.parse("!0.0").eval(bindings).doubleValue() > 0.0);
+Assert.assertTrue(evaluate("!0.0", bindings).doubleValue() > 0.0);
-Assert.assertFalse(Parser.parse("!2.0").eval(bindings).doubleValue() > 0.0);
+Assert.assertFalse(evaluate("!2.0", bindings).doubleValue() > 0.0);
-Assert.assertEquals(2.0, Parser.parse("sqrt(4.0)").eval(bindings).doubleValue(), 0.0001);
+Assert.assertEquals(2.0, evaluate("sqrt(4.0)", bindings).doubleValue(), 0.0001);
-Assert.assertEquals(2.0, Parser.parse("if(1.0, 2.0, 3.0)").eval(bindings).doubleValue(), 0.0001);
+Assert.assertEquals(2.0, evaluate("if(1.0, 2.0, 3.0)", bindings).doubleValue(), 0.0001);
-Assert.assertEquals(3.0, Parser.parse("if(0.0, 2.0, 3.0)").eval(bindings).doubleValue(), 0.0001);
+Assert.assertEquals(3.0, evaluate("if(0.0, 2.0, 3.0)", bindings).doubleValue(), 0.0001);
}
+private Number evaluate(String in, Map<String, Supplier<Number>> bindings) {
+return Parser.parse(in).eval(Parser.withSuppliers(bindings));
+}
@Test
public void testLongEval()
{
-Map<String, Number> bindings = new HashMap<>();
+Map<String, Supplier<Number>> bindings = new HashMap<>();
-bindings.put("x", 9223372036854775807L);
+bindings.put("x", constantSupplier(9223372036854775807L));
-Assert.assertEquals(9223372036854775807L, Parser.parse("x").eval(bindings).longValue());
+Assert.assertEquals(9223372036854775807L, evaluate("x", bindings).longValue());
-Assert.assertFalse(Parser.parse("9223372036854775807 && 0").eval(bindings).longValue() > 0);
+Assert.assertFalse(evaluate("9223372036854775807 && 0", bindings).longValue() > 0);
-Assert.assertTrue(Parser.parse("9223372036854775807 && 9223372036854775806").eval(bindings).longValue() > 0);
+Assert.assertTrue(evaluate("9223372036854775807 && 9223372036854775806", bindings).longValue() > 0);
-Assert.assertTrue(Parser.parse("9223372036854775807 || 0").eval(bindings).longValue() > 0);
+Assert.assertTrue(evaluate("9223372036854775807 || 0", bindings).longValue() > 0);
-Assert.assertFalse(Parser.parse("-9223372036854775807 || -9223372036854775807").eval(bindings).longValue() > 0);
+Assert.assertFalse(evaluate("-9223372036854775807 || -9223372036854775807", bindings).longValue() > 0);
-Assert.assertTrue(Parser.parse("-9223372036854775807 || 9223372036854775807").eval(bindings).longValue() > 0);
+Assert.assertTrue(evaluate("-9223372036854775807 || 9223372036854775807", bindings).longValue() > 0);
-Assert.assertFalse(Parser.parse("0 || 0").eval(bindings).longValue() > 0);
+Assert.assertFalse(evaluate("0 || 0", bindings).longValue() > 0);
-Assert.assertTrue(Parser.parse("9223372036854775807 > 9223372036854775806").eval(bindings).longValue() > 0);
+Assert.assertTrue(evaluate("9223372036854775807 > 9223372036854775806", bindings).longValue() > 0);
-Assert.assertTrue(Parser.parse("9223372036854775807 >= 9223372036854775807").eval(bindings).longValue() > 0);
+Assert.assertTrue(evaluate("9223372036854775807 >= 9223372036854775807", bindings).longValue() > 0);
-Assert.assertTrue(Parser.parse("9223372036854775806 < 9223372036854775807").eval(bindings).longValue() > 0);
+Assert.assertTrue(evaluate("9223372036854775806 < 9223372036854775807", bindings).longValue() > 0);
-Assert.assertTrue(Parser.parse("9223372036854775807 <= 9223372036854775807").eval(bindings).longValue() > 0);
+Assert.assertTrue(evaluate("9223372036854775807 <= 9223372036854775807", bindings).longValue() > 0);
-Assert.assertTrue(Parser.parse("9223372036854775807 == 9223372036854775807").eval(bindings).longValue() > 0);
+Assert.assertTrue(evaluate("9223372036854775807 == 9223372036854775807", bindings).longValue() > 0);
-Assert.assertTrue(Parser.parse("9223372036854775807 != 9223372036854775806").eval(bindings).longValue() > 0);
+Assert.assertTrue(evaluate("9223372036854775807 != 9223372036854775806", bindings).longValue() > 0);
-Assert.assertEquals(9223372036854775807L, Parser.parse("9223372036854775806 + 1").eval(bindings).longValue());
+Assert.assertEquals(9223372036854775807L, evaluate("9223372036854775806 + 1", bindings).longValue());
-Assert.assertEquals(9223372036854775806L, Parser.parse("9223372036854775807 - 1").eval(bindings).longValue());
+Assert.assertEquals(9223372036854775806L, evaluate("9223372036854775807 - 1", bindings).longValue());
-Assert.assertEquals(9223372036854775806L, Parser.parse("4611686018427387903 * 2").eval(bindings).longValue());
+Assert.assertEquals(9223372036854775806L, evaluate("4611686018427387903 * 2", bindings).longValue());
-Assert.assertEquals(4611686018427387903L, Parser.parse("9223372036854775806 / 2").eval(bindings).longValue());
+Assert.assertEquals(4611686018427387903L, evaluate("9223372036854775806 / 2", bindings).longValue());
-Assert.assertEquals(7L, Parser.parse("9223372036854775807 % 9223372036854775800").eval(bindings).longValue());
+Assert.assertEquals(7L, evaluate("9223372036854775807 % 9223372036854775800", bindings).longValue());
-Assert.assertEquals( 9223372030926249001L, Parser.parse("3037000499 ^ 2").eval(bindings).longValue());
+Assert.assertEquals( 9223372030926249001L, evaluate("3037000499 ^ 2", bindings).longValue());
-Assert.assertEquals(-9223372036854775807L, Parser.parse("-9223372036854775807").eval(bindings).longValue());
+Assert.assertEquals(-9223372036854775807L, evaluate("-9223372036854775807", bindings).longValue());
-Assert.assertTrue(Parser.parse("!-9223372036854775807").eval(bindings).longValue() > 0);
+Assert.assertTrue(evaluate("!-9223372036854775807", bindings).longValue() > 0);
-Assert.assertTrue(Parser.parse("!0").eval(bindings).longValue() > 0);
+Assert.assertTrue(evaluate("!0", bindings).longValue() > 0);
-Assert.assertFalse(Parser.parse("!9223372036854775807").eval(bindings).longValue() > 0);
+Assert.assertFalse(evaluate("!9223372036854775807", bindings).longValue() > 0);
-Assert.assertEquals(3037000499L, Parser.parse("sqrt(9223372036854775807)").eval(bindings).longValue());
+Assert.assertEquals(3037000499L, evaluate("sqrt(9223372036854775807)", bindings).longValue());
-Assert.assertEquals(9223372036854775807L, Parser.parse("if(9223372036854775807, 9223372036854775807, 9223372036854775806)").eval(bindings).longValue());
-Assert.assertEquals(9223372036854775806L, Parser.parse("if(0, 9223372036854775807, 9223372036854775806)").eval(bindings).longValue());
+Assert.assertEquals(9223372036854775807L, evaluate(
+"if(9223372036854775807, 9223372036854775807, 9223372036854775806)",
+bindings
+).longValue());
+Assert.assertEquals(9223372036854775806L, evaluate(
+"if(0, 9223372036854775807, 9223372036854775806)",
+bindings
+).longValue());
}
}

View File

@@ -70,88 +70,47 @@ public class ParserTest
Assert.assertEquals(expected, actual);
}
+private void validateParser(String expression, String expected, String identifiers)
+{
+Assert.assertEquals(expected, Parser.parse(expression).toString());
+Assert.assertEquals(identifiers, Parser.findRequiredBindings(expression).toString());
+}
@Test
public void testSimpleLogicalOps1()
{
-String actual = Parser.parse("x>y").toString();
-String expected = "(> x y)";
-Assert.assertEquals(expected, actual);
-actual = Parser.parse("x<y").toString();
-expected = "(< x y)";
-Assert.assertEquals(expected, actual);
-actual = Parser.parse("x<=y").toString();
-expected = "(<= x y)";
-Assert.assertEquals(expected, actual);
-actual = Parser.parse("x>=y").toString();
-expected = "(>= x y)";
-Assert.assertEquals(expected, actual);
-actual = Parser.parse("x==y").toString();
-expected = "(== x y)";
-Assert.assertEquals(expected, actual);
-actual = Parser.parse("x!=y").toString();
-expected = "(!= x y)";
-Assert.assertEquals(expected, actual);
-actual = Parser.parse("x && y").toString();
-expected = "(&& x y)";
-Assert.assertEquals(expected, actual);
-actual = Parser.parse("x || y").toString();
-expected = "(|| x y)";
-Assert.assertEquals(expected, actual);
+validateParser("x>y", "(> x y)", "[x, y]");
+validateParser("x<y", "(< x y)", "[x, y]");
+validateParser("x<=y", "(<= x y)", "[x, y]");
+validateParser("x>=y", "(>= x y)", "[x, y]");
+validateParser("x==y", "(== x y)", "[x, y]");
+validateParser("x!=y", "(!= x y)", "[x, y]");
+validateParser("x && y", "(&& x y)", "[x, y]");
+validateParser("x || y", "(|| x y)", "[x, y]");
}
@Test
public void testSimpleAdditivityOp1()
{
-String actual = Parser.parse("x+y").toString();
-String expected = "(+ x y)";
-Assert.assertEquals(expected, actual);
-actual = Parser.parse("x-y").toString();
-expected = "(- x y)";
-Assert.assertEquals(expected, actual);
+validateParser("x+y", "(+ x y)", "[x, y]");
+validateParser("x-y", "(- x y)", "[x, y]");
}
@Test
public void testSimpleAdditivityOp2()
{
-String actual = Parser.parse("x+y+z").toString();
-String expected = "(+ (+ x y) z)";
-Assert.assertEquals(expected, actual);
-actual = Parser.parse("x+y-z").toString();
-expected = "(- (+ x y) z)";
-Assert.assertEquals(expected, actual);
-actual = Parser.parse("x-y+z").toString();
-expected = "(+ (- x y) z)";
-Assert.assertEquals(expected, actual);
-actual = Parser.parse("x-y-z").toString();
-expected = "(- (- x y) z)";
-Assert.assertEquals(expected, actual);
+validateParser("x+y+z", "(+ (+ x y) z)", "[x, y, z]");
+validateParser("x+y-z", "(- (+ x y) z)", "[x, y, z]");
+validateParser("x-y+z", "(+ (- x y) z)", "[x, y, z]");
+validateParser("x-y-z", "(- (- x y) z)", "[x, y, z]");
}
@Test
public void testSimpleMultiplicativeOp1()
{
-String actual = Parser.parse("x*y").toString();
-String expected = "(* x y)";
-Assert.assertEquals(expected, actual);
-actual = Parser.parse("x/y").toString();
-expected = "(/ x y)";
-Assert.assertEquals(expected, actual);
-actual = Parser.parse("x%y").toString();
-expected = "(% x y)";
-Assert.assertEquals(expected, actual);
+validateParser("x*y", "(* x y)", "[x, y]");
+validateParser("x/y", "(/ x y)", "[x, y]");
+validateParser("x%y", "(% x y)", "[x, y]");
}
@Test
@@ -255,12 +214,7 @@ public class ParserTest
@Test
public void testFunctions()
{
-String actual = Parser.parse("sqrt(x)").toString();
-String expected = "(sqrt [x])";
-Assert.assertEquals(expected, actual);
-actual = Parser.parse("if(cond,then,else)").toString();
-expected = "(if [cond, then, else])";
-Assert.assertEquals(expected, actual);
+validateParser("sqrt(x)", "(sqrt [x])", "[x]");
+validateParser("if(cond,then,else)", "(if [cond, then, else])", "[cond, then, else]");
}
}

View File

@@ -43,6 +43,7 @@ import io.druid.query.aggregation.post.ArithmeticPostAggregator;
import io.druid.query.aggregation.post.ConstantPostAggregator;
import io.druid.query.aggregation.post.FieldAccessPostAggregator;
import io.druid.query.aggregation.post.JavaScriptPostAggregator;
+import io.druid.query.aggregation.post.ExpressionPostAggregator;
import io.druid.segment.serde.ComplexMetrics;
/**
@@ -82,6 +83,7 @@ public class AggregatorsModule extends SimpleModule
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
@JsonSubTypes(value = {
+@JsonSubTypes.Type(name = "expression", value = ExpressionPostAggregator.class),
@JsonSubTypes.Type(name = "arithmetic", value = ArithmeticPostAggregator.class),
@JsonSubTypes.Type(name = "fieldAccess", value = FieldAccessPostAggregator.class),
@JsonSubTypes.Type(name = "constant", value = ConstantPostAggregator.class),

View File

@@ -21,6 +21,10 @@ package io.druid.query.aggregation;
import com.google.common.collect.Lists;
import com.metamx.common.Pair;
+import io.druid.segment.ColumnSelectorFactory;
+import io.druid.segment.FloatColumnSelector;
+import io.druid.segment.LongColumnSelector;
+import io.druid.segment.NumericColumnSelector;
import java.util.HashSet;
import java.util.LinkedList;
@@ -84,4 +88,50 @@ public class AggregatorUtil
}
return new Pair(condensedAggs, condensedPostAggs);
}
+public static FloatColumnSelector getFloatColumnSelector(
+ColumnSelectorFactory metricFactory,
+String fieldName,
+String fieldExpression
+)
+{
+if (fieldName != null && fieldExpression == null) {
+return metricFactory.makeFloatColumnSelector(fieldName);
+}
+if (fieldName == null && fieldExpression != null) {
+final NumericColumnSelector numeric = metricFactory.makeMathExpressionSelector(fieldExpression);
+return new FloatColumnSelector()
+{
+@Override
+public float get()
+{
+return numeric.get().floatValue();
+}
+};
+}
+throw new IllegalArgumentException("Must have a valid, non-null fieldName or expression");
+}
+public static LongColumnSelector getLongColumnSelector(
+ColumnSelectorFactory metricFactory,
+String fieldName,
+String fieldExpression
+)
+{
+if (fieldName != null && fieldExpression == null) {
+return metricFactory.makeLongColumnSelector(fieldName);
+}
+if (fieldName == null && fieldExpression != null) {
+final NumericColumnSelector numeric = metricFactory.makeMathExpressionSelector(fieldExpression);
+return new LongColumnSelector()
+{
+@Override
+public long get()
+{
+return numeric.get().longValue();
+}
+};
+}
+throw new IllegalArgumentException("Must have a valid, non-null fieldName or expression");
+}
}
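A small sketch of how the new helpers are meant to be called from an aggregator factory (the wrapper class is illustrative, and the ColumnSelectorFactory is assumed to be supplied by the query engine at runtime): exactly one of fieldName or expression may be non-null.

import io.druid.query.aggregation.AggregatorUtil;
import io.druid.segment.ColumnSelectorFactory;
import io.druid.segment.FloatColumnSelector;

public class ColumnSelectorExample
{
  public static FloatColumnSelector selectorFor(ColumnSelectorFactory columnSelectorFactory)
  {
    // fieldName is null, so the selector is backed by the parsed math expression instead
    return AggregatorUtil.getFloatColumnSelector(columnSelectorFactory, null, "rows * 1.1");
  }
}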

View File

@@ -23,13 +23,16 @@ import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.primitives.Doubles;
-import com.metamx.common.StringUtils;
+import io.druid.common.utils.StringUtils;
+import io.druid.math.expr.Parser;
import io.druid.segment.ColumnSelectorFactory;
+import io.druid.segment.FloatColumnSelector;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
+import java.util.Objects;
/**
*/
@@ -37,32 +40,48 @@ public class DoubleMaxAggregatorFactory extends AggregatorFactory
{
private static final byte CACHE_TYPE_ID = 0x3;
-private final String fieldName;
private final String name;
+private final String fieldName;
+private final String expression;
@JsonCreator
public DoubleMaxAggregatorFactory(
@JsonProperty("name") String name,
-@JsonProperty("fieldName") final String fieldName
+@JsonProperty("fieldName") final String fieldName,
+@JsonProperty("expression") String expression
)
{
Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name");
-Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName");
+Preconditions.checkArgument(
+fieldName == null ^ expression == null,
+"Must have a valid, non-null fieldName or expression"
+);
this.name = name;
this.fieldName = fieldName;
+this.expression = expression;
}
+public DoubleMaxAggregatorFactory(String name, String fieldName)
+{
+this(name, fieldName, null);
+}
@Override
public Aggregator factorize(ColumnSelectorFactory metricFactory)
{
-return new DoubleMaxAggregator(name, metricFactory.makeFloatColumnSelector(fieldName));
+return new DoubleMaxAggregator(name, getFloatColumnSelector(metricFactory));
}
@Override
public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
{
-return new DoubleMaxBufferAggregator(metricFactory.makeFloatColumnSelector(fieldName));
+return new DoubleMaxBufferAggregator(getFloatColumnSelector(metricFactory));
}
+private FloatColumnSelector getFloatColumnSelector(ColumnSelectorFactory metricFactory)
+{
+return AggregatorUtil.getFloatColumnSelector(metricFactory, fieldName, expression);
+}
@Override
@@ -80,7 +99,7 @@ public class DoubleMaxAggregatorFactory extends AggregatorFactory
@Override
public AggregatorFactory getCombiningFactory()
{
-return new DoubleMaxAggregatorFactory(name, name);
+return new DoubleMaxAggregatorFactory(name, name, null);
}
@Override
@@ -96,7 +115,7 @@ public class DoubleMaxAggregatorFactory extends AggregatorFactory
@Override
public List<AggregatorFactory> getRequiredColumns()
{
-return Arrays.<AggregatorFactory>asList(new DoubleMaxAggregatorFactory(fieldName, fieldName));
+return Arrays.<AggregatorFactory>asList(new DoubleMaxAggregatorFactory(fieldName, fieldName, expression));
}
@Override
@@ -121,6 +140,12 @@ public class DoubleMaxAggregatorFactory extends AggregatorFactory
return fieldName;
}
+@JsonProperty
+public String getExpression()
+{
+return expression;
+}
@Override
@JsonProperty
public String getName()
@@ -131,15 +156,21 @@ public class DoubleMaxAggregatorFactory extends AggregatorFactory
@Override
public List<String> requiredFields()
{
-return Arrays.asList(fieldName);
+return fieldName != null ? Arrays.asList(fieldName) : Parser.findRequiredBindings(expression);
}
@Override
public byte[] getCacheKey()
{
-byte[] fieldNameBytes = StringUtils.toUtf8(fieldName);
+byte[] fieldNameBytes = StringUtils.toUtf8WithNullToEmpty(fieldName);
+byte[] expressionBytes = StringUtils.toUtf8WithNullToEmpty(expression);
-return ByteBuffer.allocate(1 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).array();
+return ByteBuffer.allocate(2 + fieldNameBytes.length + expressionBytes.length)
+.put(CACHE_TYPE_ID)
+.put(fieldNameBytes)
+.put(AggregatorUtil.STRING_SEPARATOR)
+.put(expressionBytes)
+.array();
}
@Override
@@ -165,6 +196,7 @@ public class DoubleMaxAggregatorFactory extends AggregatorFactory
{
return "DoubleMaxAggregatorFactory{" +
"fieldName='" + fieldName + '\'' +
+", expression='" + expression + '\'' +
", name='" + name + '\'' +
'}';
}
@@ -181,10 +213,13 @@ public class DoubleMaxAggregatorFactory extends AggregatorFactory
DoubleMaxAggregatorFactory that = (DoubleMaxAggregatorFactory) o;
-if (fieldName != null ? !fieldName.equals(that.fieldName) : that.fieldName != null) {
+if (!Objects.equals(fieldName, that.fieldName)) {
return false;
}
-if (name != null ? !name.equals(that.name) : that.name != null) {
+if (!Objects.equals(expression, that.expression)) {
+return false;
+}
+if (!Objects.equals(name, that.name)) {
return false;
}
@@ -195,6 +230,7 @@ public class DoubleMaxAggregatorFactory extends AggregatorFactory
public int hashCode()
{
int result = fieldName != null ? fieldName.hashCode() : 0;
+result = 31 * result + (expression != null ? expression.hashCode() : 0);
result = 31 * result + (name != null ? name.hashCode() : 0);
return result;
}
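A short sketch of how the widened constructor is intended to be used (the metric and column names are illustrative): a factory takes either a plain fieldName or a math expression, but never both, and requiredFields() reflects whichever was given.

import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.DoubleMaxAggregatorFactory;

public class ExpressionAggregatorExample
{
  public static void main(String[] args)
  {
    // Classic column-backed aggregator: max(delta)
    AggregatorFactory byField = new DoubleMaxAggregatorFactory("maxDelta", "delta");

    // Expression-backed aggregator: max(delta / 2 + deleted)
    AggregatorFactory byExpression = new DoubleMaxAggregatorFactory("maxScore", null, "delta / 2 + deleted");

    System.out.println(byField.requiredFields());      // [delta]
    System.out.println(byExpression.requiredFields()); // [delta, deleted]
  }
}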

View File

@@ -23,13 +23,16 @@ import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.primitives.Doubles;
-import com.metamx.common.StringUtils;
+import io.druid.common.utils.StringUtils;
+import io.druid.math.expr.Parser;
import io.druid.segment.ColumnSelectorFactory;
+import io.druid.segment.FloatColumnSelector;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
+import java.util.Objects;
/**
*/
@@ -37,32 +40,48 @@ public class DoubleMinAggregatorFactory extends AggregatorFactory
{
private static final byte CACHE_TYPE_ID = 0x4;
-private final String fieldName;
private final String name;
+private final String fieldName;
+private final String expression;
@JsonCreator
public DoubleMinAggregatorFactory(
@JsonProperty("name") String name,
-@JsonProperty("fieldName") final String fieldName
+@JsonProperty("fieldName") final String fieldName,
+@JsonProperty("expression") String expression
)
{
Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name");
-Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName");
+Preconditions.checkArgument(
+fieldName == null ^ expression == null,
+"Must have a valid, non-null fieldName or expression"
+);
this.name = name;
this.fieldName = fieldName;
+this.expression = expression;
}
+public DoubleMinAggregatorFactory(String name, String fieldName)
+{
+this(name, fieldName, null);
+}
@Override
public Aggregator factorize(ColumnSelectorFactory metricFactory)
{
-return new DoubleMinAggregator(name, metricFactory.makeFloatColumnSelector(fieldName));
+return new DoubleMinAggregator(name, getFloatColumnSelector(metricFactory));
}
@Override
public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
{
-return new DoubleMinBufferAggregator(metricFactory.makeFloatColumnSelector(fieldName));
+return new DoubleMinBufferAggregator(getFloatColumnSelector(metricFactory));
}
+private FloatColumnSelector getFloatColumnSelector(ColumnSelectorFactory metricFactory)
+{
+return AggregatorUtil.getFloatColumnSelector(metricFactory, fieldName, expression);
+}
@Override
@@ -80,7 +99,7 @@ public class DoubleMinAggregatorFactory extends AggregatorFactory
@Override
public AggregatorFactory getCombiningFactory()
{
-return new DoubleMinAggregatorFactory(name, name);
+return new DoubleMinAggregatorFactory(name, name, null);
}
@Override
@@ -96,7 +115,7 @@ public class DoubleMinAggregatorFactory extends AggregatorFactory
@Override
public List<AggregatorFactory> getRequiredColumns()
{
-return Arrays.<AggregatorFactory>asList(new DoubleMinAggregatorFactory(fieldName, fieldName));
+return Arrays.<AggregatorFactory>asList(new DoubleMinAggregatorFactory(fieldName, fieldName, expression));
}
@Override
@@ -121,6 +140,12 @@ public class DoubleMinAggregatorFactory extends AggregatorFactory
return fieldName;
}
+@JsonProperty
+public String getExpression()
+{
+return expression;
+}
@Override
@JsonProperty
public String getName()
@@ -131,15 +156,21 @@ public class DoubleMinAggregatorFactory extends AggregatorFactory
@Override
public List<String> requiredFields()
{
-return Arrays.asList(fieldName);
+return fieldName != null ? Arrays.asList(fieldName) : Parser.findRequiredBindings(expression);
}
@Override
public byte[] getCacheKey()
{
-byte[] fieldNameBytes = StringUtils.toUtf8(fieldName);
+byte[] fieldNameBytes = StringUtils.toUtf8WithNullToEmpty(fieldName);
+byte[] expressionBytes = StringUtils.toUtf8WithNullToEmpty(expression);
-return ByteBuffer.allocate(1 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).array();
+return ByteBuffer.allocate(2 + fieldNameBytes.length + expressionBytes.length)
+.put(CACHE_TYPE_ID)
+.put(fieldNameBytes)
+.put(AggregatorUtil.STRING_SEPARATOR)
+.put(expressionBytes)
+.array();
}
@Override
@@ -165,6 +196,7 @@ public class DoubleMinAggregatorFactory extends AggregatorFactory
{
return "DoubleMinAggregatorFactory{" +
"fieldName='" + fieldName + '\'' +
+", expression='" + expression + '\'' +
", name='" + name + '\'' +
'}';
}
@@ -181,10 +213,13 @@ public class DoubleMinAggregatorFactory extends AggregatorFactory
DoubleMinAggregatorFactory that = (DoubleMinAggregatorFactory) o;
-if (fieldName != null ? !fieldName.equals(that.fieldName) : that.fieldName != null) {
+if (!Objects.equals(fieldName, that.fieldName)) {
return false;
}
-if (name != null ? !name.equals(that.name) : that.name != null) {
+if (!Objects.equals(expression, that.expression)) {
+return false;
+}
+if (!Objects.equals(name, that.name)) {
return false;
}
@@ -195,6 +230,7 @@ public class DoubleMinAggregatorFactory extends AggregatorFactory
public int hashCode()
{
int result = fieldName != null ? fieldName.hashCode() : 0;
+result = 31 * result + (expression != null ? expression.hashCode() : 0);
result = 31 * result + (name != null ? name.hashCode() : 0);
return result;
}

View File

@@ -23,13 +23,16 @@ import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.primitives.Doubles;
-import com.metamx.common.StringUtils;
+import io.druid.common.utils.StringUtils;
+import io.druid.math.expr.Parser;
import io.druid.segment.ColumnSelectorFactory;
+import io.druid.segment.FloatColumnSelector;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
+import java.util.Objects;
/**
*/
@@ -37,35 +40,48 @@ public class DoubleSumAggregatorFactory extends AggregatorFactory
{
private static final byte CACHE_TYPE_ID = 0x2;
-private final String fieldName;
private final String name;
+private final String fieldName;
+private final String expression;
@JsonCreator
public DoubleSumAggregatorFactory(
@JsonProperty("name") String name,
-@JsonProperty("fieldName") final String fieldName
+@JsonProperty("fieldName") String fieldName,
+@JsonProperty("expression") String expression
)
{
Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name");
-Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName");
+Preconditions.checkArgument(
+fieldName == null ^ expression == null,
+"Must have a valid, non-null fieldName or expression"
+);
this.name = name;
this.fieldName = fieldName;
+this.expression = expression;
}
+public DoubleSumAggregatorFactory(String name, String fieldName)
+{
+this(name, fieldName, null);
+}
@Override
public Aggregator factorize(ColumnSelectorFactory metricFactory)
{
-return new DoubleSumAggregator(
-name,
-metricFactory.makeFloatColumnSelector(fieldName)
-);
+return new DoubleSumAggregator(name, getFloatColumnSelector(metricFactory));
}
@Override
public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
{
-return new DoubleSumBufferAggregator(metricFactory.makeFloatColumnSelector(fieldName));
+return new DoubleSumBufferAggregator(getFloatColumnSelector(metricFactory));
}
+private FloatColumnSelector getFloatColumnSelector(ColumnSelectorFactory metricFactory)
+{
+return AggregatorUtil.getFloatColumnSelector(metricFactory, fieldName, expression);
+}
@Override
@@ -83,7 +99,7 @@ public class DoubleSumAggregatorFactory extends AggregatorFactory
@Override
public AggregatorFactory getCombiningFactory()
{
-return new DoubleSumAggregatorFactory(name, name);
+return new DoubleSumAggregatorFactory(name, name, null);
}
@Override
@@ -99,7 +115,7 @@ public class DoubleSumAggregatorFactory extends AggregatorFactory
@Override
public List<AggregatorFactory> getRequiredColumns()
{
-return Arrays.<AggregatorFactory>asList(new DoubleSumAggregatorFactory(fieldName, fieldName));
+return Arrays.<AggregatorFactory>asList(new DoubleSumAggregatorFactory(fieldName, fieldName, expression));
}
@Override
@@ -124,6 +140,12 @@ public class DoubleSumAggregatorFactory extends AggregatorFactory
return fieldName;
}
+@JsonProperty
+public String getExpression()
+{
+return expression;
+}
@Override
@JsonProperty
public String getName()
@@ -134,15 +156,21 @@ public class DoubleSumAggregatorFactory extends AggregatorFactory
@Override
public List<String> requiredFields()
{
-return Arrays.asList(fieldName);
+return fieldName != null ? Arrays.asList(fieldName) : Parser.findRequiredBindings(expression);
}
@Override
public byte[] getCacheKey()
{
-byte[] fieldNameBytes = StringUtils.toUtf8(fieldName);
+byte[] fieldNameBytes = StringUtils.toUtf8WithNullToEmpty(fieldName);
+byte[] expressionBytes = StringUtils.toUtf8WithNullToEmpty(expression);
-return ByteBuffer.allocate(1 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).array();
+return ByteBuffer.allocate(2 + fieldNameBytes.length + expressionBytes.length)
+.put(CACHE_TYPE_ID)
+.put(fieldNameBytes)
+.put(AggregatorUtil.STRING_SEPARATOR)
+.put(expressionBytes)
+.array();
}
@Override
@@ -168,6 +196,7 @@ public class DoubleSumAggregatorFactory extends AggregatorFactory
{
return "DoubleSumAggregatorFactory{" +
"fieldName='" + fieldName + '\'' +
+", expression='" + expression + '\'' +
", name='" + name + '\'' +
'}';
}
@@ -184,10 +213,13 @@ public class DoubleSumAggregatorFactory extends AggregatorFactory
DoubleSumAggregatorFactory that = (DoubleSumAggregatorFactory) o;
-if (fieldName != null ? !fieldName.equals(that.fieldName) : that.fieldName != null) {
+if (!Objects.equals(fieldName, that.fieldName)) {
return false;
}
-if (name != null ? !name.equals(that.name) : that.name != null) {
+if (!Objects.equals(expression, that.expression)) {
+return false;
+}
+if (!Objects.equals(name, that.name)) {
return false;
}
@@ -198,6 +230,7 @@ public class DoubleSumAggregatorFactory extends AggregatorFactory
public int hashCode()
{
int result = fieldName != null ? fieldName.hashCode() : 0;
+result = 31 * result + (expression != null ? expression.hashCode() : 0);
result = 31 * result + (name != null ? name.hashCode() : 0);
return result;
}

View File

@ -23,13 +23,16 @@ import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions; import com.google.common.base.Preconditions;
import com.google.common.primitives.Longs; import com.google.common.primitives.Longs;
import com.metamx.common.StringUtils; import io.druid.common.utils.StringUtils;
import io.druid.math.expr.Parser;
import io.druid.segment.ColumnSelectorFactory; import io.druid.segment.ColumnSelectorFactory;
import io.druid.segment.LongColumnSelector;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.util.Arrays; import java.util.Arrays;
import java.util.Comparator; import java.util.Comparator;
import java.util.List; import java.util.List;
import java.util.Objects;
/** /**
*/ */
@ -37,32 +40,47 @@ public class LongMaxAggregatorFactory extends AggregatorFactory
{ {
private static final byte CACHE_TYPE_ID = 0xA; private static final byte CACHE_TYPE_ID = 0xA;
private final String fieldName;
private final String name; private final String name;
private final String fieldName;
private final String expression;
@JsonCreator @JsonCreator
public LongMaxAggregatorFactory( public LongMaxAggregatorFactory(
@JsonProperty("name") String name, @JsonProperty("name") String name,
@JsonProperty("fieldName") final String fieldName @JsonProperty("fieldName") final String fieldName,
@JsonProperty("expression") String expression
) )
{ {
Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name"); Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name");
Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName"); Preconditions.checkArgument(
fieldName == null ^ expression == null,
"Must have a valid, non-null fieldName or expression");
this.name = name; this.name = name;
this.fieldName = fieldName; this.fieldName = fieldName;
this.expression = expression;
}
public LongMaxAggregatorFactory(String name, String fieldName)
{
this(name, fieldName, null);
} }
@Override @Override
public Aggregator factorize(ColumnSelectorFactory metricFactory) public Aggregator factorize(ColumnSelectorFactory metricFactory)
{ {
return new LongMaxAggregator(name, metricFactory.makeLongColumnSelector(fieldName)); return new LongMaxAggregator(name, getLongColumnSelector(metricFactory));
} }
@Override @Override
public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
{ {
return new LongMaxBufferAggregator(metricFactory.makeLongColumnSelector(fieldName)); return new LongMaxBufferAggregator(getLongColumnSelector(metricFactory));
}
private LongColumnSelector getLongColumnSelector(ColumnSelectorFactory metricFactory)
{
return AggregatorUtil.getLongColumnSelector(metricFactory, fieldName, expression);
} }
@Override @Override
@ -80,7 +98,7 @@ public class LongMaxAggregatorFactory extends AggregatorFactory
@Override @Override
public AggregatorFactory getCombiningFactory() public AggregatorFactory getCombiningFactory()
{ {
return new LongMaxAggregatorFactory(name, name); return new LongMaxAggregatorFactory(name, name, null);
} }
@Override @Override
@ -96,7 +114,7 @@ public class LongMaxAggregatorFactory extends AggregatorFactory
@Override @Override
public List<AggregatorFactory> getRequiredColumns() public List<AggregatorFactory> getRequiredColumns()
{ {
return Arrays.<AggregatorFactory>asList(new LongMaxAggregatorFactory(fieldName, fieldName)); return Arrays.<AggregatorFactory>asList(new LongMaxAggregatorFactory(fieldName, fieldName, expression));
} }
@Override @Override
@ -117,6 +135,12 @@ public class LongMaxAggregatorFactory extends AggregatorFactory
return fieldName; return fieldName;
} }
@JsonProperty
public String getExpression()
{
return expression;
}
@Override @Override
@JsonProperty @JsonProperty
public String getName() public String getName()
@ -127,15 +151,21 @@ public class LongMaxAggregatorFactory extends AggregatorFactory
@Override @Override
public List<String> requiredFields() public List<String> requiredFields()
{ {
return Arrays.asList(fieldName); return fieldName != null ? Arrays.asList(fieldName) : Parser.findRequiredBindings(expression);
} }
@Override @Override
public byte[] getCacheKey() public byte[] getCacheKey()
{ {
byte[] fieldNameBytes = StringUtils.toUtf8(fieldName); byte[] fieldNameBytes = StringUtils.toUtf8WithNullToEmpty(fieldName);
byte[] expressionBytes = StringUtils.toUtf8WithNullToEmpty(expression);
return ByteBuffer.allocate(1 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).array(); return ByteBuffer.allocate(2 + fieldNameBytes.length + expressionBytes.length)
.put(CACHE_TYPE_ID)
.put(fieldNameBytes)
.put(AggregatorUtil.STRING_SEPARATOR)
.put(expressionBytes)
.array();
} }
@Override @Override
@ -161,6 +191,7 @@ public class LongMaxAggregatorFactory extends AggregatorFactory
{ {
return "LongMaxAggregatorFactory{" + return "LongMaxAggregatorFactory{" +
"fieldName='" + fieldName + '\'' + "fieldName='" + fieldName + '\'' +
", expression='" + expression + '\'' +
", name='" + name + '\'' + ", name='" + name + '\'' +
'}'; '}';
} }
@ -177,10 +208,13 @@ public class LongMaxAggregatorFactory extends AggregatorFactory
LongMaxAggregatorFactory that = (LongMaxAggregatorFactory) o; LongMaxAggregatorFactory that = (LongMaxAggregatorFactory) o;
if (fieldName != null ? !fieldName.equals(that.fieldName) : that.fieldName != null) { if (!Objects.equals(fieldName, that.fieldName)) {
return false; return false;
} }
if (name != null ? !name.equals(that.name) : that.name != null) { if (!Objects.equals(expression, that.expression)) {
return false;
}
if (!Objects.equals(name, that.name)) {
return false; return false;
} }
@ -191,6 +225,7 @@ public class LongMaxAggregatorFactory extends AggregatorFactory
public int hashCode() public int hashCode()
{ {
int result = fieldName != null ? fieldName.hashCode() : 0; int result = fieldName != null ? fieldName.hashCode() : 0;
result = 31 * result + (expression != null ? expression.hashCode() : 0);
result = 31 * result + (name != null ? name.hashCode() : 0); result = 31 * result + (name != null ? name.hashCode() : 0);
return result; return result;
} }
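The pattern repeated across these aggregator factories: exactly one of fieldName and expression may be set (enforced by the XOR check above), the old two-argument constructor simply delegates with a null expression, and requiredFields() falls back to whatever identifiers the expression parser reports. A minimal sketch of the intended call pattern; the metric names are illustrative, not taken from the patch:

import io.druid.query.aggregation.LongMaxAggregatorFactory;

class ExpressionParamSketch
{
  public static void main(String[] args)
  {
    // Column-backed and expression-backed variants of the same aggregator.
    LongMaxAggregatorFactory byColumn = new LongMaxAggregatorFactory("maxIndex", "index", null);
    LongMaxAggregatorFactory byExpr = new LongMaxAggregatorFactory("maxIndex", null, "index + 10");

    System.out.println(byColumn.requiredFields()); // [index] -- taken from fieldName
    System.out.println(byExpr.requiredFields());   // [index] -- parsed out of the expression
  }
}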

View File

@ -23,13 +23,16 @@ import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions; import com.google.common.base.Preconditions;
import com.google.common.primitives.Longs; import com.google.common.primitives.Longs;
import com.metamx.common.StringUtils; import io.druid.common.utils.StringUtils;
import io.druid.math.expr.Parser;
import io.druid.segment.ColumnSelectorFactory; import io.druid.segment.ColumnSelectorFactory;
import io.druid.segment.LongColumnSelector;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.util.Arrays; import java.util.Arrays;
import java.util.Comparator; import java.util.Comparator;
import java.util.List; import java.util.List;
import java.util.Objects;
/** /**
*/ */
@ -37,32 +40,47 @@ public class LongMinAggregatorFactory extends AggregatorFactory
{ {
private static final byte CACHE_TYPE_ID = 0xB; private static final byte CACHE_TYPE_ID = 0xB;
private final String fieldName;
private final String name; private final String name;
private final String fieldName;
private final String expression;
@JsonCreator @JsonCreator
public LongMinAggregatorFactory( public LongMinAggregatorFactory(
@JsonProperty("name") String name, @JsonProperty("name") String name,
@JsonProperty("fieldName") final String fieldName @JsonProperty("fieldName") final String fieldName,
@JsonProperty("expression") String expression
) )
{ {
Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name"); Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name");
Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName"); Preconditions.checkArgument(
fieldName == null ^ expression == null,
"Must have a valid, non-null fieldName or expression");
this.name = name; this.name = name;
this.fieldName = fieldName; this.fieldName = fieldName;
this.expression = expression;
}
public LongMinAggregatorFactory(String name, String fieldName)
{
this(name, fieldName, null);
} }
@Override @Override
public Aggregator factorize(ColumnSelectorFactory metricFactory) public Aggregator factorize(ColumnSelectorFactory metricFactory)
{ {
return new LongMinAggregator(name, metricFactory.makeLongColumnSelector(fieldName)); return new LongMinAggregator(name, getLongColumnSelector(metricFactory));
} }
@Override @Override
public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
{ {
return new LongMinBufferAggregator(metricFactory.makeLongColumnSelector(fieldName)); return new LongMinBufferAggregator(getLongColumnSelector(metricFactory));
}
private LongColumnSelector getLongColumnSelector(ColumnSelectorFactory metricFactory)
{
return AggregatorUtil.getLongColumnSelector(metricFactory, fieldName, expression);
} }
@Override @Override
@ -80,7 +98,7 @@ public class LongMinAggregatorFactory extends AggregatorFactory
@Override @Override
public AggregatorFactory getCombiningFactory() public AggregatorFactory getCombiningFactory()
{ {
return new LongMinAggregatorFactory(name, name); return new LongMinAggregatorFactory(name, name, null);
} }
@Override @Override
@ -96,7 +114,7 @@ public class LongMinAggregatorFactory extends AggregatorFactory
@Override @Override
public List<AggregatorFactory> getRequiredColumns() public List<AggregatorFactory> getRequiredColumns()
{ {
return Arrays.<AggregatorFactory>asList(new LongMinAggregatorFactory(fieldName, fieldName)); return Arrays.<AggregatorFactory>asList(new LongMinAggregatorFactory(fieldName, fieldName, expression));
} }
@Override @Override
@ -117,6 +135,12 @@ public class LongMinAggregatorFactory extends AggregatorFactory
return fieldName; return fieldName;
} }
@JsonProperty
public String getExpression()
{
return expression;
}
@Override @Override
@JsonProperty @JsonProperty
public String getName() public String getName()
@ -127,15 +151,21 @@ public class LongMinAggregatorFactory extends AggregatorFactory
@Override @Override
public List<String> requiredFields() public List<String> requiredFields()
{ {
return Arrays.asList(fieldName); return fieldName != null ? Arrays.asList(fieldName) : Parser.findRequiredBindings(expression);
} }
@Override @Override
public byte[] getCacheKey() public byte[] getCacheKey()
{ {
byte[] fieldNameBytes = StringUtils.toUtf8(fieldName); byte[] fieldNameBytes = StringUtils.toUtf8WithNullToEmpty(fieldName);
byte[] expressionBytes = StringUtils.toUtf8WithNullToEmpty(expression);
return ByteBuffer.allocate(1 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).array(); return ByteBuffer.allocate(2 + fieldNameBytes.length + expressionBytes.length)
.put(CACHE_TYPE_ID)
.put(fieldNameBytes)
.put(AggregatorUtil.STRING_SEPARATOR)
.put(expressionBytes)
.array();
} }
@Override @Override
@ -161,6 +191,7 @@ public class LongMinAggregatorFactory extends AggregatorFactory
{ {
return "LongMinAggregatorFactory{" + return "LongMinAggregatorFactory{" +
"fieldName='" + fieldName + '\'' + "fieldName='" + fieldName + '\'' +
", expression='" + expression + '\'' +
", name='" + name + '\'' + ", name='" + name + '\'' +
'}'; '}';
} }
@ -177,10 +208,13 @@ public class LongMinAggregatorFactory extends AggregatorFactory
LongMinAggregatorFactory that = (LongMinAggregatorFactory) o; LongMinAggregatorFactory that = (LongMinAggregatorFactory) o;
if (fieldName != null ? !fieldName.equals(that.fieldName) : that.fieldName != null) { if (!Objects.equals(fieldName, that.fieldName)) {
return false; return false;
} }
if (name != null ? !name.equals(that.name) : that.name != null) { if (!Objects.equals(expression, that.expression)) {
return false;
}
if (!Objects.equals(name, that.name)) {
return false; return false;
} }
@ -191,6 +225,7 @@ public class LongMinAggregatorFactory extends AggregatorFactory
public int hashCode() public int hashCode()
{ {
int result = fieldName != null ? fieldName.hashCode() : 0; int result = fieldName != null ? fieldName.hashCode() : 0;
result = 31 * result + (expression != null ? expression.hashCode() : 0);
result = 31 * result + (name != null ? name.hashCode() : 0); result = 31 * result + (name != null ? name.hashCode() : 0);
return result; return result;
} }

View File

@ -23,13 +23,16 @@ import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions; import com.google.common.base.Preconditions;
import com.google.common.primitives.Longs; import com.google.common.primitives.Longs;
import com.metamx.common.StringUtils; import io.druid.common.utils.StringUtils;
import io.druid.math.expr.Parser;
import io.druid.segment.ColumnSelectorFactory; import io.druid.segment.ColumnSelectorFactory;
import io.druid.segment.LongColumnSelector;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.util.Arrays; import java.util.Arrays;
import java.util.Comparator; import java.util.Comparator;
import java.util.List; import java.util.List;
import java.util.Objects;
/** /**
*/ */
@ -37,35 +40,47 @@ public class LongSumAggregatorFactory extends AggregatorFactory
{ {
private static final byte CACHE_TYPE_ID = 0x1; private static final byte CACHE_TYPE_ID = 0x1;
private final String fieldName;
private final String name; private final String name;
private final String fieldName;
private final String expression;
@JsonCreator @JsonCreator
public LongSumAggregatorFactory( public LongSumAggregatorFactory(
@JsonProperty("name") String name, @JsonProperty("name") String name,
@JsonProperty("fieldName") final String fieldName @JsonProperty("fieldName") String fieldName,
@JsonProperty("expression") String expression
) )
{ {
Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name"); Preconditions.checkNotNull(name, "Must have a valid, non-null aggregator name");
Preconditions.checkNotNull(fieldName, "Must have a valid, non-null fieldName"); Preconditions.checkArgument(
fieldName == null ^ expression == null,
"Must have a valid, non-null fieldName or expression");
this.name = name; this.name = name;
this.fieldName = fieldName; this.fieldName = fieldName;
this.expression = expression;
}
public LongSumAggregatorFactory(String name, String fieldName)
{
this(name, fieldName, null);
} }
@Override @Override
public Aggregator factorize(ColumnSelectorFactory metricFactory) public Aggregator factorize(ColumnSelectorFactory metricFactory)
{ {
return new LongSumAggregator( return new LongSumAggregator(name, getLongColumnSelector(metricFactory));
name,
metricFactory.makeLongColumnSelector(fieldName)
);
} }
@Override @Override
public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory)
{ {
return new LongSumBufferAggregator(metricFactory.makeLongColumnSelector(fieldName)); return new LongSumBufferAggregator(getLongColumnSelector(metricFactory));
}
private LongColumnSelector getLongColumnSelector(ColumnSelectorFactory metricFactory)
{
return AggregatorUtil.getLongColumnSelector(metricFactory, fieldName, expression);
} }
@Override @Override
@ -83,7 +98,7 @@ public class LongSumAggregatorFactory extends AggregatorFactory
@Override @Override
public AggregatorFactory getCombiningFactory() public AggregatorFactory getCombiningFactory()
{ {
return new LongSumAggregatorFactory(name, name); return new LongSumAggregatorFactory(name, name, null);
} }
@Override @Override
@ -99,7 +114,7 @@ public class LongSumAggregatorFactory extends AggregatorFactory
@Override @Override
public List<AggregatorFactory> getRequiredColumns() public List<AggregatorFactory> getRequiredColumns()
{ {
return Arrays.<AggregatorFactory>asList(new LongSumAggregatorFactory(fieldName, fieldName)); return Arrays.<AggregatorFactory>asList(new LongSumAggregatorFactory(fieldName, fieldName, expression));
} }
@Override @Override
@ -120,6 +135,12 @@ public class LongSumAggregatorFactory extends AggregatorFactory
return fieldName; return fieldName;
} }
@JsonProperty
public String getExpression()
{
return expression;
}
@Override @Override
@JsonProperty @JsonProperty
public String getName() public String getName()
@ -130,15 +151,21 @@ public class LongSumAggregatorFactory extends AggregatorFactory
@Override @Override
public List<String> requiredFields() public List<String> requiredFields()
{ {
return Arrays.asList(fieldName); return fieldName != null ? Arrays.asList(fieldName) : Parser.findRequiredBindings(expression);
} }
@Override @Override
public byte[] getCacheKey() public byte[] getCacheKey()
{ {
byte[] fieldNameBytes = StringUtils.toUtf8(fieldName); byte[] fieldNameBytes = StringUtils.toUtf8WithNullToEmpty(fieldName);
byte[] expressionBytes = StringUtils.toUtf8WithNullToEmpty(expression);
return ByteBuffer.allocate(1 + fieldNameBytes.length).put(CACHE_TYPE_ID).put(fieldNameBytes).array(); return ByteBuffer.allocate(2 + fieldNameBytes.length + expressionBytes.length)
.put(CACHE_TYPE_ID)
.put(fieldNameBytes)
.put(AggregatorUtil.STRING_SEPARATOR)
.put(expressionBytes)
.array();
} }
@Override @Override
@ -164,6 +191,7 @@ public class LongSumAggregatorFactory extends AggregatorFactory
{ {
return "LongSumAggregatorFactory{" + return "LongSumAggregatorFactory{" +
"fieldName='" + fieldName + '\'' + "fieldName='" + fieldName + '\'' +
", expression='" + expression + '\'' +
", name='" + name + '\'' + ", name='" + name + '\'' +
'}'; '}';
} }
@ -180,10 +208,13 @@ public class LongSumAggregatorFactory extends AggregatorFactory
LongSumAggregatorFactory that = (LongSumAggregatorFactory) o; LongSumAggregatorFactory that = (LongSumAggregatorFactory) o;
if (fieldName != null ? !fieldName.equals(that.fieldName) : that.fieldName != null) { if (!Objects.equals(fieldName, that.fieldName)) {
return false; return false;
} }
if (name != null ? !name.equals(that.name) : that.name != null) { if (!Objects.equals(expression, that.expression)) {
return false;
}
if (!Objects.equals(name, that.name)) {
return false; return false;
} }
@ -194,6 +225,7 @@ public class LongSumAggregatorFactory extends AggregatorFactory
public int hashCode() public int hashCode()
{ {
int result = fieldName != null ? fieldName.hashCode() : 0; int result = fieldName != null ? fieldName.hashCode() : 0;
result = 31 * result + (expression != null ? expression.hashCode() : 0);
result = 31 * result + (name != null ? name.hashCode() : 0); result = 31 * result + (name != null ? name.hashCode() : 0);
return result; return result;
} }
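The getCacheKey change in each factory is easy to misread in diff form: the key now carries two variable-length strings, so a one-byte separator from AggregatorUtil is written between them (keeping, say, ("ab", "c") and ("a", "bc") from colliding) and a null fieldName or expression is encoded as an empty byte array via the new toUtf8WithNullToEmpty. A self-contained sketch of that layout; the separator value below is a hypothetical stand-in, since AggregatorUtil.STRING_SEPARATOR is defined outside this excerpt:

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

class CacheKeySketch
{
  private static final byte CACHE_TYPE_ID = 0x1;            // longSum's id, from the hunk above
  private static final byte STRING_SEPARATOR = (byte) 0xFF; // hypothetical stand-in value

  static byte[] cacheKey(String fieldName, String expression)
  {
    byte[] fieldNameBytes = fieldName == null ? new byte[0] : fieldName.getBytes(StandardCharsets.UTF_8);
    byte[] expressionBytes = expression == null ? new byte[0] : expression.getBytes(StandardCharsets.UTF_8);
    // 1 byte type id + fieldName bytes + 1 byte separator + expression bytes
    return ByteBuffer.allocate(2 + fieldNameBytes.length + expressionBytes.length)
                     .put(CACHE_TYPE_ID)
                     .put(fieldNameBytes)
                     .put(STRING_SEPARATOR)
                     .put(expressionBytes)
                     .array();
  }

  public static void main(String[] args)
  {
    System.out.println(cacheKey("index", null).length);      // 7  = 1 + 5 + 1 + 0
    System.out.println(cacheKey(null, "index + 10").length); // 12 = 1 + 0 + 1 + 10
  }
}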

View File

@ -0,0 +1,195 @@
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.query.aggregation.post;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.collect.Sets;
import io.druid.math.expr.Expr;
import io.druid.math.expr.Parser;
import io.druid.query.aggregation.PostAggregator;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
/**
*/
public class ExpressionPostAggregator implements PostAggregator
{
private static final Comparator<Number> DEFAULT_COMPARATOR = new Comparator<Number>()
{
@Override
public int compare(Number o1, Number o2)
{
if (o1 instanceof Long && o2 instanceof Long) {
return Long.compare(o1.longValue(), o2.longValue());
}
return Double.compare(o1.doubleValue(), o2.doubleValue());
}
};
private final String name;
private final String expression;
private final Comparator comparator;
private final String ordering;
private final Expr parsed;
private final List<String> dependentFields;
@JsonCreator
public ExpressionPostAggregator(
@JsonProperty("name") String name,
@JsonProperty("expression") String expression,
@JsonProperty("ordering") String ordering
)
{
Preconditions.checkArgument(expression != null, "expression cannot be null");
this.name = name;
this.expression = expression;
this.ordering = ordering;
this.comparator = ordering == null ? DEFAULT_COMPARATOR : Ordering.valueOf(ordering);
this.parsed = Parser.parse(expression);
this.dependentFields = Parser.findRequiredBindings(parsed);
}
public ExpressionPostAggregator(String name, String fnName)
{
this(name, fnName, null);
}
@Override
public Set<String> getDependentFields()
{
return Sets.newHashSet(dependentFields);
}
@Override
public Comparator getComparator()
{
return comparator;
}
@Override
public Object compute(Map<String, Object> values)
{
return parsed.eval(Parser.withMap(values));
}
@Override
@JsonProperty
public String getName()
{
return name;
}
@JsonProperty("expression")
public String getExpression()
{
return expression;
}
@JsonProperty("ordering")
public String getOrdering()
{
return ordering;
}
@Override
public String toString()
{
return "ExpressionPostAggregator{" +
"name='" + name + '\'' +
", expression='" + expression + '\'' +
", ordering=" + ordering +
'}';
}
public static enum Ordering implements Comparator<Number>
{
// ensures the following order: numeric > NaN > Infinite
numericFirst {
public int compare(Number lhs, Number rhs)
{
if (lhs instanceof Long && rhs instanceof Long) {
return Long.compare(lhs.longValue(), rhs.longValue());
}
double d1 = lhs.doubleValue();
double d2 = rhs.doubleValue();
if (isFinite(d1) && !isFinite(d2)) {
return 1;
}
if (!isFinite(d1) && isFinite(d2)) {
return -1;
}
return Double.compare(d1, d2);
}
// Double.isFinite exists only in JDK8
private boolean isFinite(double value)
{
return !Double.isInfinite(value) && !Double.isNaN(value);
}
}
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ExpressionPostAggregator that = (ExpressionPostAggregator) o;
if (!comparator.equals(that.comparator)) {
return false;
}
if (!Objects.equals(name, that.name)) {
return false;
}
if (!Objects.equals(expression, that.expression)) {
return false;
}
if (!Objects.equals(ordering, that.ordering)) {
return false;
}
return true;
}
@Override
public int hashCode()
{
int result = name != null ? name.hashCode() : 0;
result = 31 * result + expression.hashCode();
result = 31 * result + comparator.hashCode();
result = 31 * result + (ordering != null ? ordering.hashCode() : 0);
return result;
}
}
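ExpressionPostAggregator is the post-aggregation counterpart of the expression-parameterized factories: it parses the expression once in the constructor, reports the parsed identifiers as its dependent fields, and evaluates against the merged result row in compute(). With no ordering it compares longs as longs and everything else as doubles; the optional "numericFirst" ordering additionally ranks finite values above NaN and NaN above infinities. A small usage sketch mirroring ArithmeticPostAggregatorTest further down; the field names are illustrative:

import io.druid.query.aggregation.post.ExpressionPostAggregator;

import java.util.HashMap;
import java.util.Map;

class ExpressionPostAggregatorSketch
{
  public static void main(String[] args)
  {
    // Values assumed to already be present in the merged result row.
    Map<String, Object> row = new HashMap<>();
    row.put("rows", 3L);
    row.put("roku", 6L);

    ExpressionPostAggregator postAgg = new ExpressionPostAggregator("add", "roku + rows");
    System.out.println(postAgg.compute(row)); // 9, as exercised in ArithmeticPostAggregatorTest
  }
}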

View File

@ -402,6 +402,40 @@ public class GroupByQuery extends BaseQuery<Row>
); );
} }
public GroupByQuery withAggregatorSpecs(final List<AggregatorFactory> aggregatorSpecs)
{
return new GroupByQuery(
getDataSource(),
getQuerySegmentSpec(),
getDimFilter(),
getGranularity(),
getDimensions(),
aggregatorSpecs,
getPostAggregatorSpecs(),
getHavingSpec(),
getLimitSpec(),
limitFn,
getContext()
);
}
public GroupByQuery withPostAggregatorSpecs(final List<PostAggregator> postAggregatorSpecs)
{
return new GroupByQuery(
getDataSource(),
getQuerySegmentSpec(),
getDimFilter(),
getGranularity(),
getDimensions(),
getAggregatorSpecs(),
postAggregatorSpecs,
getHavingSpec(),
getLimitSpec(),
limitFn,
getContext()
);
}
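These two copy methods mirror the existing with* family: the query is rebuilt with the same data source, filter, granularity, dimensions, limit and context, swapping only the aggregator or post-aggregator specs. They exist mainly so the tests further down can re-run an already-built query with expression-based specs, roughly as in this sketch (the specs are lifted from those tests; the method itself is illustrative):

import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.LongSumAggregatorFactory;
import io.druid.query.aggregation.PostAggregator;
import io.druid.query.aggregation.post.ExpressionPostAggregator;
import io.druid.query.groupby.GroupByQuery;

import java.util.Arrays;

class WithSpecsSketch
{
  // Swap a raw-column sum for an expression sum and append an expression post-aggregator;
  // everything else on the query is carried over unchanged.
  static GroupByQuery rewrite(GroupByQuery original)
  {
    return original
        .withAggregatorSpecs(
            Arrays.<AggregatorFactory>asList(
                new LongSumAggregatorFactory("idx", null, "index * 2 + indexMin / 10")))
        .withPostAggregatorSpecs(
            Arrays.<PostAggregator>asList(
                new ExpressionPostAggregator("rows_times_10", "rows * 10.0")));
  }
}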
public static class Builder public static class Builder
{ {
private DataSource dataSource; private DataSource dataSource;

View File

@ -25,6 +25,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Function; import com.google.common.base.Function;
import com.google.common.base.Preconditions; import com.google.common.base.Preconditions;
import com.google.common.base.Strings; import com.google.common.base.Strings;
import com.google.common.base.Supplier;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.google.common.collect.Maps; import com.google.common.collect.Maps;
import com.google.common.primitives.Chars; import com.google.common.primitives.Chars;
@ -35,6 +36,9 @@ import com.metamx.common.guava.Accumulator;
import io.druid.data.input.MapBasedRow; import io.druid.data.input.MapBasedRow;
import io.druid.data.input.Row; import io.druid.data.input.Row;
import io.druid.granularity.AllGranularity; import io.druid.granularity.AllGranularity;
import io.druid.math.expr.Evals;
import io.druid.math.expr.Expr;
import io.druid.math.expr.Parser;
import io.druid.query.QueryInterruptedException; import io.druid.query.QueryInterruptedException;
import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.dimension.DimensionSpec; import io.druid.query.dimension.DimensionSpec;
@ -46,6 +50,7 @@ import io.druid.segment.ColumnSelectorFactory;
import io.druid.segment.DimensionSelector; import io.druid.segment.DimensionSelector;
import io.druid.segment.FloatColumnSelector; import io.druid.segment.FloatColumnSelector;
import io.druid.segment.LongColumnSelector; import io.druid.segment.LongColumnSelector;
import io.druid.segment.NumericColumnSelector;
import io.druid.segment.ObjectColumnSelector; import io.druid.segment.ObjectColumnSelector;
import io.druid.segment.column.Column; import io.druid.segment.column.Column;
import io.druid.segment.column.ColumnCapabilities; import io.druid.segment.column.ColumnCapabilities;
@ -649,6 +654,38 @@ public class RowBasedGrouperHelper
}; };
} }
@Override
public NumericColumnSelector makeMathExpressionSelector(String expression)
{
final Expr parsed = Parser.parse(expression);
final List<String> required = Parser.findRequiredBindings(parsed);
final Map<String, Supplier<Number>> values = Maps.newHashMapWithExpectedSize(required.size());
for (final String columnName : required) {
values.put(
columnName, new Supplier<Number>()
{
@Override
public Number get()
{
return Evals.toNumber(row.get().getRaw(columnName));
}
}
);
}
final Expr.ObjectBinding binding = Parser.withSuppliers(values);
return new NumericColumnSelector()
{
@Override
public Number get()
{
return parsed.eval(binding);
}
};
}
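Every makeMathExpressionSelector added in this patch has the same shape: parse the expression once, ask the parser which identifiers it needs, bind each of them to a Supplier that reads the current row (here via row.get().getRaw(columnName)), and re-evaluate the parsed tree on every get(). Stripped of the row plumbing the pattern looks like this; the bound values are hard-coded stand-ins:

import com.google.common.base.Supplier;
import com.google.common.collect.Maps;
import io.druid.math.expr.Expr;
import io.druid.math.expr.Parser;

import java.util.Map;

class ExpressionBindingSketch
{
  public static void main(String[] args)
  {
    final Expr parsed = Parser.parse("x + y");
    final Map<String, Supplier<Number>> values = Maps.newHashMap();
    values.put("x", new Supplier<Number>()
    {
      @Override
      public Number get()
      {
        return 3L; // the real selectors read the current row or cursor offset here
      }
    });
    values.put("y", new Supplier<Number>()
    {
      @Override
      public Number get()
      {
        return 4L;
      }
    });
    final Expr.ObjectBinding binding = Parser.withSuppliers(values);
    System.out.println(parsed.eval(binding)); // 7
  }
}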
@Override @Override
public ColumnCapabilities getColumnCapabilities(String columnName) public ColumnCapabilities getColumnCapabilities(String columnName)
{ {

View File

@ -169,7 +169,8 @@ public class TopNQuery extends BaseQuery<Result<TopNResultValue>>
); );
} }
public TopNQuery withDimensionSpec(DimensionSpec spec){ public TopNQuery withDimensionSpec(DimensionSpec spec)
{
return new TopNQuery( return new TopNQuery(
getDataSource(), getDataSource(),
spec, spec,
@ -183,7 +184,25 @@ public class TopNQuery extends BaseQuery<Result<TopNResultValue>>
getContext() getContext()
); );
} }
public TopNQuery withPostAggregatorSpecs(List<PostAggregator> postAggregatorSpecs){
public TopNQuery withAggregatorSpecs(List<AggregatorFactory> aggregatorSpecs)
{
return new TopNQuery(
getDataSource(),
getDimensionSpec(),
topNMetricSpec,
threshold,
getQuerySegmentSpec(),
dimFilter,
granularity,
aggregatorSpecs,
postAggregatorSpecs,
getContext()
);
}
public TopNQuery withPostAggregatorSpecs(List<PostAggregator> postAggregatorSpecs)
{
return new TopNQuery( return new TopNQuery(
getDataSource(), getDataSource(),
getDimensionSpec(), getDimensionSpec(),

View File

@ -18,10 +18,12 @@
*/ */
package io.druid.segment; package io.druid.segment;
import io.druid.segment.column.Column; import io.druid.segment.column.Column;
import io.druid.segment.data.Indexed;
/** /**
*/ */
public interface ColumnSelector public interface ColumnSelector
{ {
public Indexed<String> getColumnNames();
public Column getColumn(String columnName); public Column getColumn(String columnName);
} }

View File

@ -31,5 +31,6 @@ public interface ColumnSelectorFactory
public FloatColumnSelector makeFloatColumnSelector(String columnName); public FloatColumnSelector makeFloatColumnSelector(String columnName);
public LongColumnSelector makeLongColumnSelector(String columnName); public LongColumnSelector makeLongColumnSelector(String columnName);
public ObjectColumnSelector makeObjectColumnSelector(String columnName); public ObjectColumnSelector makeObjectColumnSelector(String columnName);
public NumericColumnSelector makeMathExpressionSelector(String expression);
public ColumnCapabilities getColumnCapabilities(String columnName); public ColumnCapabilities getColumnCapabilities(String columnName);
} }

View File

@ -0,0 +1,27 @@
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.segment;
/**
*/
public interface NumericColumnSelector
{
Number get();
}
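NumericColumnSelector is the small interface every makeMathExpressionSelector in this patch returns. The factories earlier in the diff reach it through AggregatorUtil.getLongColumnSelector, whose body is not part of this excerpt; presumably it prefers the plain long selector when a fieldName is given and otherwise adapts the expression selector, roughly as in this sketch (the helper body is a guess; only the selector interfaces come from the patch):

import io.druid.segment.ColumnSelectorFactory;
import io.druid.segment.LongColumnSelector;
import io.druid.segment.NumericColumnSelector;

class LongSelectorBridgeSketch
{
  static LongColumnSelector getLongColumnSelector(
      ColumnSelectorFactory factory,
      String fieldName,
      String expression
  )
  {
    if (fieldName != null) {
      return factory.makeLongColumnSelector(fieldName);
    }
    // fieldName XOR expression is enforced by the factories, so expression is non-null here.
    final NumericColumnSelector selector = factory.makeMathExpressionSelector(expression);
    return new LongColumnSelector()
    {
      @Override
      public long get()
      {
        return selector.get().longValue();
      }
    };
  }
}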

View File

@ -33,7 +33,6 @@ public interface QueryableIndex extends ColumnSelector, Closeable
{ {
public Interval getDataInterval(); public Interval getDataInterval();
public int getNumRows(); public int getNumRows();
public Indexed<String> getColumnNames();
public Indexed<String> getAvailableDimensions(); public Indexed<String> getAvailableDimensions();
public BitmapFactory getBitmapFactoryForDimensions(); public BitmapFactory getBitmapFactoryForDimensions();
public Metadata getMetadata(); public Metadata getMetadata();

View File

@ -23,6 +23,7 @@ import com.google.common.base.Function;
import com.google.common.base.Predicate; import com.google.common.base.Predicate;
import com.google.common.base.Predicates; import com.google.common.base.Predicates;
import com.google.common.base.Strings; import com.google.common.base.Strings;
import com.google.common.base.Supplier;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.google.common.collect.Maps; import com.google.common.collect.Maps;
@ -32,6 +33,8 @@ import com.metamx.common.guava.CloseQuietly;
import com.metamx.common.guava.Sequence; import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences; import com.metamx.common.guava.Sequences;
import io.druid.granularity.QueryGranularity; import io.druid.granularity.QueryGranularity;
import io.druid.math.expr.Expr;
import io.druid.math.expr.Parser;
import io.druid.query.QueryInterruptedException; import io.druid.query.QueryInterruptedException;
import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.query.dimension.DefaultDimensionSpec;
import io.druid.query.dimension.DimensionSpec; import io.druid.query.dimension.DimensionSpec;
@ -804,6 +807,59 @@ public class QueryableIndexStorageAdapter implements StorageAdapter
}; };
} }
@Override
public NumericColumnSelector makeMathExpressionSelector(String expression)
{
final Expr parsed = Parser.parse(expression);
final List<String> required = Parser.findRequiredBindings(parsed);
final Map<String, Supplier<Number>> values = Maps.newHashMapWithExpectedSize(required.size());
for (String columnName : index.getColumnNames()) {
if (!required.contains(columnName)) {
continue;
}
final GenericColumn column = index.getColumn(columnName).getGenericColumn();
if (column == null) {
continue;
}
if (column.getType() == ValueType.FLOAT) {
values.put(
columnName, new Supplier<Number>()
{
@Override
public Number get()
{
return column.getFloatSingleValueRow(cursorOffset.getOffset());
}
}
);
} else if (column.getType() == ValueType.LONG) {
values.put(
columnName, new Supplier<Number>()
{
@Override
public Number get()
{
return column.getLongSingleValueRow(cursorOffset.getOffset());
}
}
);
} else {
throw new UnsupportedOperationException(
"Not supported type " + column.getType() + " for column " + columnName
);
}
}
final Expr.ObjectBinding binding = Parser.withSuppliers(values);
return new NumericColumnSelector() {
@Override
public Number get()
{
return parsed.eval(binding);
}
};
}
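Unlike the row-based selectors, this implementation binds only FLOAT and LONG generic columns and rejects any other required column type up front; required names with no generic column are silently skipped. From the query engine's side the selector is consumed once per cursor position, along these lines (an illustrative loop, not code from the patch):

import io.druid.segment.Cursor;
import io.druid.segment.NumericColumnSelector;

class CursorExpressionSketch
{
  // The selector is created once per cursor; get() re-evaluates as the cursor offset advances.
  static double sumExpression(Cursor cursor, String expression)
  {
    final NumericColumnSelector selector = cursor.makeMathExpressionSelector(expression);
    double sum = 0;
    while (!cursor.isDone()) {
      sum += selector.get().doubleValue();
      cursor.advance();
    }
    return sum;
  }
}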
@Override @Override
public ColumnCapabilities getColumnCapabilities(String columnName) public ColumnCapabilities getColumnCapabilities(String columnName)
{ {
@ -947,6 +1003,7 @@ public class QueryableIndexStorageAdapter implements StorageAdapter
} }
}; };
} }
} }
} }
), ),

View File

@ -39,6 +39,9 @@ import io.druid.data.input.impl.DimensionSchema;
import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.DimensionsSpec;
import io.druid.data.input.impl.SpatialDimensionSchema; import io.druid.data.input.impl.SpatialDimensionSchema;
import io.druid.granularity.QueryGranularity; import io.druid.granularity.QueryGranularity;
import io.druid.math.expr.Evals;
import io.druid.math.expr.Expr;
import io.druid.math.expr.Parser;
import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.PostAggregator; import io.druid.query.aggregation.PostAggregator;
import io.druid.query.dimension.DimensionSpec; import io.druid.query.dimension.DimensionSpec;
@ -51,6 +54,7 @@ import io.druid.segment.DimensionSelector;
import io.druid.segment.FloatColumnSelector; import io.druid.segment.FloatColumnSelector;
import io.druid.segment.LongColumnSelector; import io.druid.segment.LongColumnSelector;
import io.druid.segment.Metadata; import io.druid.segment.Metadata;
import io.druid.segment.NumericColumnSelector;
import io.druid.segment.ObjectColumnSelector; import io.druid.segment.ObjectColumnSelector;
import io.druid.segment.column.Column; import io.druid.segment.column.Column;
import io.druid.segment.column.ColumnCapabilities; import io.druid.segment.column.ColumnCapabilities;
@ -291,6 +295,38 @@ public abstract class IncrementalIndex<AggregatorType> implements Iterable<Row>,
} }
}; };
} }
@Override
public NumericColumnSelector makeMathExpressionSelector(String expression)
{
final Expr parsed = Parser.parse(expression);
final List<String> required = Parser.findRequiredBindings(parsed);
final Map<String, Supplier<Number>> values = Maps.newHashMapWithExpectedSize(required.size());
for (final String columnName : required) {
values.put(
columnName, new Supplier<Number>()
{
@Override
public Number get()
{
return Evals.toNumber(in.get().getRaw(columnName));
}
}
);
}
final Expr.ObjectBinding binding = Parser.withSuppliers(values);
return new NumericColumnSelector()
{
@Override
public Number get()
{
return parsed.eval(binding);
}
};
}
}; };
} }

View File

@ -21,12 +21,16 @@ package io.druid.segment.incremental;
import com.google.common.base.Function; import com.google.common.base.Function;
import com.google.common.base.Strings; import com.google.common.base.Strings;
import com.google.common.base.Supplier;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterators; import com.google.common.collect.Iterators;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.metamx.common.guava.Sequence; import com.metamx.common.guava.Sequence;
import com.metamx.common.guava.Sequences; import com.metamx.common.guava.Sequences;
import io.druid.granularity.QueryGranularity; import io.druid.granularity.QueryGranularity;
import io.druid.math.expr.Expr;
import io.druid.math.expr.Parser;
import io.druid.query.QueryInterruptedException; import io.druid.query.QueryInterruptedException;
import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.query.dimension.DefaultDimensionSpec;
import io.druid.query.dimension.DimensionSpec; import io.druid.query.dimension.DimensionSpec;
@ -45,6 +49,7 @@ import io.druid.segment.FloatColumnSelector;
import io.druid.segment.LongColumnSelector; import io.druid.segment.LongColumnSelector;
import io.druid.segment.Metadata; import io.druid.segment.Metadata;
import io.druid.segment.NullDimensionSelector; import io.druid.segment.NullDimensionSelector;
import io.druid.segment.NumericColumnSelector;
import io.druid.segment.ObjectColumnSelector; import io.druid.segment.ObjectColumnSelector;
import io.druid.segment.SingleScanTimeDimSelector; import io.druid.segment.SingleScanTimeDimSelector;
import io.druid.segment.StorageAdapter; import io.druid.segment.StorageAdapter;
@ -60,6 +65,7 @@ import org.joda.time.Interval;
import javax.annotation.Nullable; import javax.annotation.Nullable;
import java.util.Iterator; import java.util.Iterator;
import java.util.List;
import java.util.Map; import java.util.Map;
/** /**
@ -68,10 +74,10 @@ public class IncrementalIndexStorageAdapter implements StorageAdapter
{ {
private static final NullDimensionSelector NULL_DIMENSION_SELECTOR = new NullDimensionSelector(); private static final NullDimensionSelector NULL_DIMENSION_SELECTOR = new NullDimensionSelector();
private final IncrementalIndex index; private final IncrementalIndex<?> index;
public IncrementalIndexStorageAdapter( public IncrementalIndexStorageAdapter(
IncrementalIndex index IncrementalIndex<?> index
) )
{ {
this.index = index; this.index = index;
@ -537,6 +543,55 @@ public class IncrementalIndexStorageAdapter implements StorageAdapter
{ {
return index.getCapabilities(columnName); return index.getCapabilities(columnName);
} }
@Override
public NumericColumnSelector makeMathExpressionSelector(String expression)
{
final Expr parsed = Parser.parse(expression);
final List<String> required = Parser.findRequiredBindings(parsed);
final Map<String, Supplier<Number>> values = Maps.newHashMapWithExpectedSize(required.size());
for (String columnName : index.getMetricNames()) {
if (!required.contains(columnName)) {
continue;
}
ValueType type = index.getCapabilities(columnName).getType();
if (type == ValueType.FLOAT) {
final int metricIndex = index.getMetricIndex(columnName);
values.put(
columnName, new Supplier<Number>()
{
@Override
public Number get()
{
return index.getMetricFloatValue(currEntry.getValue(), metricIndex);
}
}
);
} else if (type == ValueType.LONG) {
final int metricIndex = index.getMetricIndex(columnName);
values.put(
columnName, new Supplier<Number>()
{
@Override
public Number get()
{
return index.getMetricLongValue(currEntry.getValue(), metricIndex);
}
}
);
}
}
final Expr.ObjectBinding binding = Parser.withSuppliers(values);
return new NumericColumnSelector() {
@Override
public Number get()
{
return parsed.eval(binding);
}
};
}
}; };
} }
} }

View File

@ -20,7 +20,6 @@
package io.druid.segment.incremental; package io.druid.segment.incremental;
import com.google.common.base.Supplier; import com.google.common.base.Supplier;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps; import com.google.common.collect.Maps;
import com.metamx.common.logger.Logger; import com.metamx.common.logger.Logger;
import com.metamx.common.parsers.ParseException; import com.metamx.common.parsers.ParseException;
@ -33,10 +32,10 @@ import io.druid.segment.ColumnSelectorFactory;
import io.druid.segment.DimensionSelector; import io.druid.segment.DimensionSelector;
import io.druid.segment.FloatColumnSelector; import io.druid.segment.FloatColumnSelector;
import io.druid.segment.LongColumnSelector; import io.druid.segment.LongColumnSelector;
import io.druid.segment.NumericColumnSelector;
import io.druid.segment.ObjectColumnSelector; import io.druid.segment.ObjectColumnSelector;
import io.druid.segment.column.ColumnCapabilities; import io.druid.segment.column.ColumnCapabilities;
import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ConcurrentMap;
@ -403,6 +402,12 @@ public class OnheapIncrementalIndex extends IncrementalIndex<Aggregator>
{ {
return delegate.getColumnCapabilities(columnName); return delegate.getColumnCapabilities(columnName);
} }
@Override
public NumericColumnSelector makeMathExpressionSelector(String expression)
{
return delegate.makeMathExpressionSelector(expression);
}
} }
} }

View File

@ -40,6 +40,7 @@ import io.druid.segment.ColumnSelectorFactory;
import io.druid.segment.DimensionSelector; import io.druid.segment.DimensionSelector;
import io.druid.segment.FloatColumnSelector; import io.druid.segment.FloatColumnSelector;
import io.druid.segment.LongColumnSelector; import io.druid.segment.LongColumnSelector;
import io.druid.segment.NumericColumnSelector;
import io.druid.segment.ObjectColumnSelector; import io.druid.segment.ObjectColumnSelector;
import io.druid.segment.column.ColumnCapabilities; import io.druid.segment.column.ColumnCapabilities;
import io.druid.segment.column.ColumnCapabilitiesImpl; import io.druid.segment.column.ColumnCapabilitiesImpl;
@ -184,6 +185,12 @@ public class FilteredAggregatorTest
} }
return caps; return caps;
} }
@Override
public NumericColumnSelector makeMathExpressionSelector(String expression)
{
throw new UnsupportedOperationException();
}
}; };
} }

View File

@ -28,6 +28,7 @@ import io.druid.segment.ColumnSelectorFactory;
import io.druid.segment.DimensionSelector; import io.druid.segment.DimensionSelector;
import io.druid.segment.FloatColumnSelector; import io.druid.segment.FloatColumnSelector;
import io.druid.segment.LongColumnSelector; import io.druid.segment.LongColumnSelector;
import io.druid.segment.NumericColumnSelector;
import io.druid.segment.ObjectColumnSelector; import io.druid.segment.ObjectColumnSelector;
import io.druid.segment.column.ColumnCapabilities; import io.druid.segment.column.ColumnCapabilities;
import org.junit.Assert; import org.junit.Assert;
@ -76,6 +77,12 @@ public class JavaScriptAggregatorTest
{ {
return null; return null;
} }
@Override
public NumericColumnSelector makeMathExpressionSelector(String expression)
{
return null;
}
}; };
static { static {

View File

@ -40,6 +40,7 @@ public class ArithmeticPostAggregatorTest
public void testCompute() public void testCompute()
{ {
ArithmeticPostAggregator arithmeticPostAggregator; ArithmeticPostAggregator arithmeticPostAggregator;
ExpressionPostAggregator expressionPostAggregator;
CountAggregator agg = new CountAggregator("rows"); CountAggregator agg = new CountAggregator("rows");
agg.aggregate(); agg.aggregate();
agg.aggregate(); agg.aggregate();
@ -57,17 +58,29 @@ public class ArithmeticPostAggregatorTest
) )
); );
for (PostAggregator postAggregator : postAggregatorList) {
metricValues.put(postAggregator.getName(), postAggregator.compute(metricValues));
}
arithmeticPostAggregator = new ArithmeticPostAggregator("add", "+", postAggregatorList); arithmeticPostAggregator = new ArithmeticPostAggregator("add", "+", postAggregatorList);
expressionPostAggregator = new ExpressionPostAggregator("add", "roku + rows");
Assert.assertEquals(9.0, arithmeticPostAggregator.compute(metricValues)); Assert.assertEquals(9.0, arithmeticPostAggregator.compute(metricValues));
Assert.assertEquals(9.0, expressionPostAggregator.compute(metricValues));
arithmeticPostAggregator = new ArithmeticPostAggregator("subtract", "-", postAggregatorList); arithmeticPostAggregator = new ArithmeticPostAggregator("subtract", "-", postAggregatorList);
expressionPostAggregator = new ExpressionPostAggregator("add", "roku - rows");
Assert.assertEquals(3.0, arithmeticPostAggregator.compute(metricValues)); Assert.assertEquals(3.0, arithmeticPostAggregator.compute(metricValues));
Assert.assertEquals(3.0, expressionPostAggregator.compute(metricValues));
arithmeticPostAggregator = new ArithmeticPostAggregator("multiply", "*", postAggregatorList); arithmeticPostAggregator = new ArithmeticPostAggregator("multiply", "*", postAggregatorList);
expressionPostAggregator = new ExpressionPostAggregator("add", "roku * rows");
Assert.assertEquals(18.0, arithmeticPostAggregator.compute(metricValues)); Assert.assertEquals(18.0, arithmeticPostAggregator.compute(metricValues));
Assert.assertEquals(18.0, expressionPostAggregator.compute(metricValues));
arithmeticPostAggregator = new ArithmeticPostAggregator("divide", "/", postAggregatorList); arithmeticPostAggregator = new ArithmeticPostAggregator("divide", "/", postAggregatorList);
expressionPostAggregator = new ExpressionPostAggregator("add", "roku / rows");
Assert.assertEquals(2.0, arithmeticPostAggregator.compute(metricValues)); Assert.assertEquals(2.0, arithmeticPostAggregator.compute(metricValues));
Assert.assertEquals(2.0, expressionPostAggregator.compute(metricValues));
} }
@Test @Test

View File

@ -48,6 +48,7 @@ import io.druid.query.DruidProcessingConfig;
import io.druid.query.Druids; import io.druid.query.Druids;
import io.druid.query.FinalizeResultsQueryRunner; import io.druid.query.FinalizeResultsQueryRunner;
import io.druid.query.Query; import io.druid.query.Query;
import io.druid.query.QueryDataSource;
import io.druid.query.QueryRunner; import io.druid.query.QueryRunner;
import io.druid.query.QueryRunnerTestHelper; import io.druid.query.QueryRunnerTestHelper;
import io.druid.query.QueryToolChest; import io.druid.query.QueryToolChest;
@ -66,6 +67,7 @@ import io.druid.query.aggregation.hyperloglog.HyperUniqueFinalizingPostAggregato
import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
import io.druid.query.aggregation.post.ArithmeticPostAggregator; import io.druid.query.aggregation.post.ArithmeticPostAggregator;
import io.druid.query.aggregation.post.ConstantPostAggregator; import io.druid.query.aggregation.post.ConstantPostAggregator;
import io.druid.query.aggregation.post.ExpressionPostAggregator;
import io.druid.query.aggregation.post.FieldAccessPostAggregator; import io.druid.query.aggregation.post.FieldAccessPostAggregator;
import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.query.dimension.DefaultDimensionSpec;
import io.druid.query.dimension.DimensionSpec; import io.druid.query.dimension.DimensionSpec;
@ -2044,7 +2046,7 @@ public class GroupByQueryRunnerTest
) )
) )
.setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
.setLimit(Integer.valueOf(limit)); .setLimit(limit);
final GroupByQuery fullQuery = builder.build(); final GroupByQuery fullQuery = builder.build();
@ -2087,7 +2089,7 @@ public class GroupByQueryRunnerTest
.setLimit(limit) .setLimit(limit)
.addOrderByColumn("idx", OrderByColumnSpec.Direction.DESCENDING); .addOrderByColumn("idx", OrderByColumnSpec.Direction.DESCENDING);
final GroupByQuery fullQuery = builder.build(); GroupByQuery fullQuery = builder.build();
List<Row> expectedResults = Arrays.asList( List<Row> expectedResults = Arrays.asList(
GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "premium", "rows", 3L, "idx", 2900L), GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "premium", "rows", 3L, "idx", 2900L),
@ -2113,8 +2115,39 @@ public class GroupByQueryRunnerTest
TestHelper.assertExpectedObjects( TestHelper.assertExpectedObjects(
Iterables.limit(expectedResults, limit), mergeRunner.run(fullQuery, context), String.format("limit: %d", limit) Iterables.limit(expectedResults, limit), mergeRunner.run(fullQuery, context), String.format("limit: %d", limit)
); );
}
builder.setAggregatorSpecs(
Arrays.asList(
QueryRunnerTestHelper.rowsCount,
new LongSumAggregatorFactory("idx", null, "index * 2 + indexMin / 10")
)
);
fullQuery = builder.build();
expectedResults = Arrays.asList(
GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "premium", "rows", 3L, "idx", 6090L),
GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "mezzanine", "rows", 3L, "idx", 6030L),
GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "entertainment", "rows", 1L, "idx", 333L),
GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "automotive", "rows", 1L, "idx", 285L),
GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "news", "rows", 1L, "idx", 255L),
GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "health", "rows", 1L, "idx", 252L),
GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "travel", "rows", 1L, "idx", 251L),
GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "business", "rows", 1L, "idx", 248L),
GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "alias", "technology", "rows", 1L, "idx", 165L),
GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "premium", "rows", 3L, "idx", 5262L),
GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "mezzanine", "rows", 3L, "idx", 5141L),
GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "entertainment", "rows", 1L, "idx", 348L),
GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "automotive", "rows", 1L, "idx", 309L),
GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "alias", "travel", "rows", 1L, "idx", 265L)
);
mergeRunner = factory.getToolchest().mergeResults(runner);
TestHelper.assertExpectedObjects(
Iterables.limit(expectedResults, limit), mergeRunner.run(fullQuery, context), String.format("limit: %d", limit)
);
}
@Test(expected = IllegalArgumentException.class) @Test(expected = IllegalArgumentException.class)
public void testMergeResultsWithNegativeLimit() public void testMergeResultsWithNegativeLimit()
@ -2131,7 +2164,7 @@ public class GroupByQueryRunnerTest
) )
) )
.setGranularity(new PeriodGranularity(new Period("P1M"), null, null)) .setGranularity(new PeriodGranularity(new Period("P1M"), null, null))
.setLimit(Integer.valueOf(-1)); .setLimit(-1);
builder.build(); builder.build();
} }
@ -2282,6 +2315,32 @@ public class GroupByQueryRunnerTest
TestHelper.assertExpectedObjects( TestHelper.assertExpectedObjects(
Iterables.limit(expectedResults, 5), mergeRunner.run(builder.limit(5).build(), context), "limited" Iterables.limit(expectedResults, 5), mergeRunner.run(builder.limit(5).build(), context), "limited"
); );
builder.limit(Integer.MAX_VALUE)
.setAggregatorSpecs(
Arrays.asList(
QueryRunnerTestHelper.rowsCount,
new DoubleSumAggregatorFactory("idx", null, "index / 2 + indexMin")
)
);
expectedResults = GroupByQueryRunnerTestHelper.createExpectedRows(
new String[]{"__time", "alias", "rows", "idx"},
new Object[]{"2011-04-01", "travel", 2L, 365.4876403808594D},
new Object[]{"2011-04-01", "technology", 2L, 267.3737487792969D},
new Object[]{"2011-04-01", "news", 2L, 333.3147277832031D},
new Object[]{"2011-04-01", "health", 2L, 325.467529296875D},
new Object[]{"2011-04-01", "entertainment", 2L, 479.916015625D},
new Object[]{"2011-04-01", "business", 2L, 328.083740234375D},
new Object[]{"2011-04-01", "automotive", 2L, 405.5966796875D},
new Object[]{"2011-04-01", "premium", 6L, 6627.927734375D},
new Object[]{"2011-04-01", "mezzanine", 6L, 6635.47998046875D}
);
TestHelper.assertExpectedObjects(expectedResults, mergeRunner.run(builder.build(), context), "no-limit");
TestHelper.assertExpectedObjects(
Iterables.limit(expectedResults, 5), mergeRunner.run(builder.limit(5).build(), context), "limited"
);
} }
@Test @Test
@ -2342,90 +2401,19 @@ public class GroupByQueryRunnerTest
.addOrderByColumn("alias", OrderByColumnSpec.Direction.DESCENDING) .addOrderByColumn("alias", OrderByColumnSpec.Direction.DESCENDING)
.setGranularity(new PeriodGranularity(new Period("P1M"), null, null)); .setGranularity(new PeriodGranularity(new Period("P1M"), null, null));
final GroupByQuery query = builder.build(); GroupByQuery query = builder.build();
List<Row> expectedResults = Arrays.asList( List<Row> expectedResults = GroupByQueryRunnerTestHelper.createExpectedRows(
GroupByQueryRunnerTestHelper.createExpectedRow( new String[]{"__time", "alias", "rows", "idx"},
"2011-04-01", new Object[]{"2011-04-01", "mezzanine", 6L, 4423.6533203125D},
"alias", new Object[]{"2011-04-01", "premium", 6L, 4418.61865234375D},
"mezzanine", new Object[]{"2011-04-01", "entertainment", 2L, 319.94403076171875D},
"rows", new Object[]{"2011-04-01", "automotive", 2L, 270.3977966308594D},
6L, new Object[]{"2011-04-01", "travel", 2L, 243.65843200683594D},
"idx", new Object[]{"2011-04-01", "news", 2L, 222.20980834960938D},
4423.6533203125D new Object[]{"2011-04-01", "business", 2L, 218.7224884033203D},
), new Object[]{"2011-04-01", "health", 2L, 216.97836303710938D},
GroupByQueryRunnerTestHelper.createExpectedRow( new Object[]{"2011-04-01", "technology", 2L, 178.24917602539062D}
"2011-04-01",
"alias",
"premium",
"rows",
6L,
"idx",
4418.61865234375D
),
GroupByQueryRunnerTestHelper.createExpectedRow(
"2011-04-01",
"alias",
"entertainment",
"rows",
2L,
"idx",
319.94403076171875D
),
GroupByQueryRunnerTestHelper.createExpectedRow(
"2011-04-01",
"alias",
"automotive",
"rows",
2L,
"idx",
270.3977966308594D
),
GroupByQueryRunnerTestHelper.createExpectedRow(
"2011-04-01",
"alias",
"travel",
"rows",
2L,
"idx",
243.65843200683594D
),
GroupByQueryRunnerTestHelper.createExpectedRow(
"2011-04-01",
"alias",
"news",
"rows",
2L,
"idx",
222.20980834960938D
),
GroupByQueryRunnerTestHelper.createExpectedRow(
"2011-04-01",
"alias",
"business",
"rows",
2L,
"idx",
218.7224884033203D
),
GroupByQueryRunnerTestHelper.createExpectedRow(
"2011-04-01",
"alias",
"health",
"rows",
2L,
"idx",
216.97836303710938D
),
GroupByQueryRunnerTestHelper.createExpectedRow(
"2011-04-01",
"alias",
"technology",
"rows",
2L,
"idx",
178.24917602539062D
)
); );
Map<String, Object> context = Maps.newHashMap(); Map<String, Object> context = Maps.newHashMap();
@ -3270,7 +3258,7 @@ public class GroupByQueryRunnerTest
) )
); );
final GroupByQuery fullQuery = builder.build(); GroupByQuery fullQuery = builder.build();
QueryRunner mergedRunner = factory.getToolchest().mergeResults( QueryRunner mergedRunner = factory.getToolchest().mergeResults(
new QueryRunner<Row>() new QueryRunner<Row>()
@ -3379,7 +3367,7 @@ public class GroupByQueryRunnerTest
) )
); );
final GroupByQuery fullQuery = builder.build(); GroupByQuery fullQuery = builder.build();
QueryRunner mergedRunner = factory.getToolchest().mergeResults( QueryRunner mergedRunner = factory.getToolchest().mergeResults(
new QueryRunner<Row>() new QueryRunner<Row>()
@ -3417,6 +3405,22 @@ public class GroupByQueryRunnerTest
).run(fullQuery, context), ).run(fullQuery, context),
"merged" "merged"
); );
fullQuery = fullQuery.withPostAggregatorSpecs(
Arrays.<PostAggregator>asList(
new ExpressionPostAggregator("rows_times_10", "rows * 10.0")
)
);
TestHelper.assertExpectedObjects(
expectedResults,
factory.getToolchest().postMergeQueryDecoration(
factory.getToolchest().mergeResults(
factory.getToolchest().preMergeQueryDecoration(mergedRunner)
)
).run(fullQuery, context),
"merged"
);
} }
@Test @Test
@ -3582,7 +3586,8 @@ public class GroupByQueryRunnerTest
.setAggregatorSpecs( .setAggregatorSpecs(
Arrays.asList( Arrays.asList(
QueryRunnerTestHelper.rowsCount, QueryRunnerTestHelper.rowsCount,
new LongSumAggregatorFactory("idx", "index") new LongSumAggregatorFactory("idx", "index"),
new LongSumAggregatorFactory("indexMaxPlusTen", "indexMaxPlusTen")
) )
) )
.setGranularity(QueryRunnerTestHelper.dayGran) .setGranularity(QueryRunnerTestHelper.dayGran)
@ -3646,7 +3651,8 @@ public class GroupByQueryRunnerTest
.setAggregatorSpecs( .setAggregatorSpecs(
Arrays.asList( Arrays.asList(
QueryRunnerTestHelper.rowsCount, QueryRunnerTestHelper.rowsCount,
new LongSumAggregatorFactory("idx", "index") new LongSumAggregatorFactory("idx", "index"),
new LongSumAggregatorFactory("indexMaxPlusTen", "indexMaxPlusTen")
) )
) )
.setGranularity(QueryRunnerTestHelper.dayGran) .setGranularity(QueryRunnerTestHelper.dayGran)
@ -3719,7 +3725,8 @@ public class GroupByQueryRunnerTest
.setAggregatorSpecs( .setAggregatorSpecs(
Arrays.asList( Arrays.asList(
QueryRunnerTestHelper.rowsCount, QueryRunnerTestHelper.rowsCount,
new LongSumAggregatorFactory("idx", "index") new LongSumAggregatorFactory("idx", "index"),
new LongSumAggregatorFactory("indexMaxPlusTen", "indexMaxPlusTen")
) )
) )
.setGranularity(QueryRunnerTestHelper.dayGran) .setGranularity(QueryRunnerTestHelper.dayGran)
@ -3773,7 +3780,8 @@ public class GroupByQueryRunnerTest
.setAggregatorSpecs( .setAggregatorSpecs(
Arrays.asList( Arrays.asList(
QueryRunnerTestHelper.rowsCount, QueryRunnerTestHelper.rowsCount,
new LongSumAggregatorFactory("idx", "index") new LongSumAggregatorFactory("idx", "index"),
new LongSumAggregatorFactory("indexMaxPlusTen", "indexMaxPlusTen")
) )
) )
.setGranularity(QueryRunnerTestHelper.dayGran) .setGranularity(QueryRunnerTestHelper.dayGran)
@ -3785,19 +3793,44 @@ public class GroupByQueryRunnerTest
.setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird) .setQuerySegmentSpec(QueryRunnerTestHelper.firstToThird)
.setAggregatorSpecs( .setAggregatorSpecs(
Arrays.<AggregatorFactory>asList( Arrays.<AggregatorFactory>asList(
new DoubleMaxAggregatorFactory("idx", "idx") QueryRunnerTestHelper.rowsCount,
new DoubleMaxAggregatorFactory("idx", "idx"),
new DoubleMaxAggregatorFactory("indexMaxPlusTen", "indexMaxPlusTen")
) )
) )
.setGranularity(QueryRunnerTestHelper.dayGran) .setGranularity(QueryRunnerTestHelper.dayGran)
.build(); .build();
List<Row> expectedResults = Arrays.asList( List<Row> expectedResults = GroupByQueryRunnerTestHelper.createExpectedRows(
GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-01", "idx", 2900.0), new String[]{"__time", "rows", "idx", "indexMaxPlusTen"},
GroupByQueryRunnerTestHelper.createExpectedRow("2011-04-02", "idx", 2505.0) new Object[]{"2011-04-01", 9L, 2900.0, 2930.0},
new Object[]{"2011-04-02", 9L, 2505.0, 2535.0}
); );
Iterable<Row> results = GroupByQueryRunnerTestHelper.runQuery(factory, runner, query); TestHelper.assertExpectedObjects(
TestHelper.assertExpectedObjects(expectedResults, results, ""); expectedResults,
GroupByQueryRunnerTestHelper.runQuery(factory, runner, query), ""
);
subquery = subquery.withAggregatorSpecs(
Arrays.asList(
QueryRunnerTestHelper.rowsCount,
new LongSumAggregatorFactory("idx", null, "-index + 100"),
new LongSumAggregatorFactory("indexMaxPlusTen", "indexMaxPlusTen")
)
);
query = (GroupByQuery) query.withDataSource(new QueryDataSource(subquery));
expectedResults = GroupByQueryRunnerTestHelper.createExpectedRows(
new String[]{"__time", "rows", "idx", "indexMaxPlusTen"},
new Object[]{"2011-04-01", 9L, 21.0, 2930.0},
new Object[]{"2011-04-02", 9L, 2.0, 2535.0}
);
TestHelper.assertExpectedObjects(
expectedResults,
GroupByQueryRunnerTestHelper.runQuery(factory, runner, query), ""
);
} }
@Test @Test

View File

@ -31,8 +31,11 @@ import io.druid.query.Query;
import io.druid.query.QueryRunner; import io.druid.query.QueryRunner;
import io.druid.query.QueryRunnerFactory; import io.druid.query.QueryRunnerFactory;
import io.druid.query.QueryToolChest; import io.druid.query.QueryToolChest;
import io.druid.segment.column.Column;
import org.joda.time.DateTime; import org.joda.time.DateTime;
import java.util.Arrays;
import java.util.List;
import java.util.Map; import java.util.Map;
/** /**
@ -70,4 +73,23 @@ public class GroupByQueryRunnerTestHelper
return new MapBasedRow(ts, theVals); return new MapBasedRow(ts, theVals);
} }
public static List<Row> createExpectedRows(String[] columnNames, Object[]... values)
{
int timeIndex = Arrays.asList(columnNames).indexOf(Column.TIME_COLUMN_NAME);
Preconditions.checkArgument(timeIndex >= 0);
List<Row> expected = Lists.newArrayList();
for (Object[] value : values) {
Preconditions.checkArgument(value.length == columnNames.length);
Map<String, Object> theVals = Maps.newHashMapWithExpectedSize(value.length);
for (int i = 0; i < columnNames.length; i++) {
if (i != timeIndex) {
theVals.put(columnNames[i], value[i]);
}
}
expected.add(new MapBasedRow(new DateTime(value[timeIndex]), theVals));
}
return expected;
}
} }
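createExpectedRows turns a column-name header plus rows of values into MapBasedRows: the __time column (Column.TIME_COLUMN_NAME) becomes the row timestamp and every other column becomes an event entry, which is what keeps the long expectation lists earlier in this diff compact. For example, the rewritten subquery test quoted above builds its expectation as:

List<Row> expectedResults = GroupByQueryRunnerTestHelper.createExpectedRows(
    new String[]{"__time", "rows", "idx", "indexMaxPlusTen"},
    new Object[]{"2011-04-01", 9L, 2900.0, 2930.0},
    new Object[]{"2011-04-02", 9L, 2505.0, 2535.0}
);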

View File

@ -25,6 +25,7 @@ import io.druid.segment.ColumnSelectorFactory;
import io.druid.segment.DimensionSelector; import io.druid.segment.DimensionSelector;
import io.druid.segment.FloatColumnSelector; import io.druid.segment.FloatColumnSelector;
import io.druid.segment.LongColumnSelector; import io.druid.segment.LongColumnSelector;
import io.druid.segment.NumericColumnSelector;
import io.druid.segment.ObjectColumnSelector; import io.druid.segment.ObjectColumnSelector;
import io.druid.segment.column.ColumnCapabilities; import io.druid.segment.column.ColumnCapabilities;
@ -88,6 +89,12 @@ public class TestColumnSelectorFactory implements ColumnSelectorFactory
}; };
} }
@Override
public NumericColumnSelector makeMathExpressionSelector(String expression)
{
throw new UnsupportedOperationException("expression is not supported in current context");
}
@Override @Override
public ColumnCapabilities getColumnCapabilities(String columnName) public ColumnCapabilities getColumnCapabilities(String columnName)
{ {

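The test factory above simply rejects makeMathExpressionSelector; a real selector would presumably parse the expression once and evaluate it per row through the Expr/ObjectBinding interfaces this patch introduces. A standalone sketch of that evaluation path, assuming Parser.parse(...) is the entry point of the io.druid.math.expr module; the class name, the binding, and the constant 158.0 are illustrative:

    import io.druid.math.expr.Expr;
    import io.druid.math.expr.Parser;

    public class ExprEvalSketch
    {
      public static void main(String[] args)
      {
        // Parse once, then evaluate per row against a binding that resolves identifiers.
        final Expr expr = Parser.parse("-index + 100");

        final Expr.ObjectBinding binding = new Expr.ObjectBinding()
        {
          @Override
          public Number get(String name)
          {
            // Stand-in for "the current row": only the "index" identifier is bound here.
            return "index".equals(name) ? 158.0 : null;
          }
        };

        System.out.println(expr.eval(binding)); // expected: -58.0
      }
    }
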
View File

@ -33,6 +33,7 @@ import io.druid.query.aggregation.LongSumAggregatorFactory;
import io.druid.query.aggregation.PostAggregator; import io.druid.query.aggregation.PostAggregator;
import io.druid.query.aggregation.post.ArithmeticPostAggregator; import io.druid.query.aggregation.post.ArithmeticPostAggregator;
import io.druid.query.aggregation.post.ConstantPostAggregator; import io.druid.query.aggregation.post.ConstantPostAggregator;
import io.druid.query.aggregation.post.ExpressionPostAggregator;
import io.druid.query.dimension.DefaultDimensionSpec; import io.druid.query.dimension.DefaultDimensionSpec;
import io.druid.query.dimension.DimensionSpec; import io.druid.query.dimension.DimensionSpec;
import io.druid.query.ordering.StringComparators; import io.druid.query.ordering.StringComparators;
@ -251,8 +252,19 @@ public class DefaultLimitSpecTest
) )
); );
Assert.assertEquals( Assert.assertEquals(
ImmutableList.of(testRowsList.get(2), testRowsList.get(0)), (List)ImmutableList.of(testRowsList.get(2), testRowsList.get(0)),
Sequences.toList(limitFn.apply(testRowsSequence), new ArrayList<Row>()) (List)Sequences.toList(limitFn.apply(testRowsSequence), new ArrayList<Row>())
);
// adding an expression post-aggregator should produce the same result // adding an expression post-aggregator should produce the same result
limitFn = limitSpec.build(
ImmutableList.<DimensionSpec>of(new DefaultDimensionSpec("k1", "k1")),
ImmutableList.<AggregatorFactory>of(new LongSumAggregatorFactory("k2", "k2")),
ImmutableList.<PostAggregator>of(new ExpressionPostAggregator("k1", "1 + 1"))
);
Assert.assertEquals(
(List)ImmutableList.of(testRowsList.get(2), testRowsList.get(0)),
(List)Sequences.toList(limitFn.apply(testRowsSequence), new ArrayList<Row>())
); );
} }

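The new ExpressionPostAggregator exercised above takes (name, expression); the trivial "1 + 1" only verifies that ordering is unaffected. A hedged sketch of a more typical use, where the expression references aggregated columns; the method and output name are illustrative:

    import io.druid.query.aggregation.PostAggregator;
    import io.druid.query.aggregation.post.ExpressionPostAggregator;

    public class ExpressionPostAggExample
    {
      // Derives a new output column from already-aggregated columns of each result row;
      // "idx" and "rows" are aggregator names like those used in the tests in this patch.
      public static PostAggregator idxPlusRows()
      {
        return new ExpressionPostAggregator("idxPlusRows", "idx + rows");
      }
    }
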
View File

@ -194,7 +194,7 @@ public class SegmentMetadataQueryTest
null, null,
null null
) )
), mmap1 ? 71982 : 72755, ), mmap1 ? 93744 : 94517,
1209, 1209,
null, null,
null, null,
@ -238,7 +238,7 @@ public class SegmentMetadataQueryTest
null null
) )
// null_column will be included only for incremental index, which makes a little bigger result than expected // null_column will be included only for incremental index, which makes a little bigger result than expected
), mmap2 ? 71982 : 72755, ), mmap2 ? 93744 : 94517,
1209, 1209,
null, null,
null, null,

View File

@ -110,7 +110,7 @@ public class SegmentMetadataUnionQueryTest
null null
) )
), ),
mmap ? 287928 : 291020, mmap ? 374976 : 378068,
4836, 4836,
null, null,
null, null,

View File

@ -156,9 +156,9 @@ public class SelectQueryRunnerTest
PagingOffset offset = query.getPagingOffset(QueryRunnerTestHelper.segmentId); PagingOffset offset = query.getPagingOffset(QueryRunnerTestHelper.segmentId);
List<Result<SelectResultValue>> expectedResults = toExpected( List<Result<SelectResultValue>> expectedResults = toExpected(
toEvents(new String[]{EventHolder.timestampKey + ":TIME"}, V_0112_0114), toFullEvents(V_0112_0114),
Lists.newArrayList("market", "quality", "placement", "placementish", "partial_null_column", "null_column"), Lists.newArrayList("market", "quality", "placement", "placementish", "partial_null_column", "null_column"),
Lists.<String>newArrayList("index", "quality_uniques"), Lists.newArrayList("index", "quality_uniques", "indexMin", "indexMaxPlusTen"),
offset.startOffset(), offset.startOffset(),
offset.threshold() offset.threshold()
); );
@ -247,7 +247,7 @@ public class SelectQueryRunnerTest
new SelectResultValue( new SelectResultValue(
ImmutableMap.of(QueryRunnerTestHelper.segmentId, 2), ImmutableMap.of(QueryRunnerTestHelper.segmentId, 2),
Sets.newHashSet("mar", "qual", "place"), Sets.newHashSet("mar", "qual", "place"),
Sets.newHashSet("index", "quality_uniques"), Sets.newHashSet("index", "quality_uniques", "indexMin", "indexMaxPlusTen"),
Arrays.asList( Arrays.asList(
new EventHolder( new EventHolder(
QueryRunnerTestHelper.segmentId, QueryRunnerTestHelper.segmentId,
@ -293,7 +293,7 @@ public class SelectQueryRunnerTest
new SelectResultValue( new SelectResultValue(
ImmutableMap.of(QueryRunnerTestHelper.segmentId, -3), ImmutableMap.of(QueryRunnerTestHelper.segmentId, -3),
Sets.newHashSet("mar", "qual", "place"), Sets.newHashSet("mar", "qual", "place"),
Sets.newHashSet("index", "quality_uniques"), Sets.newHashSet("index", "quality_uniques", "indexMin", "indexMaxPlusTen"),
Arrays.asList( Arrays.asList(
new EventHolder( new EventHolder(
QueryRunnerTestHelper.segmentId, QueryRunnerTestHelper.segmentId,
@ -554,7 +554,7 @@ public class SelectQueryRunnerTest
new SelectResultValue( new SelectResultValue(
ImmutableMap.<String, Integer>of(), ImmutableMap.<String, Integer>of(),
Sets.newHashSet("market", "quality", "placement", "placementish", "partial_null_column", "null_column"), Sets.newHashSet("market", "quality", "placement", "placementish", "partial_null_column", "null_column"),
Sets.newHashSet("index", "quality_uniques"), Sets.newHashSet("index", "quality_uniques", "indexMin", "indexMaxPlusTen"),
Lists.<EventHolder>newArrayList() Lists.<EventHolder>newArrayList()
) )
) )
@ -605,6 +605,18 @@ public class SelectQueryRunnerTest
); );
} }
private List<List<Map<String, Object>>> toFullEvents(final String[]... valueSet)
{
return toEvents(new String[]{EventHolder.timestampKey + ":TIME",
QueryRunnerTestHelper.marketDimension + ":STRING",
QueryRunnerTestHelper.qualityDimension + ":STRING",
QueryRunnerTestHelper.placementDimension + ":STRING",
QueryRunnerTestHelper.placementishDimension + ":STRINGS",
QueryRunnerTestHelper.indexMetric + ":FLOAT",
QueryRunnerTestHelper.partialNullDimension + ":STRING"},
valueSet);
}
private List<List<Map<String, Object>>> toEvents(final String[] dimSpecs, final String[]... valueSet) private List<List<Map<String, Object>>> toEvents(final String[] dimSpecs, final String[]... valueSet)
{ {
List<List<Map<String, Object>>> events = Lists.newArrayList(); List<List<Map<String, Object>>> events = Lists.newArrayList();
@ -620,17 +632,19 @@ public class SelectQueryRunnerTest
Map<String, Object> event = Maps.newHashMap(); Map<String, Object> event = Maps.newHashMap();
String[] values = input.split("\\t"); String[] values = input.split("\\t");
for (int i = 0; i < dimSpecs.length; i++) { for (int i = 0; i < dimSpecs.length; i++) {
if (dimSpecs[i] == null || i >= dimSpecs.length) { if (dimSpecs[i] == null || i >= dimSpecs.length || i >= values.length) {
continue; continue;
} }
String[] specs = dimSpecs[i].split(":"); String[] specs = dimSpecs[i].split(":");
event.put( event.put(
specs[0], specs[0],
specs.length == 1 || specs[1].equals("STRING") ? values[i] :
specs[1].equals("TIME") ? new DateTime(values[i]) : specs[1].equals("TIME") ? new DateTime(values[i]) :
specs[1].equals("FLOAT") ? Float.valueOf(values[i]) : specs[1].equals("FLOAT") ? Float.valueOf(values[i]) :
specs[1].equals("DOUBLE") ? Double.valueOf(values[i]) : specs[1].equals("DOUBLE") ? Double.valueOf(values[i]) :
specs[1].equals("LONG") ? Long.valueOf(values[i]) : specs[1].equals("LONG") ? Long.valueOf(values[i]) :
specs[1].equals("NULL") ? null : specs[1].equals("NULL") ? null :
specs[1].equals("STRINGS") ? Arrays.asList(values[i].split("\u0001")) :
values[i] values[i]
); );
} }

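For reference, the "column:TYPE" convention parsed by toEvents/toFullEvents above: STRING (or no suffix) keeps the raw string, STRINGS splits on the U+0001 separator, and TIME/FLOAT/DOUBLE/LONG/NULL convert as before. A condensed illustration using column names from this test's dimensions and metrics:

    public class DimSpecConvention
    {
      // Each "column:TYPE" entry maps one tab-separated field of an input line.
      static final String[] DIM_SPECS = new String[]{
          "timestamp:TIME",          // -> new DateTime(value)
          "market:STRING",           // -> raw string (same as no suffix at all)
          "placementish:STRINGS",    // -> list of strings, split on the U+0001 separator
          "index:FLOAT",             // -> Float.valueOf(value)
          "null_column:NULL"         // -> null
      };
    }
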
View File

@ -45,6 +45,7 @@ import io.druid.query.TestQueryRunners;
import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.DoubleMaxAggregatorFactory; import io.druid.query.aggregation.DoubleMaxAggregatorFactory;
import io.druid.query.aggregation.DoubleMinAggregatorFactory; import io.druid.query.aggregation.DoubleMinAggregatorFactory;
import io.druid.query.aggregation.DoubleSumAggregatorFactory;
import io.druid.query.aggregation.FilteredAggregatorFactory; import io.druid.query.aggregation.FilteredAggregatorFactory;
import io.druid.query.aggregation.PostAggregator; import io.druid.query.aggregation.PostAggregator;
import io.druid.query.aggregation.cardinality.CardinalityAggregatorFactory; import io.druid.query.aggregation.cardinality.CardinalityAggregatorFactory;
@ -1641,6 +1642,27 @@ public class TopNQueryRunnerTest
) )
) )
); );
assertExpectedResults(expectedResults, query);
query = query.withAggregatorSpecs(
Arrays.asList(
QueryRunnerTestHelper.rowsCount,
new DoubleSumAggregatorFactory("index", null, "-index + 100")
)
);
expectedResults = Arrays.asList(
TopNQueryRunnerTestHelper.createExpectedRows(
"2011-01-12T00:00:00.000Z",
new String[]{QueryRunnerTestHelper.qualityDimension, "rows", "index", "addRowsIndexConstant"},
Arrays.asList(
new Object[]{"n", 93L, -2786.472755432129, -2692.472755432129},
new Object[]{"u", 186L, -3949.824363708496, -3762.824363708496}
)
)
);
assertExpectedResults(expectedResults, query); assertExpectedResults(expectedResults, query);
} }

View File

@ -0,0 +1,48 @@
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.query.topn;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import io.druid.query.Result;
import org.joda.time.DateTime;
import java.util.List;
import java.util.Map;
/**
*/
public class TopNQueryRunnerTestHelper
{
public static Result<TopNResultValue> createExpectedRows(String date, String[] columnNames, Iterable<Object[]> values)
{
List<Map> expected = Lists.newArrayList();
for (Object[] value : values) {
Preconditions.checkArgument(value.length == columnNames.length);
Map<String, Object> theVals = Maps.newHashMapWithExpectedSize(value.length);
for (int i = 0; i < columnNames.length; i++) {
theVals.put(columnNames[i], value[i]);
}
expected.add(theVals);
}
return new Result<TopNResultValue>(new DateTime(date), new TopNResultValue(expected));
}
}

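Unlike the GroupBy helper earlier in this diff, the TopN variant takes the timestamp as a separate argument and keeps every column in the value map. A usage sketch mirroring the TopNQueryRunnerTest call above; the plain "quality" name and the truncated values are illustrative:

    import io.druid.query.Result;
    import io.druid.query.topn.TopNQueryRunnerTestHelper;
    import io.druid.query.topn.TopNResultValue;

    import java.util.Arrays;

    public class TopNExpectedRowsExample
    {
      // Each Object[] becomes one entry of the TopNResultValue, keyed positionally.
      static Result<TopNResultValue> expected()
      {
        return TopNQueryRunnerTestHelper.createExpectedRows(
            "2011-01-12T00:00:00.000Z",
            new String[]{"quality", "rows", "index"},
            Arrays.<Object[]>asList(
                new Object[]{"n", 93L, -2786.47},
                new Object[]{"u", 186L, -3949.82}
            )
        );
      }
    }
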
View File

@ -32,6 +32,8 @@ import io.druid.data.input.impl.StringInputRowParser;
import io.druid.data.input.impl.TimestampSpec; import io.druid.data.input.impl.TimestampSpec;
import io.druid.granularity.QueryGranularities; import io.druid.granularity.QueryGranularities;
import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.DoubleMaxAggregatorFactory;
import io.druid.query.aggregation.DoubleMinAggregatorFactory;
import io.druid.query.aggregation.DoubleSumAggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory;
import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
import io.druid.query.aggregation.hyperloglog.HyperUniquesSerde; import io.druid.query.aggregation.hyperloglog.HyperUniquesSerde;
@ -61,7 +63,9 @@ public class TestIndex
"index", "index",
"partial_null_column", "partial_null_column",
"null_column", "null_column",
"quality_uniques" "quality_uniques",
"indexMin",
"indexMaxPlusTen"
}; };
public static final String[] DIMENSIONS = new String[]{ public static final String[] DIMENSIONS = new String[]{
"market", "market",
@ -71,11 +75,13 @@ public class TestIndex
"partial_null_column", "partial_null_column",
"null_column", "null_column",
}; };
public static final String[] METRICS = new String[]{"index"}; public static final String[] METRICS = new String[]{"index", "indexMin", "indexMaxPlusTen"};
private static final Logger log = new Logger(TestIndex.class); private static final Logger log = new Logger(TestIndex.class);
private static final Interval DATA_INTERVAL = new Interval("2011-01-12T00:00:00.000Z/2011-05-01T00:00:00.000Z"); private static final Interval DATA_INTERVAL = new Interval("2011-01-12T00:00:00.000Z/2011-05-01T00:00:00.000Z");
public static final AggregatorFactory[] METRIC_AGGS = new AggregatorFactory[]{ public static final AggregatorFactory[] METRIC_AGGS = new AggregatorFactory[]{
new DoubleSumAggregatorFactory(METRICS[0], METRICS[0]), new DoubleSumAggregatorFactory(METRICS[0], METRICS[0]),
new DoubleMinAggregatorFactory(METRICS[1], METRICS[0]),
new DoubleMaxAggregatorFactory(METRICS[2], null, "index + 10"),
new HyperUniquesAggregatorFactory("quality_uniques", "quality") new HyperUniquesAggregatorFactory("quality_uniques", "quality")
}; };
private static final IndexSpec indexSpec = new IndexSpec(); private static final IndexSpec indexSpec = new IndexSpec();
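
The net effect of the TestIndex change above is two extra ingestion-time metrics rolled up alongside "index". A condensed sketch of just the aggregator array, with the HyperUniques aggregator omitted:

    import io.druid.query.aggregation.AggregatorFactory;
    import io.druid.query.aggregation.DoubleMaxAggregatorFactory;
    import io.druid.query.aggregation.DoubleMinAggregatorFactory;
    import io.druid.query.aggregation.DoubleSumAggregatorFactory;

    public class TestIndexMetricAggs
    {
      // "indexMin" reads the raw "index" column, while "indexMaxPlusTen" is fed by the
      // expression "index + 10" and therefore passes null for fieldName.
      static final AggregatorFactory[] METRIC_AGGS = new AggregatorFactory[]{
          new DoubleSumAggregatorFactory("index", "index"),
          new DoubleMinAggregatorFactory("indexMin", "index"),
          new DoubleMaxAggregatorFactory("indexMaxPlusTen", null, "index + 10")
      };
    }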