diff --git a/server/pom.xml b/server/pom.xml
index deffcea39eb..17dc68d3108 100644
--- a/server/pom.xml
+++ b/server/pom.xml
@@ -132,10 +132,6 @@
             <groupId>org.eclipse.aether</groupId>
             <artifactId>aether-api</artifactId>
         </dependency>
-        <dependency>
-            <groupId>org.antlr</groupId>
-            <artifactId>antlr4-runtime</artifactId>
-        </dependency>
         <dependency>
             <groupId>net.spy</groupId>
             <artifactId>spymemcached</artifactId>
@@ -222,17 +218,6 @@
-            <plugin>
-                <groupId>org.antlr</groupId>
-                <artifactId>antlr4-maven-plugin</artifactId>
-                <executions>
-                    <execution>
-                        <goals>
-                            <goal>antlr4</goal>
-                        </goals>
-                    </execution>
-                </executions>
-            </plugin>
diff --git a/server/src/main/antlr4/io/druid/sql/antlr4/DruidSQL.g4 b/server/src/main/antlr4/io/druid/sql/antlr4/DruidSQL.g4
deleted file mode 100644
index b6e781f64ee..00000000000
--- a/server/src/main/antlr4/io/druid/sql/antlr4/DruidSQL.g4
+++ /dev/null
@@ -1,343 +0,0 @@
-grammar DruidSQL;
-
-@header {
-import com.google.common.base.Joiner;
-import com.google.common.collect.Lists;
-import io.druid.granularity.PeriodGranularity;
-import io.druid.granularity.QueryGranularity;
-import io.druid.query.aggregation.AggregatorFactory;
-import io.druid.query.aggregation.CountAggregatorFactory;
-import io.druid.query.aggregation.DoubleSumAggregatorFactory;
-import io.druid.query.aggregation.DoubleMaxAggregatorFactory;
-import io.druid.query.aggregation.DoubleMinAggregatorFactory;
-import io.druid.query.aggregation.PostAggregator;
-import io.druid.query.aggregation.post.ArithmeticPostAggregator;
-import io.druid.query.aggregation.post.ConstantPostAggregator;
-import io.druid.query.aggregation.post.FieldAccessPostAggregator;
-import io.druid.query.dimension.DefaultDimensionSpec;
-import io.druid.query.dimension.DimensionSpec;
-import io.druid.query.filter.AndDimFilter;
-import io.druid.query.filter.DimFilter;
-import io.druid.query.filter.NotDimFilter;
-import io.druid.query.filter.OrDimFilter;
-import io.druid.query.filter.RegexDimFilter;
-import io.druid.query.filter.SelectorDimFilter;
-import org.antlr.v4.runtime.NoViableAltException;
-import org.antlr.v4.runtime.Parser;
-import org.antlr.v4.runtime.ParserRuleContext;
-import org.antlr.v4.runtime.RecognitionException;
-import org.antlr.v4.runtime.Token;
-import org.antlr.v4.runtime.TokenStream;
-import org.antlr.v4.runtime.atn.ATN;
-import org.antlr.v4.runtime.atn.ATNSimulator;
-import org.antlr.v4.runtime.atn.ParserATNSimulator;
-import org.antlr.v4.runtime.atn.PredictionContextCache;
-import org.antlr.v4.runtime.dfa.DFA;
-import org.antlr.v4.runtime.tree.ParseTreeListener;
-import org.antlr.v4.runtime.tree.TerminalNode;
-import org.joda.time.DateTime;
-import org.joda.time.Period;
-
-import java.text.NumberFormat;
-import java.text.ParseException;
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-}
-
-@parser::members {
- public Map<String, AggregatorFactory> aggregators = new LinkedHashMap<String, AggregatorFactory>();
- public List<PostAggregator> postAggregators = new LinkedList<PostAggregator>();
- public DimFilter filter;
- public List<org.joda.time.Interval> intervals;
- public List<String> fields = new LinkedList<String>();
- public QueryGranularity granularity = QueryGranularity.ALL;
- public Map<String, DimensionSpec> groupByDimensions = new LinkedHashMap<String, DimensionSpec>();
-
- String dataSourceName = null;
-
- public String getDataSource() {
- return dataSourceName;
- }
-
- public String unescape(String quoted) {
- String unquote = quoted.trim().replaceFirst("^'(.*)'\$", "\$1");
- return unquote.replace("''", "'");
- }
-
- AggregatorFactory evalAgg(String name, int fn) {
- switch (fn) {
- case SUM: return new DoubleSumAggregatorFactory("sum("+name+")", name);
- case MIN: return new DoubleMinAggregatorFactory("min("+name+")", name);
- case MAX: return new DoubleMaxAggregatorFactory("max("+name+")", name);
- case COUNT: return new CountAggregatorFactory(name);
- }
- throw new IllegalArgumentException("Unknown function [" + fn + "]");
- }
-
- PostAggregator evalArithmeticPostAggregator(PostAggregator a, List<Token> ops, List<PostAggregator> b) {
- if(b.isEmpty()) return a;
- else {
- int i = 0;
-
- PostAggregator root = a;
- while(i < ops.size()) {
- List<PostAggregator> list = new LinkedList<PostAggregator>();
- List<String> names = new LinkedList<String>();
-
- names.add(root.getName());
- list.add(root);
-
- Token op = ops.get(i);
-
- while(i < ops.size() && ops.get(i).getType() == op.getType()) {
- PostAggregator e = b.get(i);
- list.add(e);
- names.add(e.getName());
- i++;
- }
-
- root = new ArithmeticPostAggregator("("+Joiner.on(op.getText()).join(names)+")", op.getText(), list);
- }
-
- return root;
- }
- }
-}
-
-
-AND: 'and';
-OR: 'or';
-SUM: 'sum';
-MIN: 'min';
-MAX: 'max';
-COUNT: 'count';
-AS: 'as';
-OPEN: '(';
-CLOSE: ')';
-STAR: '*';
-NOT: '!' ;
-PLUS: '+';
-MINUS: '-';
-DIV: '/';
-COMMA: ',';
-EQ: '=';
-NEQ: '!=';
-MATCH: '~';
-GROUP: 'group';
-
-IDENT : (LETTER)(LETTER | DIGIT | '_')* ;
-QUOTED_STRING : '\'' ( ESC | ~'\'' )*? '\'' ;
-ESC : '\'' '\'';
-
-NUMBER: DIGIT*'.'?DIGIT+(EXPONENT)?;
-EXPONENT: ('e') ('+'|'-')? ('0'..'9')+;
-fragment DIGIT : '0'..'9';
-fragment LETTER : 'a'..'z' | 'A'..'Z';
-
-LINE_COMMENT : '--' .*? '\r'? '\n' -> skip ;
-COMMENT : '/*' .*? '*/' -> skip ;
-WS : (' '| '\t' | '\r' '\n' | '\n' | '\r')+ -> skip;
-
-
-
-query
- : select_stmt where_stmt (groupby_stmt)?
- ;
-
-select_stmt
- : 'select' e+=aliasedExpression (',' e+=aliasedExpression)* 'from' datasource {
- for(AliasedExpressionContext a : $e) {
- postAggregators.add(a.p);
- fields.add(a.p.getName());
- }
- this.dataSourceName = $datasource.text;
- }
- ;
-
-where_stmt
- : 'where' f=timeAndDimFilter {
- if($f.filter != null) this.filter = $f.filter;
- this.intervals = Lists.newArrayList($f.interval);
- }
- ;
-
-groupby_stmt
- : GROUP 'by' groupByExpression ( COMMA! groupByExpression )*
- ;
-
-groupByExpression
- : gran=granularityFn {this.granularity = $gran.granularity;}
- | dim=IDENT { this.groupByDimensions.put($dim.text, new DefaultDimensionSpec($dim.text, $dim.text)); }
- ;
-
-datasource
- : IDENT
- ;
-
-aliasedExpression returns [PostAggregator p]
- : expression ( AS^ name=IDENT )? {
- if($name != null) {
- postAggregators.add($expression.p);
- $p = new FieldAccessPostAggregator($name.text, $expression.p.getName());
- }
- else $p = $expression.p;
- }
- ;
-
-expression returns [PostAggregator p]
- : additiveExpression { $p = $additiveExpression.p; }
- ;
-
-additiveExpression returns [PostAggregator p]
- : a=multiplyExpression (( ops+=PLUS^ | ops+=MINUS^ ) b+=multiplyExpression)* {
- List<PostAggregator> rhs = new LinkedList<PostAggregator>();
- for(MultiplyExpressionContext e : $b) rhs.add(e.p);
- $p = evalArithmeticPostAggregator($a.p, $ops, rhs);
- }
- ;
-
-multiplyExpression returns [PostAggregator p]
- : a=unaryExpression ((ops+= STAR | ops+=DIV ) b+=unaryExpression)* {
- List<PostAggregator> rhs = new LinkedList<PostAggregator>();
- for(UnaryExpressionContext e : $b) rhs.add(e.p);
- $p = evalArithmeticPostAggregator($a.p, $ops, rhs);
- }
- ;
-
-unaryExpression returns [PostAggregator p]
- : MINUS e=unaryExpression {
- if($e.p instanceof ConstantPostAggregator) {
- ConstantPostAggregator c = (ConstantPostAggregator)$e.p;
- double v = c.getConstantValue().doubleValue() * -1;
- $p = new ConstantPostAggregator(Double.toString(v), v);
- } else {
- $p = new ArithmeticPostAggregator(
- "-"+$e.p.getName(),
- "*",
- Lists.newArrayList($e.p, new ConstantPostAggregator("-1", -1.0))
- );
- }
- }
- | PLUS e=unaryExpression { $p = $e.p; }
- | primaryExpression { $p = $primaryExpression.p; }
- ;
-
-primaryExpression returns [PostAggregator p]
- : constant { $p = $constant.c; }
- | aggregate {
- aggregators.put($aggregate.agg.getName(), $aggregate.agg);
- $p = new FieldAccessPostAggregator($aggregate.agg.getName(), $aggregate.agg.getName());
- }
- | OPEN! e=expression CLOSE! { $p = $e.p; }
- ;
-
-aggregate returns [AggregatorFactory agg]
- : fn=( SUM^ | MIN^ | MAX^ ) OPEN! name=(IDENT|COUNT) CLOSE! { $agg = evalAgg($name.text, $fn.type); }
- | fn=COUNT OPEN! STAR CLOSE! { $agg = evalAgg("count(*)", $fn.type); }
- ;
-
-constant returns [ConstantPostAggregator c]
- : value=NUMBER { double v = Double.parseDouble($value.text); $c = new ConstantPostAggregator(Double.toString(v), v); }
- ;
-
-/* time filters must be top level filters */
-timeAndDimFilter returns [DimFilter filter, org.joda.time.Interval interval]
- : (f1=dimFilter AND)? t=timeFilter (AND f2=dimFilter)? {
- if($f1.ctx != null || $f2.ctx != null) {
- if($f1.ctx != null && $f2.ctx != null) {
- $filter = new AndDimFilter(Lists.newArrayList($f1.filter, $f2.filter));
- } else if($f1.ctx != null) {
- $filter = $f1.filter;
- } else {
- $filter = $f2.filter;
- }
- }
- $interval = $t.interval;
- }
- ;
-
-dimFilter returns [DimFilter filter]
- : e=orDimFilter { $filter = $e.filter; }
- ;
-
-orDimFilter returns [DimFilter filter]
- : a=andDimFilter (OR^ b+=andDimFilter)* {
- if($b.isEmpty()) $filter = $a.filter;
- else {
- List<DimFilter> rest = new ArrayList<DimFilter>();
- for(AndDimFilterContext e : $b) rest.add(e.filter);
- $filter = new OrDimFilter(Lists.asList($a.filter, rest.toArray(new DimFilter[]{})));
- }
- }
- ;
-
-andDimFilter returns [DimFilter filter]
- : a=primaryDimFilter (AND^ b+=primaryDimFilter)* {
- if($b.isEmpty()) $filter = $a.filter;
- else {
- List<DimFilter> rest = new ArrayList<DimFilter>();
- for(PrimaryDimFilterContext e : $b) rest.add(e.filter);
- $filter = new AndDimFilter(Lists.asList($a.filter, rest.toArray(new DimFilter[]{})));
- }
- }
- ;
-
-primaryDimFilter returns [DimFilter filter]
- : e=selectorDimFilter { $filter = $e.filter; }
- | l=inListDimFilter { $filter = $l.filter; }
- | NOT f=dimFilter { $filter = new NotDimFilter($f.filter); }
- | OPEN! f=dimFilter CLOSE! { $filter = $f.filter; }
- ;
-
-selectorDimFilter returns [DimFilter filter]
- : dimension=IDENT op=(EQ|NEQ|MATCH) value=QUOTED_STRING {
- String dim = $dimension.text;
- String val = unescape($value.text);
- switch($op.type) {
- case(EQ): $filter = new SelectorDimFilter(dim, val, null); break;
- case(NEQ): $filter = new NotDimFilter(new SelectorDimFilter(dim, val, null)); break;
- case(MATCH): $filter = new RegexDimFilter(dim, val, null); break;
- }
- }
- ;
-
-inListDimFilter returns [DimFilter filter]
- : dimension=IDENT 'in' (OPEN! ( (list+=QUOTED_STRING (COMMA! list+=QUOTED_STRING)*) ) CLOSE!) {
- List<DimFilter> filterList = new LinkedList<DimFilter>();
- for(Token e : $list) filterList.add(new SelectorDimFilter($dimension.text, unescape(e.getText()), null));
- $filter = new OrDimFilter(filterList);
- }
- ;
-
-timeFilter returns [org.joda.time.Interval interval, QueryGranularity granularity]
- : 'timestamp' 'between' s=timestamp AND e=timestamp {
- $interval = new org.joda.time.Interval($s.t, $e.t);
- }
- ;
-
-granularityFn returns [QueryGranularity granularity]
- : 'granularity' OPEN! 'timestamp' ',' str=QUOTED_STRING CLOSE! {
- String granStr = unescape($str.text);
- try {
- $granularity = QueryGranularity.fromString(granStr);
- } catch(IllegalArgumentException e) {
- $granularity = new PeriodGranularity(new Period(granStr), null, null);
- }
- }
- ;
-
-timestamp returns [DateTime t]
- : NUMBER {
- String str = $NUMBER.text.trim();
- try {
- $t = new DateTime(NumberFormat.getInstance().parse(str));
- }
- catch(ParseException e) {
- throw new IllegalArgumentException("Unable to parse number [" + str + "]");
- }
- }
- | QUOTED_STRING { $t = new DateTime(unescape($QUOTED_STRING.text)); }
- ;
diff --git a/server/src/main/java/io/druid/server/sql/SQLRunner.java b/server/src/main/java/io/druid/server/sql/SQLRunner.java
deleted file mode 100644
index eda71827f0a..00000000000
--- a/server/src/main/java/io/druid/server/sql/SQLRunner.java
+++ /dev/null
@@ -1,229 +0,0 @@
-/*
- * Licensed to Metamarkets Group Inc. (Metamarkets) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. Metamarkets licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package io.druid.server.sql;
-
-import com.fasterxml.jackson.core.type.TypeReference;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.ObjectWriter;
-import com.google.common.base.Charsets;
-import com.google.common.base.Function;
-import com.google.common.base.Joiner;
-import com.google.common.collect.Iterables;
-import com.google.common.collect.Lists;
-import com.metamx.common.StringUtils;
-import com.metamx.common.guava.CloseQuietly;
-import io.druid.data.input.Row;
-import io.druid.jackson.DefaultObjectMapper;
-import io.druid.query.Druids;
-import io.druid.query.Query;
-import io.druid.query.Result;
-import io.druid.query.aggregation.AggregatorFactory;
-import io.druid.query.dimension.DimensionSpec;
-import io.druid.query.groupby.GroupByQuery;
-import io.druid.query.timeseries.TimeseriesResultValue;
-import io.druid.sql.antlr4.DruidSQLLexer;
-import io.druid.sql.antlr4.DruidSQLParser;
-import org.antlr.v4.runtime.ANTLRInputStream;
-import org.antlr.v4.runtime.CharStream;
-import org.antlr.v4.runtime.CommonTokenStream;
-import org.antlr.v4.runtime.ConsoleErrorListener;
-import org.antlr.v4.runtime.TokenStream;
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.GnuParser;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Options;
-
-import javax.annotation.Nullable;
-import javax.ws.rs.core.MediaType;
-import java.io.BufferedReader;
-import java.io.InputStreamReader;
-import java.net.URL;
-import java.net.URLConnection;
-import java.util.ArrayList;
-import java.util.List;
-
-public class SQLRunner
-{
- private static final String STATEMENT = "select count(*), (1 - count(*) / sum(count)) * 100 as ratio from wikipedia where"
- + " timestamp between '2013-02-01' and '2013-02-14'"
- + " and (namespace = 'article' or page ~ 'Talk:.*')"
- + " and language in ( 'en', 'fr' ) "
- + " and user ~ '(?i)^david.*'"
- + " group by granularity(timestamp, 'day'), language";
-
- public static void main(String[] args) throws Exception
- {
-
- Options options = new Options();
- options.addOption("h", "help", false, "help");
- options.addOption("v", false, "verbose");
- options.addOption("e", "host", true, "endpoint [hostname:port]");
-
- CommandLine cmd = new GnuParser().parse(options, args);
-
- if(cmd.hasOption("h")) {
- HelpFormatter formatter = new HelpFormatter();
- formatter.printHelp("SQLRunner", options);
- System.exit(2);
- }
-
- String hostname = cmd.getOptionValue("e", "localhost:8080");
- String sql = cmd.getArgs().length > 0 ? cmd.getArgs()[0] : STATEMENT;
-
- ObjectMapper objectMapper = new DefaultObjectMapper();
- ObjectWriter jsonWriter = objectMapper.writerWithDefaultPrettyPrinter();
-
- CharStream stream = new ANTLRInputStream(sql);
- DruidSQLLexer lexer = new DruidSQLLexer(stream);
- TokenStream tokenStream = new CommonTokenStream(lexer);
- DruidSQLParser parser = new DruidSQLParser(tokenStream);
- lexer.removeErrorListeners();
- parser.removeErrorListeners();
-
- lexer.addErrorListener(ConsoleErrorListener.INSTANCE);
- parser.addErrorListener(ConsoleErrorListener.INSTANCE);
-
- try {
- DruidSQLParser.QueryContext queryContext = parser.query();
- if(parser.getNumberOfSyntaxErrors() > 0) throw new IllegalStateException();
-// parser.setBuildParseTree(true);
-// System.err.println(q.toStringTree(parser));
- } catch(Exception e) {
- String msg = e.getMessage();
- if(msg != null) System.err.println(e);
- System.exit(1);
- }
-
- final Query query;
- final TypeReference typeRef;
- boolean groupBy = false;
- if(parser.groupByDimensions.isEmpty()) {
- query = Druids.newTimeseriesQueryBuilder()
- .dataSource(parser.getDataSource())
- .aggregators(new ArrayList<AggregatorFactory>(parser.aggregators.values()))
- .postAggregators(parser.postAggregators)
- .intervals(parser.intervals)
- .granularity(parser.granularity)
- .filters(parser.filter)
- .build();
-
- typeRef = new TypeReference<List<Result<TimeseriesResultValue>>>(){};
- } else {
- query = GroupByQuery.builder()
- .setDataSource(parser.getDataSource())
- .setAggregatorSpecs(new ArrayList<AggregatorFactory>(parser.aggregators.values()))
- .setPostAggregatorSpecs(parser.postAggregators)
- .setInterval(parser.intervals)
- .setGranularity(parser.granularity)
- .setDimFilter(parser.filter)
- .setDimensions(new ArrayList<DimensionSpec>(parser.groupByDimensions.values()))
- .build();
-
- typeRef = new TypeReference<List<Row>>(){};
- groupBy = true;
- }
-
- String queryStr = jsonWriter.writeValueAsString(query);
- if(cmd.hasOption("v")) System.err.println(queryStr);
-
- URL url = new URL(String.format("http://%s/druid/v2/?pretty", hostname));
- final URLConnection urlConnection = url.openConnection();
- urlConnection.addRequestProperty("content-type", MediaType.APPLICATION_JSON);
- urlConnection.getOutputStream().write(StringUtils.toUtf8(queryStr));
- BufferedReader stdInput = new BufferedReader(new InputStreamReader(urlConnection.getInputStream(), Charsets.UTF_8));
-
- Object res = objectMapper.readValue(stdInput, typeRef);
-
- Joiner tabJoiner = Joiner.on("\t");
-
- if(groupBy) {
- List<Row> rows = (List<Row>)res;
- Iterable<String> dimensions = Iterables.transform(parser.groupByDimensions.values(), new Function<DimensionSpec, String>()
- {
- @Override
- public String apply(@Nullable DimensionSpec input)
- {
- return input.getOutputName();
- }
- });
-
- System.out.println(tabJoiner.join(Iterables.concat(
- Lists.newArrayList("timestamp"),
- dimensions,
- parser.fields
- )));
- for(final Row r : rows) {
- System.out.println(
- tabJoiner.join(
- Iterables.concat(
- Lists.newArrayList(parser.granularity.toDateTime(r.getTimestampFromEpoch())),
- Iterables.transform(
- parser.groupByDimensions.values(), new Function<DimensionSpec, String>()
- {
- @Override
- public String apply(@Nullable DimensionSpec input)
- {
- return Joiner.on(",").join(r.getDimension(input.getOutputName()));
- }
- }),
- Iterables.transform(parser.fields, new Function<String, Object>()
- {
- @Override
- public Object apply(@Nullable String input)
- {
- return r.getFloatMetric(input);
- }
- })
- )
- )
- );
- }
- }
- else {
- List<Result<TimeseriesResultValue>> rows = (List<Result<TimeseriesResultValue>>)res;
- System.out.println(tabJoiner.join(Iterables.concat(
- Lists.newArrayList("timestamp"),
- parser.fields
- )));
- for(final Result<TimeseriesResultValue> r : rows) {
- System.out.println(
- tabJoiner.join(
- Iterables.concat(
- Lists.newArrayList(r.getTimestamp()),
- Lists.transform(
- parser.fields,
- new Function<String, Object>()
- {
- @Override
- public Object apply(@Nullable String input)
- {
- return r.getValue().getMetric(input);
- }
- }
- )
- )
- )
- );
- }
- }
-
- CloseQuietly.close(stdInput);
- }
-}