SQL: Extend HAVING support

Enhance Analyzer to support HAVING scalar functions over aggregates
Enhance Analyzer to push down undeclared aggs into the Aggregate
Fix bug in Analyzer$MissingRef that caused invalid groupings to still be resolved when pushed into an Aggregate
Preserve location information across the plan
Add AttributeMap as a backing for AttributeSet
Add Optimizer rule for combining projects
Add tz to DT functions toString
Change formatTemplate to avoid String.format so that special % characters
are not misinterpreted as format specifiers
Extend dataset with random salary and languages
Add unit tests for AttributeMap
Fix MathFunction scripting
Improve MissingRefs to enrich UnresolvedAttribute with metadata
During analysis, unpushed attributes are automatically enriched to
provide more accurate error information
Enhance Verifier to deal with invalid (but resolved) ORDER/HAVING
Add OrderBy arithmetic tests
Improve Verifier to prevent GROUP BY on aggregations
Add tests on grouping by scalar functions

Original commit: elastic/x-pack-elasticsearch@5030d7a755
This commit is contained in:
Costin Leau 2017-12-05 18:14:15 +02:00 committed by GitHub
parent 7b701cbf88
commit 4e49769efb
54 changed files with 1968 additions and 1110 deletions

View File

@ -50,11 +50,13 @@ public class DataLoader {
createIndex.startObject("properties");
{
createIndex.startObject("emp_no").field("type", "integer").endObject();
createIndex.startObject("birth_date").field("type", "date").endObject();
createIndex.startObject("first_name").field("type", "text").endObject();
createIndex.startObject("last_name").field("type", "text").endObject();
createIndex.startObject("gender").field("type", "keyword").endObject();
createIndex.startObject("birth_date").field("type", "date").endObject();
createIndex.startObject("hire_date").field("type", "date").endObject();
createIndex.startObject("salary").field("type", "integer").endObject();
createIndex.startObject("languages").field("type", "byte").endObject();
}
createIndex.endObject();
}

View File

@ -21,6 +21,7 @@ import java.util.TimeZone;
import static java.lang.String.format;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
public class JdbcAssert {
private static final Calendar UTC_CALENDAR = Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.ROOT);
@ -101,7 +102,7 @@ public class JdbcAssert {
assertTrue("Expected more data but no more entries found after [" + count + "]", actual.next());
if (logger != null) {
JdbcTestUtils.logResultSetCurrentData(actual, logger);
logger.info(JdbcTestUtils.resultSetCurrentData(actual));
}
for (int column = 1; column <= columns; column++) {
@ -125,7 +126,11 @@ public class JdbcAssert {
}
}
}
assertEquals("Elasticsearch [" + actual + "] still has data after [" + count + "] entries", expected.next(), actual.next());
if (actual.next()) {
fail("Elasticsearch [" + actual + "] still has data after [" + count + "] entries:\n"
+ JdbcTestUtils.resultSetCurrentData(actual));
}
}
private static Object getTime(ResultSet rs, int column) throws SQLException {

View File

@ -67,7 +67,7 @@ public abstract class JdbcTestUtils {
}
}
public static void logResultSetCurrentData(ResultSet rs, Logger log) throws SQLException {
public static String resultSetCurrentData(ResultSet rs) throws SQLException {
ResultSetMetaData metaData = rs.getMetaData();
StringBuilder column = new StringBuilder();
@ -81,7 +81,7 @@ public abstract class JdbcTestUtils {
}
sb.append(trimOrPad(column.append(rs.getString(i))));
}
log.info(sb);
return sb.toString();
}
private static StringBuilder trimOrPad(StringBuilder buffer) {

View File

@ -31,6 +31,18 @@ SELECT emp_no e FROM "test_emp" WHERE emp_no < 10020 GROUP BY emp_no ORDER BY em
groupByOnNumberOnAlias
SELECT emp_no e FROM "test_emp" WHERE emp_no < 10020 GROUP BY e ORDER BY emp_no DESC;
// group by scalar
groupByAddScalar
SELECT emp_no + 1 AS e FROM test_emp GROUP BY e ORDER BY e;
groupByMinScalarDesc
SELECT emp_no - 1 AS e FROM test_emp GROUP BY e ORDER BY e DESC;
groupByModScalarDesc
SELECT emp_no % 2 AS e FROM test_emp GROUP BY e ORDER BY e DESC;
groupByMulScalar
SELECT emp_no * 2 AS e FROM test_emp GROUP BY e ORDER BY e;
groupByModScalar
SELECT (emp_no % 3) + 1 AS e FROM test_emp GROUP BY e ORDER BY e;
//
// Aggregate Functions
//
@ -215,3 +227,27 @@ aggAvgWithMultipleHavingBetweenWithLimit
SELECT gender g, CAST(AVG(emp_no) AS FLOAT) a FROM "test_emp" GROUP BY g HAVING a BETWEEN 10 AND 10000000 LIMIT 1;
aggAvgWithMultipleHavingOnAliasAndFunction
SELECT gender g, CAST(AVG(emp_no) AS FLOAT) a FROM "test_emp" GROUP BY g HAVING a > 10 AND AVG(emp_no) > 10000000;
//
// GroupBy on Scalar plus Having
//
aggGroupByOnScalarWithHaving
SELECT emp_no + 1 AS e FROM test_emp GROUP BY e HAVING AVG(salary) BETWEEN 1 AND 10010 ORDER BY e;
//
// Mixture of Aggs that triggers promotion of aggs to stats
//
aggMultiIncludingScalarFunction
SELECT MIN(salary) mi, MAX(salary) ma, MAX(salary) - MIN(salary) AS d FROM test_emp GROUP BY languages ORDER BY languages;
aggHavingWithAggNotInGroupBy
SELECT MIN(salary) mi, MAX(salary) ma, MAX(salary) - MIN(salary) AS d FROM test_emp GROUP BY languages HAVING AVG(salary) > 30000 ORDER BY languages;
aggHavingWithAliasOnScalarFromGroupBy
SELECT MIN(salary) mi, MAX(salary) ma, MAX(salary) - MIN(salary) AS d FROM test_emp GROUP BY languages HAVING d BETWEEN 50 AND 10000 AND AVG(salary) > 30000 ORDER BY languages;
aggHavingWithScalarFunctionBasedOnAliasFromGroupBy
SELECT MIN(salary) mi, MAX(salary) ma, MAX(salary) - MIN(salary) AS d FROM test_emp GROUP BY languages HAVING ma % mi > 1 AND AVG(salary) > 30000 ORDER BY languages;
aggHavingWithMultipleScalarFunctionsBasedOnAliasFromGroupBy
SELECT MIN(salary) mi, MAX(salary) ma, MAX(salary) - MIN(salary) AS d FROM test_emp GROUP BY languages HAVING d - ma % mi > 0 AND AVG(salary) > 30000 ORDER BY languages;
aggHavingWithMultipleScalarFunctionsBasedOnAliasFromGroupByAndAggNotInGroupBy
SELECT MIN(salary) mi, MAX(salary) ma, MAX(salary) - MIN(salary) AS d FROM test_emp GROUP BY languages HAVING ROUND(d - ABS(ma % mi)) + AVG(salary) > 0 AND AVG(salary) > 30000 ORDER BY languages;
aggHavingScalarOnAggFunctionsWithoutAliasesInAndNotInGroupBy
SELECT MIN(salary) mi, MAX(salary) ma, MAX(salary) - MIN(salary) AS d FROM test_emp GROUP BY languages HAVING MAX(salary) % MIN(salary) + AVG(salary) > 3000 ORDER BY languages;

View File

@ -62,4 +62,14 @@ SELECT MAX(emp_no) - MIN(emp_no) AS x FROM test_emp GROUP BY gender;
aggVariableThreeInputs
SELECT (MAX(emp_no) - MIN(emp_no)) + AVG(emp_no) AS x FROM test_emp GROUP BY gender;
// ordering
orderByPlus
SELECT emp_no FROM test_emp ORDER BY emp_no + 2 LIMIT 10;
orderByNegative
SELECT emp_no FROM test_emp ORDER BY -emp_no LIMIT 10;
orderByMinusDesc
SELECT emp_no FROM test_emp ORDER BY -emp_no DESC LIMIT 10;
orderByModulo
SELECT emp_no FROM test_emp ORDER BY emp_no % 10000 LIMIT 10;
orderByMul
SELECT emp_no FROM test_emp ORDER BY emp_no * 2 LIMIT 10;

View File

@ -118,5 +118,7 @@ emp_no |INTEGER
first_name |VARCHAR
gender |VARCHAR
hire_date |TIMESTAMP
languages |TINYINT
last_name |VARCHAR
salary |INTEGER
;

View File

@ -1,101 +1,101 @@
birth_date,emp_no,first_name,gender,hire_date,last_name
1953-09-02T00:00:00Z,10001,Georgi,M,1986-06-26T00:00:00Z,Facello
1964-06-02T00:00:00Z,10002,Bezalel,F,1985-11-21T00:00:00Z,Simmel
1959-12-03T00:00:00Z,10003,Parto,M,1986-08-28T00:00:00Z,Bamford
1954-05-01T00:00:00Z,10004,Chirstian,M,1986-12-01T00:00:00Z,Koblick
1955-01-21T00:00:00Z,10005,Kyoichi,M,1989-09-12T00:00:00Z,Maliniak
1953-04-20T00:00:00Z,10006,Anneke,F,1989-06-02T00:00:00Z,Preusig
1957-05-23T00:00:00Z,10007,Tzvetan,F,1989-02-10T00:00:00Z,Zielinski
1958-02-19T00:00:00Z,10008,Saniya,M,1994-09-15T00:00:00Z,Kalloufi
1952-04-19T00:00:00Z,10009,Sumant,F,1985-02-18T00:00:00Z,Peac
1963-06-01T00:00:00Z,10010,Duangkaew,F,1989-08-24T00:00:00Z,Piveteau
1953-11-07T00:00:00Z,10011,Mary,F,1990-01-22T00:00:00Z,Sluis
1960-10-04T00:00:00Z,10012,Patricio,M,1992-12-18T00:00:00Z,Bridgland
1963-06-07T00:00:00Z,10013,Eberhardt,M,1985-10-20T00:00:00Z,Terkki
1956-02-12T00:00:00Z,10014,Berni,M,1987-03-11T00:00:00Z,Genin
1959-08-19T00:00:00Z,10015,Guoxiang,M,1987-07-02T00:00:00Z,Nooteboom
1961-05-02T00:00:00Z,10016,Kazuhito,M,1995-01-27T00:00:00Z,Cappelletti
1958-07-06T00:00:00Z,10017,Cristinel,F,1993-08-03T00:00:00Z,Bouloucos
1954-06-19T00:00:00Z,10018,Kazuhide,F,1987-04-03T00:00:00Z,Peha
1953-01-23T00:00:00Z,10019,Lillian,M,1999-04-30T00:00:00Z,Haddadi
1952-12-24T00:00:00Z,10020,Mayuko,M,1991-01-26T00:00:00Z,Warwick
1960-02-20T00:00:00Z,10021,Ramzi,M,1988-02-10T00:00:00Z,Erde
1952-07-08T00:00:00Z,10022,Shahaf,M,1995-08-22T00:00:00Z,Famili
1953-09-29T00:00:00Z,10023,Bojan,F,1989-12-17T00:00:00Z,Montemayor
1958-09-05T00:00:00Z,10024,Suzette,F,1997-05-19T00:00:00Z,Pettey
1958-10-31T00:00:00Z,10025,Prasadram,M,1987-08-17T00:00:00Z,Heyers
1953-04-03T00:00:00Z,10026,Yongqiao,M,1995-03-20T00:00:00Z,Berztiss
1962-07-10T00:00:00Z,10027,Divier,F,1989-07-07T00:00:00Z,Reistad
1963-11-26T00:00:00Z,10028,Domenick,M,1991-10-22T00:00:00Z,Tempesti
1956-12-13T00:00:00Z,10029,Otmar,M,1985-11-20T00:00:00Z,Herbst
1958-07-14T00:00:00Z,10030,Elvis,M,1994-02-17T00:00:00Z,Demeyer
1959-01-27T00:00:00Z,10031,Karsten,M,1991-09-01T00:00:00Z,Joslin
1960-08-09T00:00:00Z,10032,Jeong,F,1990-06-20T00:00:00Z,Reistad
1956-11-14T00:00:00Z,10033,Arif,M,1987-03-18T00:00:00Z,Merlo
1962-12-29T00:00:00Z,10034,Bader,M,1988-09-21T00:00:00Z,Swan
1953-02-08T00:00:00Z,10035,Alain,M,1988-09-05T00:00:00Z,Chappelet
1959-08-10T00:00:00Z,10036,Adamantios,M,1992-01-03T00:00:00Z,Portugali
1963-07-22T00:00:00Z,10037,Pradeep,M,1990-12-05T00:00:00Z,Makrucki
1960-07-20T00:00:00Z,10038,Huan,M,1989-09-20T00:00:00Z,Lortz
1959-10-01T00:00:00Z,10039,Alejandro,M,1988-01-19T00:00:00Z,Brender
1959-09-13T00:00:00Z,10040,Weiyi,F,1993-02-14T00:00:00Z,Meriste
1959-08-27T00:00:00Z,10041,Uri,F,1989-11-12T00:00:00Z,Lenart
1956-02-26T00:00:00Z,10042,Magy,F,1993-03-21T00:00:00Z,Stamatiou
1960-09-19T00:00:00Z,10043,Yishay,M,1990-10-20T00:00:00Z,Tzvieli
1961-09-21T00:00:00Z,10044,Mingsen,F,1994-05-21T00:00:00Z,Casley
1957-08-14T00:00:00Z,10045,Moss,M,1989-09-02T00:00:00Z,Shanbhogue
1960-07-23T00:00:00Z,10046,Lucien,M,1992-06-20T00:00:00Z,Rosenbaum
1952-06-29T00:00:00Z,10047,Zvonko,M,1989-03-31T00:00:00Z,Nyanchama
1963-07-11T00:00:00Z,10048,Florian,M,1985-02-24T00:00:00Z,Syrotiuk
1961-04-24T00:00:00Z,10049,Basil,F,1992-05-04T00:00:00Z,Tramer
1958-05-21T00:00:00Z,10050,Yinghua,M,1990-12-25T00:00:00Z,Dredge
1953-07-28T00:00:00Z,10051,Hidefumi,M,1992-10-15T00:00:00Z,Caine
1961-02-26T00:00:00Z,10052,Heping,M,1988-05-21T00:00:00Z,Nitsch
1954-09-13T00:00:00Z,10053,Sanjiv,F,1986-02-04T00:00:00Z,Zschoche
1957-04-04T00:00:00Z,10054,Mayumi,M,1995-03-13T00:00:00Z,Schueller
1956-06-06T00:00:00Z,10055,Georgy,M,1992-04-27T00:00:00Z,Dredge
1961-09-01T00:00:00Z,10056,Brendon,F,1990-02-01T00:00:00Z,Bernini
1954-05-30T00:00:00Z,10057,Ebbe,F,1992-01-15T00:00:00Z,Callaway
1954-10-01T00:00:00Z,10058,Berhard,M,1987-04-13T00:00:00Z,McFarlin
1953-09-19T00:00:00Z,10059,Alejandro,F,1991-06-26T00:00:00Z,McAlpine
1961-10-15T00:00:00Z,10060,Breannda,M,1987-11-02T00:00:00Z,Billingsley
1962-10-19T00:00:00Z,10061,Tse,M,1985-09-17T00:00:00Z,Herber
1961-11-02T00:00:00Z,10062,Anoosh,M,1991-08-30T00:00:00Z,Peyn
1952-08-06T00:00:00Z,10063,Gino,F,1989-04-08T00:00:00Z,Leonhardt
1959-04-07T00:00:00Z,10064,Udi,M,1985-11-20T00:00:00Z,Jansch
1963-04-14T00:00:00Z,10065,Satosi,M,1988-05-18T00:00:00Z,Awdeh
1952-11-13T00:00:00Z,10066,Kwee,M,1986-02-26T00:00:00Z,Schusler
1953-01-07T00:00:00Z,10067,Claudi,M,1987-03-04T00:00:00Z,Stavenow
1962-11-26T00:00:00Z,10068,Charlene,M,1987-08-07T00:00:00Z,Brattka
1960-09-06T00:00:00Z,10069,Margareta,F,1989-11-05T00:00:00Z,Bierman
1955-08-20T00:00:00Z,10070,Reuven,M,1985-10-14T00:00:00Z,Garigliano
1958-01-21T00:00:00Z,10071,Hisao,M,1987-10-01T00:00:00Z,Lipner
1952-05-15T00:00:00Z,10072,Hironoby,F,1988-07-21T00:00:00Z,Sidou
1954-02-23T00:00:00Z,10073,Shir,M,1991-12-01T00:00:00Z,McClurg
1955-08-28T00:00:00Z,10074,Mokhtar,F,1990-08-13T00:00:00Z,Bernatsky
1960-03-09T00:00:00Z,10075,Gao,F,1987-03-19T00:00:00Z,Dolinsky
1952-06-13T00:00:00Z,10076,Erez,F,1985-07-09T00:00:00Z,Ritzmann
1964-04-18T00:00:00Z,10077,Mona,M,1990-03-02T00:00:00Z,Azuma
1959-12-25T00:00:00Z,10078,Danel,F,1987-05-26T00:00:00Z,Mondadori
1961-10-05T00:00:00Z,10079,Kshitij,F,1986-03-27T00:00:00Z,Gils
1957-12-03T00:00:00Z,10080,Premal,M,1985-11-19T00:00:00Z,Baek
1960-12-17T00:00:00Z,10081,Zhongwei,M,1986-10-30T00:00:00Z,Rosen
1963-09-09T00:00:00Z,10082,Parviz,M,1990-01-03T00:00:00Z,Lortz
1959-07-23T00:00:00Z,10083,Vishv,M,1987-03-31T00:00:00Z,Zockler
1960-05-25T00:00:00Z,10084,Tuval,M,1995-12-15T00:00:00Z,Kalloufi
1962-11-07T00:00:00Z,10085,Kenroku,M,1994-04-09T00:00:00Z,Malabarba
1962-11-19T00:00:00Z,10086,Somnath,M,1990-02-16T00:00:00Z,Foote
1959-07-23T00:00:00Z,10087,Xinglin,F,1986-09-08T00:00:00Z,Eugenio
1954-02-25T00:00:00Z,10088,Jungsoon,F,1988-09-02T00:00:00Z,Syrzycki
1963-03-21T00:00:00Z,10089,Sudharsan,F,1986-08-12T00:00:00Z,Flasterstein
1961-05-30T00:00:00Z,10090,Kendra,M,1986-03-14T00:00:00Z,Hofting
1955-10-04T00:00:00Z,10091,Amabile,M,1992-11-18T00:00:00Z,Gomatam
1964-10-18T00:00:00Z,10092,Valdiodio,F,1989-09-22T00:00:00Z,Niizuma
1964-06-11T00:00:00Z,10093,Sailaja,M,1996-11-05T00:00:00Z,Desikan
1957-05-25T00:00:00Z,10094,Arumugam,F,1987-04-18T00:00:00Z,Ossenbruggen
1965-01-03T00:00:00Z,10095,Hilari,M,1986-07-15T00:00:00Z,Morton
1954-09-16T00:00:00Z,10096,Jayson,M,1990-01-14T00:00:00Z,Mandell
1952-02-27T00:00:00Z,10097,Remzi,M,1990-09-15T00:00:00Z,Waschkowski
1961-09-23T00:00:00Z,10098,Sreekrishna,F,1985-05-13T00:00:00Z,Servieres
1956-05-25T00:00:00Z,10099,Valter,F,1988-10-18T00:00:00Z,Sullins
1953-04-21T00:00:00Z,10100,Hironobu,F,1987-09-21T00:00:00Z,Haraldson
birth_date,emp_no,first_name,gender,hire_date,languages,last_name,salary
1953-09-02T00:00:00Z,10001,Georgi,M,1986-06-26T00:00:00Z,2,Facello,57305
1964-06-02T00:00:00Z,10002,Bezalel,F,1985-11-21T00:00:00Z,5,Simmel,56371
1959-12-03T00:00:00Z,10003,Parto,M,1986-08-28T00:00:00Z,4,Bamford,61805
1954-05-01T00:00:00Z,10004,Chirstian,M,1986-12-01T00:00:00Z,5,Koblick,36174
1955-01-21T00:00:00Z,10005,Kyoichi,M,1989-09-12T00:00:00Z,1,Maliniak,63528
1953-04-20T00:00:00Z,10006,Anneke,F,1989-06-02T00:00:00Z,3,Preusig,60335
1957-05-23T00:00:00Z,10007,Tzvetan,F,1989-02-10T00:00:00Z,4,Zielinski,74572
1958-02-19T00:00:00Z,10008,Saniya,M,1994-09-15T00:00:00Z,2,Kalloufi,43906
1952-04-19T00:00:00Z,10009,Sumant,F,1985-02-18T00:00:00Z,1,Peac,66174
1963-06-01T00:00:00Z,10010,Duangkaew,F,1989-08-24T00:00:00Z,4,Piveteau,45797
1953-11-07T00:00:00Z,10011,Mary,F,1990-01-22T00:00:00Z,5,Sluis,31120
1960-10-04T00:00:00Z,10012,Patricio,M,1992-12-18T00:00:00Z,5,Bridgland,48942
1963-06-07T00:00:00Z,10013,Eberhardt,M,1985-10-20T00:00:00Z,1,Terkki,48735
1956-02-12T00:00:00Z,10014,Berni,M,1987-03-11T00:00:00Z,5,Genin,37137
1959-08-19T00:00:00Z,10015,Guoxiang,M,1987-07-02T00:00:00Z,5,Nooteboom,25324
1961-05-02T00:00:00Z,10016,Kazuhito,M,1995-01-27T00:00:00Z,2,Cappelletti,61358
1958-07-06T00:00:00Z,10017,Cristinel,F,1993-08-03T00:00:00Z,2,Bouloucos,58715
1954-06-19T00:00:00Z,10018,Kazuhide,F,1987-04-03T00:00:00Z,2,Peha,56760
1953-01-23T00:00:00Z,10019,Lillian,M,1999-04-30T00:00:00Z,1,Haddadi,73717
1952-12-24T00:00:00Z,10020,Mayuko,M,1991-01-26T00:00:00Z,3,Warwick,40031
1960-02-20T00:00:00Z,10021,Ramzi,M,1988-02-10T00:00:00Z,5,Erde,60408
1952-07-08T00:00:00Z,10022,Shahaf,M,1995-08-22T00:00:00Z,3,Famili,48233
1953-09-29T00:00:00Z,10023,Bojan,F,1989-12-17T00:00:00Z,2,Montemayor,47896
1958-09-05T00:00:00Z,10024,Suzette,F,1997-05-19T00:00:00Z,3,Pettey,64675
1958-10-31T00:00:00Z,10025,Prasadram,M,1987-08-17T00:00:00Z,5,Heyers,47411
1953-04-03T00:00:00Z,10026,Yongqiao,M,1995-03-20T00:00:00Z,3,Berztiss,28336
1962-07-10T00:00:00Z,10027,Divier,F,1989-07-07T00:00:00Z,5,Reistad,73851
1963-11-26T00:00:00Z,10028,Domenick,M,1991-10-22T00:00:00Z,1,Tempesti,39356
1956-12-13T00:00:00Z,10029,Otmar,M,1985-11-20T00:00:00Z,3,Herbst,74999
1958-07-14T00:00:00Z,10030,Elvis,M,1994-02-17T00:00:00Z,3,Demeyer,67492
1959-01-27T00:00:00Z,10031,Karsten,M,1991-09-01T00:00:00Z,4,Joslin,37716
1960-08-09T00:00:00Z,10032,Jeong,F,1990-06-20T00:00:00Z,3,Reistad,62233
1956-11-14T00:00:00Z,10033,Arif,M,1987-03-18T00:00:00Z,1,Merlo,70011
1962-12-29T00:00:00Z,10034,Bader,M,1988-09-21T00:00:00Z,1,Swan,39878
1953-02-08T00:00:00Z,10035,Alain,M,1988-09-05T00:00:00Z,5,Chappelet,25945
1959-08-10T00:00:00Z,10036,Adamantios,M,1992-01-03T00:00:00Z,4,Portugali,60781
1963-07-22T00:00:00Z,10037,Pradeep,M,1990-12-05T00:00:00Z,2,Makrucki,37691
1960-07-20T00:00:00Z,10038,Huan,M,1989-09-20T00:00:00Z,4,Lortz,35222
1959-10-01T00:00:00Z,10039,Alejandro,M,1988-01-19T00:00:00Z,2,Brender,36051
1959-09-13T00:00:00Z,10040,Weiyi,F,1993-02-14T00:00:00Z,4,Meriste,37112
1959-08-27T00:00:00Z,10041,Uri,F,1989-11-12T00:00:00Z,1,Lenart,56415
1956-02-26T00:00:00Z,10042,Magy,F,1993-03-21T00:00:00Z,3,Stamatiou,30404
1960-09-19T00:00:00Z,10043,Yishay,M,1990-10-20T00:00:00Z,1,Tzvieli,34341
1961-09-21T00:00:00Z,10044,Mingsen,F,1994-05-21T00:00:00Z,1,Casley,39728
1957-08-14T00:00:00Z,10045,Moss,M,1989-09-02T00:00:00Z,3,Shanbhogue,74970
1960-07-23T00:00:00Z,10046,Lucien,M,1992-06-20T00:00:00Z,4,Rosenbaum,50064
1952-06-29T00:00:00Z,10047,Zvonko,M,1989-03-31T00:00:00Z,4,Nyanchama,42716
1963-07-11T00:00:00Z,10048,Florian,M,1985-02-24T00:00:00Z,3,Syrotiuk,26436
1961-04-24T00:00:00Z,10049,Basil,F,1992-05-04T00:00:00Z,5,Tramer,37853
1958-05-21T00:00:00Z,10050,Yinghua,M,1990-12-25T00:00:00Z,2,Dredge,43026
1953-07-28T00:00:00Z,10051,Hidefumi,M,1992-10-15T00:00:00Z,3,Caine,58121
1961-02-26T00:00:00Z,10052,Heping,M,1988-05-21T00:00:00Z,1,Nitsch,55360
1954-09-13T00:00:00Z,10053,Sanjiv,F,1986-02-04T00:00:00Z,3,Zschoche,54462
1957-04-04T00:00:00Z,10054,Mayumi,M,1995-03-13T00:00:00Z,4,Schueller,65367
1956-06-06T00:00:00Z,10055,Georgy,M,1992-04-27T00:00:00Z,5,Dredge,49281
1961-09-01T00:00:00Z,10056,Brendon,F,1990-02-01T00:00:00Z,2,Bernini,33370
1954-05-30T00:00:00Z,10057,Ebbe,F,1992-01-15T00:00:00Z,4,Callaway,27215
1954-10-01T00:00:00Z,10058,Berhard,M,1987-04-13T00:00:00Z,3,McFarlin,38376
1953-09-19T00:00:00Z,10059,Alejandro,F,1991-06-26T00:00:00Z,2,McAlpine,44307
1961-10-15T00:00:00Z,10060,Breannda,M,1987-11-02T00:00:00Z,2,Billingsley,29175
1962-10-19T00:00:00Z,10061,Tse,M,1985-09-17T00:00:00Z,1,Herber,49095
1961-11-02T00:00:00Z,10062,Anoosh,M,1991-08-30T00:00:00Z,3,Peyn,65030
1952-08-06T00:00:00Z,10063,Gino,F,1989-04-08T00:00:00Z,3,Leonhardt,52121
1959-04-07T00:00:00Z,10064,Udi,M,1985-11-20T00:00:00Z,5,Jansch,33956
1963-04-14T00:00:00Z,10065,Satosi,M,1988-05-18T00:00:00Z,2,Awdeh,50249
1952-11-13T00:00:00Z,10066,Kwee,M,1986-02-26T00:00:00Z,5,Schusler,31897
1953-01-07T00:00:00Z,10067,Claudi,M,1987-03-04T00:00:00Z,2,Stavenow,52044
1962-11-26T00:00:00Z,10068,Charlene,M,1987-08-07T00:00:00Z,3,Brattka,28941
1960-09-06T00:00:00Z,10069,Margareta,F,1989-11-05T00:00:00Z,5,Bierman,41933
1955-08-20T00:00:00Z,10070,Reuven,M,1985-10-14T00:00:00Z,3,Garigliano,54329
1958-01-21T00:00:00Z,10071,Hisao,M,1987-10-01T00:00:00Z,2,Lipner,40612
1952-05-15T00:00:00Z,10072,Hironoby,F,1988-07-21T00:00:00Z,5,Sidou,54518
1954-02-23T00:00:00Z,10073,Shir,M,1991-12-01T00:00:00Z,4,McClurg,32568
1955-08-28T00:00:00Z,10074,Mokhtar,F,1990-08-13T00:00:00Z,5,Bernatsky,38992
1960-03-09T00:00:00Z,10075,Gao,F,1987-03-19T00:00:00Z,5,Dolinsky,51956
1952-06-13T00:00:00Z,10076,Erez,F,1985-07-09T00:00:00Z,3,Ritzmann,62405
1964-04-18T00:00:00Z,10077,Mona,M,1990-03-02T00:00:00Z,5,Azuma,46595
1959-12-25T00:00:00Z,10078,Danel,F,1987-05-26T00:00:00Z,2,Mondadori,69904
1961-10-05T00:00:00Z,10079,Kshitij,F,1986-03-27T00:00:00Z,2,Gils,32263
1957-12-03T00:00:00Z,10080,Premal,M,1985-11-19T00:00:00Z,5,Baek,52833
1960-12-17T00:00:00Z,10081,Zhongwei,M,1986-10-30T00:00:00Z,2,Rosen,50128
1963-09-09T00:00:00Z,10082,Parviz,M,1990-01-03T00:00:00Z,4,Lortz,49818
1959-07-23T00:00:00Z,10083,Vishv,M,1987-03-31T00:00:00Z,1,Zockler,39110
1960-05-25T00:00:00Z,10084,Tuval,M,1995-12-15T00:00:00Z,1,Kalloufi,28035
1962-11-07T00:00:00Z,10085,Kenroku,M,1994-04-09T00:00:00Z,5,Malabarba,35742
1962-11-19T00:00:00Z,10086,Somnath,M,1990-02-16T00:00:00Z,1,Foote,68547
1959-07-23T00:00:00Z,10087,Xinglin,F,1986-09-08T00:00:00Z,5,Eugenio,32272
1954-02-25T00:00:00Z,10088,Jungsoon,F,1988-09-02T00:00:00Z,5,Syrzycki,39638
1963-03-21T00:00:00Z,10089,Sudharsan,F,1986-08-12T00:00:00Z,4,Flasterstein,43602
1961-05-30T00:00:00Z,10090,Kendra,M,1986-03-14T00:00:00Z,2,Hofting,44956
1955-10-04T00:00:00Z,10091,Amabile,M,1992-11-18T00:00:00Z,3,Gomatam,38645
1964-10-18T00:00:00Z,10092,Valdiodio,F,1989-09-22T00:00:00Z,1,Niizuma,25976
1964-06-11T00:00:00Z,10093,Sailaja,M,1996-11-05T00:00:00Z,3,Desikan,45656
1957-05-25T00:00:00Z,10094,Arumugam,F,1987-04-18T00:00:00Z,5,Ossenbruggen,66817
1965-01-03T00:00:00Z,10095,Hilari,M,1986-07-15T00:00:00Z,4,Morton,37702
1954-09-16T00:00:00Z,10096,Jayson,M,1990-01-14T00:00:00Z,4,Mandell,43889
1952-02-27T00:00:00Z,10097,Remzi,M,1990-09-15T00:00:00Z,3,Waschkowski,71165
1961-09-23T00:00:00Z,10098,Sreekrishna,F,1985-05-13T00:00:00Z,4,Servieres,44817
1956-05-25T00:00:00Z,10099,Valter,F,1988-10-18T00:00:00Z,2,Sullins,73578
1953-04-21T00:00:00Z,10100,Hironobu,F,1987-09-21T00:00:00Z,4,Haraldson,68431

1 birth_date emp_no first_name gender hire_date languages last_name salary
2 1953-09-02T00:00:00Z 10001 Georgi M 1986-06-26T00:00:00Z 2 Facello 57305
3 1964-06-02T00:00:00Z 10002 Bezalel F 1985-11-21T00:00:00Z 5 Simmel 56371
4 1959-12-03T00:00:00Z 10003 Parto M 1986-08-28T00:00:00Z 4 Bamford 61805
5 1954-05-01T00:00:00Z 10004 Chirstian M 1986-12-01T00:00:00Z 5 Koblick 36174
6 1955-01-21T00:00:00Z 10005 Kyoichi M 1989-09-12T00:00:00Z 1 Maliniak 63528
7 1953-04-20T00:00:00Z 10006 Anneke F 1989-06-02T00:00:00Z 3 Preusig 60335
8 1957-05-23T00:00:00Z 10007 Tzvetan F 1989-02-10T00:00:00Z 4 Zielinski 74572
9 1958-02-19T00:00:00Z 10008 Saniya M 1994-09-15T00:00:00Z 2 Kalloufi 43906
10 1952-04-19T00:00:00Z 10009 Sumant F 1985-02-18T00:00:00Z 1 Peac 66174
11 1963-06-01T00:00:00Z 10010 Duangkaew F 1989-08-24T00:00:00Z 4 Piveteau 45797
12 1953-11-07T00:00:00Z 10011 Mary F 1990-01-22T00:00:00Z 5 Sluis 31120
13 1960-10-04T00:00:00Z 10012 Patricio M 1992-12-18T00:00:00Z 5 Bridgland 48942
14 1963-06-07T00:00:00Z 10013 Eberhardt M 1985-10-20T00:00:00Z 1 Terkki 48735
15 1956-02-12T00:00:00Z 10014 Berni M 1987-03-11T00:00:00Z 5 Genin 37137
16 1959-08-19T00:00:00Z 10015 Guoxiang M 1987-07-02T00:00:00Z 5 Nooteboom 25324
17 1961-05-02T00:00:00Z 10016 Kazuhito M 1995-01-27T00:00:00Z 2 Cappelletti 61358
18 1958-07-06T00:00:00Z 10017 Cristinel F 1993-08-03T00:00:00Z 2 Bouloucos 58715
19 1954-06-19T00:00:00Z 10018 Kazuhide F 1987-04-03T00:00:00Z 2 Peha 56760
20 1953-01-23T00:00:00Z 10019 Lillian M 1999-04-30T00:00:00Z 1 Haddadi 73717
21 1952-12-24T00:00:00Z 10020 Mayuko M 1991-01-26T00:00:00Z 3 Warwick 40031
22 1960-02-20T00:00:00Z 10021 Ramzi M 1988-02-10T00:00:00Z 5 Erde 60408
23 1952-07-08T00:00:00Z 10022 Shahaf M 1995-08-22T00:00:00Z 3 Famili 48233
24 1953-09-29T00:00:00Z 10023 Bojan F 1989-12-17T00:00:00Z 2 Montemayor 47896
25 1958-09-05T00:00:00Z 10024 Suzette F 1997-05-19T00:00:00Z 3 Pettey 64675
26 1958-10-31T00:00:00Z 10025 Prasadram M 1987-08-17T00:00:00Z 5 Heyers 47411
27 1953-04-03T00:00:00Z 10026 Yongqiao M 1995-03-20T00:00:00Z 3 Berztiss 28336
28 1962-07-10T00:00:00Z 10027 Divier F 1989-07-07T00:00:00Z 5 Reistad 73851
29 1963-11-26T00:00:00Z 10028 Domenick M 1991-10-22T00:00:00Z 1 Tempesti 39356
30 1956-12-13T00:00:00Z 10029 Otmar M 1985-11-20T00:00:00Z 3 Herbst 74999
31 1958-07-14T00:00:00Z 10030 Elvis M 1994-02-17T00:00:00Z 3 Demeyer 67492
32 1959-01-27T00:00:00Z 10031 Karsten M 1991-09-01T00:00:00Z 4 Joslin 37716
33 1960-08-09T00:00:00Z 10032 Jeong F 1990-06-20T00:00:00Z 3 Reistad 62233
34 1956-11-14T00:00:00Z 10033 Arif M 1987-03-18T00:00:00Z 1 Merlo 70011
35 1962-12-29T00:00:00Z 10034 Bader M 1988-09-21T00:00:00Z 1 Swan 39878
36 1953-02-08T00:00:00Z 10035 Alain M 1988-09-05T00:00:00Z 5 Chappelet 25945
37 1959-08-10T00:00:00Z 10036 Adamantios M 1992-01-03T00:00:00Z 4 Portugali 60781
38 1963-07-22T00:00:00Z 10037 Pradeep M 1990-12-05T00:00:00Z 2 Makrucki 37691
39 1960-07-20T00:00:00Z 10038 Huan M 1989-09-20T00:00:00Z 4 Lortz 35222
40 1959-10-01T00:00:00Z 10039 Alejandro M 1988-01-19T00:00:00Z 2 Brender 36051
41 1959-09-13T00:00:00Z 10040 Weiyi F 1993-02-14T00:00:00Z 4 Meriste 37112
42 1959-08-27T00:00:00Z 10041 Uri F 1989-11-12T00:00:00Z 1 Lenart 56415
43 1956-02-26T00:00:00Z 10042 Magy F 1993-03-21T00:00:00Z 3 Stamatiou 30404
44 1960-09-19T00:00:00Z 10043 Yishay M 1990-10-20T00:00:00Z 1 Tzvieli 34341
45 1961-09-21T00:00:00Z 10044 Mingsen F 1994-05-21T00:00:00Z 1 Casley 39728
46 1957-08-14T00:00:00Z 10045 Moss M 1989-09-02T00:00:00Z 3 Shanbhogue 74970
47 1960-07-23T00:00:00Z 10046 Lucien M 1992-06-20T00:00:00Z 4 Rosenbaum 50064
48 1952-06-29T00:00:00Z 10047 Zvonko M 1989-03-31T00:00:00Z 4 Nyanchama 42716
49 1963-07-11T00:00:00Z 10048 Florian M 1985-02-24T00:00:00Z 3 Syrotiuk 26436
50 1961-04-24T00:00:00Z 10049 Basil F 1992-05-04T00:00:00Z 5 Tramer 37853
51 1958-05-21T00:00:00Z 10050 Yinghua M 1990-12-25T00:00:00Z 2 Dredge 43026
52 1953-07-28T00:00:00Z 10051 Hidefumi M 1992-10-15T00:00:00Z 3 Caine 58121
53 1961-02-26T00:00:00Z 10052 Heping M 1988-05-21T00:00:00Z 1 Nitsch 55360
54 1954-09-13T00:00:00Z 10053 Sanjiv F 1986-02-04T00:00:00Z 3 Zschoche 54462
55 1957-04-04T00:00:00Z 10054 Mayumi M 1995-03-13T00:00:00Z 4 Schueller 65367
56 1956-06-06T00:00:00Z 10055 Georgy M 1992-04-27T00:00:00Z 5 Dredge 49281
57 1961-09-01T00:00:00Z 10056 Brendon F 1990-02-01T00:00:00Z 2 Bernini 33370
58 1954-05-30T00:00:00Z 10057 Ebbe F 1992-01-15T00:00:00Z 4 Callaway 27215
59 1954-10-01T00:00:00Z 10058 Berhard M 1987-04-13T00:00:00Z 3 McFarlin 38376
60 1953-09-19T00:00:00Z 10059 Alejandro F 1991-06-26T00:00:00Z 2 McAlpine 44307
61 1961-10-15T00:00:00Z 10060 Breannda M 1987-11-02T00:00:00Z 2 Billingsley 29175
62 1962-10-19T00:00:00Z 10061 Tse M 1985-09-17T00:00:00Z 1 Herber 49095
63 1961-11-02T00:00:00Z 10062 Anoosh M 1991-08-30T00:00:00Z 3 Peyn 65030
64 1952-08-06T00:00:00Z 10063 Gino F 1989-04-08T00:00:00Z 3 Leonhardt 52121
65 1959-04-07T00:00:00Z 10064 Udi M 1985-11-20T00:00:00Z 5 Jansch 33956
66 1963-04-14T00:00:00Z 10065 Satosi M 1988-05-18T00:00:00Z 2 Awdeh 50249
67 1952-11-13T00:00:00Z 10066 Kwee M 1986-02-26T00:00:00Z 5 Schusler 31897
68 1953-01-07T00:00:00Z 10067 Claudi M 1987-03-04T00:00:00Z 2 Stavenow 52044
69 1962-11-26T00:00:00Z 10068 Charlene M 1987-08-07T00:00:00Z 3 Brattka 28941
70 1960-09-06T00:00:00Z 10069 Margareta F 1989-11-05T00:00:00Z 5 Bierman 41933
71 1955-08-20T00:00:00Z 10070 Reuven M 1985-10-14T00:00:00Z 3 Garigliano 54329
72 1958-01-21T00:00:00Z 10071 Hisao M 1987-10-01T00:00:00Z 2 Lipner 40612
73 1952-05-15T00:00:00Z 10072 Hironoby F 1988-07-21T00:00:00Z 5 Sidou 54518
74 1954-02-23T00:00:00Z 10073 Shir M 1991-12-01T00:00:00Z 4 McClurg 32568
75 1955-08-28T00:00:00Z 10074 Mokhtar F 1990-08-13T00:00:00Z 5 Bernatsky 38992
76 1960-03-09T00:00:00Z 10075 Gao F 1987-03-19T00:00:00Z 5 Dolinsky 51956
77 1952-06-13T00:00:00Z 10076 Erez F 1985-07-09T00:00:00Z 3 Ritzmann 62405
78 1964-04-18T00:00:00Z 10077 Mona M 1990-03-02T00:00:00Z 5 Azuma 46595
79 1959-12-25T00:00:00Z 10078 Danel F 1987-05-26T00:00:00Z 2 Mondadori 69904
80 1961-10-05T00:00:00Z 10079 Kshitij F 1986-03-27T00:00:00Z 2 Gils 32263
81 1957-12-03T00:00:00Z 10080 Premal M 1985-11-19T00:00:00Z 5 Baek 52833
82 1960-12-17T00:00:00Z 10081 Zhongwei M 1986-10-30T00:00:00Z 2 Rosen 50128
83 1963-09-09T00:00:00Z 10082 Parviz M 1990-01-03T00:00:00Z 4 Lortz 49818
84 1959-07-23T00:00:00Z 10083 Vishv M 1987-03-31T00:00:00Z 1 Zockler 39110
85 1960-05-25T00:00:00Z 10084 Tuval M 1995-12-15T00:00:00Z 1 Kalloufi 28035
86 1962-11-07T00:00:00Z 10085 Kenroku M 1994-04-09T00:00:00Z 5 Malabarba 35742
87 1962-11-19T00:00:00Z 10086 Somnath M 1990-02-16T00:00:00Z 1 Foote 68547
88 1959-07-23T00:00:00Z 10087 Xinglin F 1986-09-08T00:00:00Z 5 Eugenio 32272
89 1954-02-25T00:00:00Z 10088 Jungsoon F 1988-09-02T00:00:00Z 5 Syrzycki 39638
90 1963-03-21T00:00:00Z 10089 Sudharsan F 1986-08-12T00:00:00Z 4 Flasterstein 43602
91 1961-05-30T00:00:00Z 10090 Kendra M 1986-03-14T00:00:00Z 2 Hofting 44956
92 1955-10-04T00:00:00Z 10091 Amabile M 1992-11-18T00:00:00Z 3 Gomatam 38645
93 1964-10-18T00:00:00Z 10092 Valdiodio F 1989-09-22T00:00:00Z 1 Niizuma 25976
94 1964-06-11T00:00:00Z 10093 Sailaja M 1996-11-05T00:00:00Z 3 Desikan 45656
95 1957-05-25T00:00:00Z 10094 Arumugam F 1987-04-18T00:00:00Z 5 Ossenbruggen 66817
96 1965-01-03T00:00:00Z 10095 Hilari M 1986-07-15T00:00:00Z 4 Morton 37702
97 1954-09-16T00:00:00Z 10096 Jayson M 1990-01-14T00:00:00Z 4 Mandell 43889
98 1952-02-27T00:00:00Z 10097 Remzi M 1990-09-15T00:00:00Z 3 Waschkowski 71165
99 1961-09-23T00:00:00Z 10098 Sreekrishna F 1985-05-13T00:00:00Z 4 Servieres 44817
100 1956-05-25T00:00:00Z 10099 Valter F 1988-10-18T00:00:00Z 2 Sullins 73578
101 1953-04-21T00:00:00Z 10100 Hironobu F 1987-09-21T00:00:00Z 4 Haraldson 68431

View File

@ -1,9 +1,12 @@
DROP TABLE IF EXISTS "test_emp";
CREATE TABLE "test_emp" ("birth_date" TIMESTAMP WITH TIME ZONE,
"emp_no" INT,
"first_name" VARCHAR(50),
"gender" VARCHAR(1),
"hire_date" TIMESTAMP WITH TIME ZONE,
"last_name" VARCHAR(50)
CREATE TABLE "test_emp" (
"birth_date" TIMESTAMP WITH TIME ZONE,
"emp_no" INT,
"first_name" VARCHAR(50),
"gender" VARCHAR(1),
"hire_date" TIMESTAMP WITH TIME ZONE,
"languages" TINYINT,
"last_name" VARCHAR(50),
"salary" INT
)
AS SELECT * FROM CSVREAD('classpath:/employees.csv');

View File

@ -14,8 +14,6 @@ import org.elasticsearch.xpack.sql.expression.Alias;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.AttributeSet;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.ExpressionId;
import org.elasticsearch.xpack.sql.expression.ExpressionIdGenerator;
import org.elasticsearch.xpack.sql.expression.Expressions;
import org.elasticsearch.xpack.sql.expression.Literal;
import org.elasticsearch.xpack.sql.expression.NamedExpression;
@ -31,7 +29,6 @@ import org.elasticsearch.xpack.sql.expression.function.FunctionDefinition;
import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
import org.elasticsearch.xpack.sql.expression.function.Functions;
import org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction;
import org.elasticsearch.xpack.sql.expression.function.aggregate.Count;
import org.elasticsearch.xpack.sql.expression.function.scalar.Cast;
import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.ArithmeticFunction;
import org.elasticsearch.xpack.sql.plan.TableIdentifier;
@ -44,13 +41,13 @@ import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
import org.elasticsearch.xpack.sql.plan.logical.OrderBy;
import org.elasticsearch.xpack.sql.plan.logical.Project;
import org.elasticsearch.xpack.sql.plan.logical.SubQueryAlias;
import org.elasticsearch.xpack.sql.plan.logical.UnaryPlan;
import org.elasticsearch.xpack.sql.plan.logical.UnresolvedRelation;
import org.elasticsearch.xpack.sql.plan.logical.With;
import org.elasticsearch.xpack.sql.rule.Rule;
import org.elasticsearch.xpack.sql.rule.RuleExecutor;
import org.elasticsearch.xpack.sql.session.SqlSession;
import org.elasticsearch.xpack.sql.tree.Node;
import org.elasticsearch.xpack.sql.tree.NodeUtils;
import org.elasticsearch.xpack.sql.type.CompoundDataType;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.DataTypeConversion;
@ -161,7 +158,7 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
if (attribute instanceof NestedFieldAttribute) {
// since u might be unqualified but the parent shows up as a qualifier
if (Objects.equals(u.qualifiedName(), attribute.name())) {
matches.add(attribute);
matches.add(attribute.withLocation(u.location()));
}
}
}
@ -172,7 +169,7 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
if (matches.isEmpty()) {
for (Attribute attribute : attrList) {
if (!attribute.synthetic() && Objects.equals(u.name(), attribute.name())) {
matches.add(attribute);
matches.add(attribute.withLocation(u.location()));
}
}
}
@ -205,7 +202,7 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
}
private static boolean containsAggregate(List<? extends Expression> list) {
return Expressions.anyMatchInList(list, Functions::isAggregateFunction);
return Expressions.anyMatch(list, Functions::isAggregate);
}
private static boolean containsAggregate(Expression exp) {
@ -213,7 +210,7 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
}
private class CTESubstitution extends AnalyzeRule<With> {
private static class CTESubstitution extends AnalyzeRule<With> {
@Override
protected LogicalPlan rule(With plan) {
@ -277,7 +274,7 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
}
}
private class ResolveRefs extends AnalyzeRule<LogicalPlan> {
private static class ResolveRefs extends AnalyzeRule<LogicalPlan> {
@Override
protected LogicalPlan rule(LogicalPlan plan) {
@ -351,8 +348,8 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
UnresolvedAttribute u = (UnresolvedAttribute) e;
NamedExpression named = resolveAgainstList(u,
plan.children().stream()
.flatMap(c -> c.output().stream())
.collect(toList()),
.flatMap(c -> c.output().stream())
.collect(toList()),
false);
// if resolved, return it; otherwise keep it in place to be resolved later
if (named != null) {
@ -378,38 +375,38 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
// check if there's a qualifier
// no - means only top-level
// it is - return only that level
if (e instanceof UnresolvedStar) {
List<Attribute> output = child.output();
UnresolvedStar us = (UnresolvedStar) e;
if (e instanceof UnresolvedStar) {
List<Attribute> output = child.output();
UnresolvedStar us = (UnresolvedStar) e;
Stream<Attribute> stream = output.stream();
Stream<Attribute> stream = output.stream();
if (us.qualifier() == null) {
stream = stream.filter(a -> !(a instanceof NestedFieldAttribute));
}
// if there's a qualifier, inspect that level
if (us.qualifier() != null) {
// qualifier is selected, need to resolve that first.
Attribute qualifier = resolveAgainstList(us.qualifier(), output, false);
stream = stream.filter(a -> (a instanceof NestedFieldAttribute)
&& Objects.equals(a.qualifier(), qualifier.qualifier())
&& Objects.equals(((NestedFieldAttribute) a).parentPath(), qualifier.name()));
}
return stream.filter(a -> !(a.dataType() instanceof CompoundDataType));
if (us.qualifier() == null) {
stream = stream.filter(a -> !(a instanceof NestedFieldAttribute));
}
else if (e instanceof UnresolvedAlias) {
UnresolvedAlias ua = (UnresolvedAlias) e;
if (ua.child() instanceof UnresolvedStar) {
return child.output().stream();
}
return Stream.of(e);
// if there's a qualifier, inspect that level
if (us.qualifier() != null) {
// qualifier is selected, need to resolve that first.
Attribute qualifier = resolveAgainstList(us.qualifier(), output, false);
stream = stream.filter(a -> (a instanceof NestedFieldAttribute)
&& Objects.equals(a.qualifier(), qualifier.qualifier())
&& Objects.equals(((NestedFieldAttribute) a).parentPath(), qualifier.name()));
}
return stream.filter(a -> !(a.dataType() instanceof CompoundDataType));
}
else if (e instanceof UnresolvedAlias) {
UnresolvedAlias ua = (UnresolvedAlias) e;
if (ua.child() instanceof UnresolvedStar) {
return child.output().stream();
}
return Stream.of(e);
})
.map(NamedExpression.class::cast)
.collect(toList());
}
return Stream.of(e);
})
.map(NamedExpression.class::cast)
.collect(toList());
}
// generate a new (right) logical plan with different IDs for all conflicting attributes
@ -426,7 +423,7 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
// Allow ordinal positioning in order/sort by (quite useful when dealing with aggs)
// Note that ordering starts at 1
private class ResolveOrdinalInOrderByAndGroupBy extends AnalyzeRule<LogicalPlan> {
private static class ResolveOrdinalInOrderByAndGroupBy extends AnalyzeRule<LogicalPlan> {
@Override
protected boolean skipResolved() {
@ -517,12 +514,22 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
}
}
// In some SQL dialects it is valid to filter or sort by attributes not present in the SELECT clause.
// It is valid to filter (including HAVING) or sort by attributes not present in the SELECT clause.
// This rule pushed down the attributes for them to be resolved then projects them away.
// As such this rule is an extended version of ResolveRefs
private class ResolveMissingRefs extends AnalyzeRule<LogicalPlan> {
private static class ResolveMissingRefs extends AnalyzeRule<LogicalPlan> {
/**
 * Resolution metadata attached to an {@code UnresolvedAttribute} when an attribute
 * could not be pushed down into an {@code Aggregate} because it does not appear in
 * the groupings. Records the names of the valid grouping expressions so a better
 * error message can be produced later (see {@code resolveMetadataToMessage}).
 */
private static class AggGroupingFailure {
    // names of the grouping expressions the attribute was expected to match
    final List<String> expectedGrouping;

    private AggGroupingFailure(List<String> expectedGrouping) {
        this.expectedGrouping = expectedGrouping;
    }
}
@Override
protected LogicalPlan rule(LogicalPlan plan) {
if (plan instanceof OrderBy && !plan.resolved() && plan.childrenResolved()) {
OrderBy o = (OrderBy) plan;
List<Order> maybeResolved = o.order().stream()
@ -530,17 +537,34 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
.collect(toList());
AttributeSet resolvedRefs = Expressions.references(maybeResolved.stream()
.filter(Expression::resolved)
.collect(toList()));
.filter(Expression::resolved)
.collect(toList()));
AttributeSet missing = resolvedRefs.substract(o.child().outputSet());
if (!missing.isEmpty()) {
// Add missing attributes but project them away afterwards
return new Project(o.location(),
new OrderBy(o.location(), propagateMissing(o.child(), missing), maybeResolved),
o.child().output());
List<Attribute> failedAttrs = new ArrayList<>();
LogicalPlan newChild = propagateMissing(o.child(), missing, failedAttrs);
// resolution failed and the failed expressions might contain resolution information so copy it over
if (!failedAttrs.isEmpty()) {
List<Order> newOrders = new ArrayList<>();
// transform the orders with the failed information
for (Order order : o.order()) {
Order transformed = (Order) order.transformUp(ua -> resolveMetadataToMessage(ua, failedAttrs, "order"),
UnresolvedAttribute.class);
newOrders.add(order.equals(transformed) ? order : transformed);
}
return o.order().equals(newOrders) ? o : new OrderBy(o.location(), o.child(), newOrders);
}
// everything worked
return new Project(o.location(), new OrderBy(o.location(), newChild, maybeResolved), o.child().output());
}
if (!maybeResolved.equals(o.order())) {
return new OrderBy(o.location(), o.child(), maybeResolved);
}
@ -550,16 +574,28 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
Filter f = (Filter) plan;
Expression maybeResolved = tryResolveExpression(f.condition(), f.child());
AttributeSet resolvedRefs = new AttributeSet(maybeResolved.references().stream()
.filter(Expression::resolved)
.collect(toList()));
.filter(Expression::resolved)
.collect(toList()));
AttributeSet missing = resolvedRefs.substract(f.child().outputSet());
if (!missing.isEmpty()) {
// Again, add missing attributes and project them away
return new Project(f.location(),
new Filter(f.location(), propagateMissing(f.child(), missing), maybeResolved),
f.child().output());
List<Attribute> failedAttrs = new ArrayList<>();
LogicalPlan newChild = propagateMissing(f.child(), missing, failedAttrs);
// resolution failed and the failed expressions might contain resolution information so copy it over
if (!failedAttrs.isEmpty()) {
// transform the orders with the failed information
Expression transformed = f.condition().transformUp(ua -> resolveMetadataToMessage(ua, failedAttrs, "filter"),
UnresolvedAttribute.class);
return f.condition().equals(transformed) ? f : new Filter(f.location(), f.child(), transformed);
}
return new Project(f.location(), new Filter(f.location(), newChild, maybeResolved), f.child().output());
}
if (!maybeResolved.equals(f.condition())) {
return new Filter(f.location(), f.child(), maybeResolved);
}
@ -568,7 +604,7 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
return plan;
}
private <E extends Expression> E tryResolveExpression(E exp, LogicalPlan plan) {
static <E extends Expression> E tryResolveExpression(E exp, LogicalPlan plan) {
E resolved = resolveExpression(exp, plan, true);
if (!resolved.resolved()) {
// look at unary trees but ignore subqueries
@ -580,36 +616,64 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
}
private LogicalPlan propagateMissing(LogicalPlan logicalPlan, AttributeSet missing) {
private static LogicalPlan propagateMissing(LogicalPlan plan, AttributeSet missing, List<Attribute> failed) {
// no more attributes, bail out
if (missing.isEmpty()) {
return logicalPlan;
return plan;
}
return logicalPlan.transformDown(plan -> {
if (plan instanceof Project) {
Project p = (Project) plan;
AttributeSet diff = missing.substract(p.child().outputSet());
return new Project(p.location(), propagateMissing(p.child(), diff), combine(p.projections(), missing));
}
if (plan instanceof Project) {
Project p = (Project) plan;
AttributeSet diff = missing.substract(p.child().outputSet());
return new Project(p.location(), propagateMissing(p.child(), diff, failed), combine(p.projections(), missing));
}
if (plan instanceof Aggregate) {
Aggregate a = (Aggregate) plan;
// missing attributes can only be grouping expressions
for (Attribute m : missing) {
// but we can't add an agg if the group is missing
if (!Expressions.anyMatchInList(a.groupings(), g -> g.canonicalEquals(m))) {
// we cannot propagate the missing attribute, bail out
//throw new AnalysisException(logicalPlan, "Cannot add missing attribute %s to %s", m.name(), plan);
return plan;
if (plan instanceof Aggregate) {
Aggregate a = (Aggregate) plan;
// missing attributes can only be grouping expressions
for (Attribute m : missing) {
// but we can't add an agg if the group is missing
if (!Expressions.anyMatch(a.groupings(), m::semanticEquals)) {
if (m instanceof Attribute) {
// pass failure information to help the verifier
m = new UnresolvedAttribute(m.location(), m.name(), m.qualifier(), null, null,
new AggGroupingFailure(Expressions.names(a.groupings())));
}
failed.add(m);
}
}
// propagation failed, return original plan
if (!failed.isEmpty()) {
return plan;
}
return new Aggregate(a.location(), a.child(), a.groupings(), combine(a.aggregates(), missing));
}
// LeafPlans are tables and BinaryPlans are joins so pushing can only happen on unary
if (plan instanceof UnaryPlan) {
return plan.replaceChildren(singletonList(propagateMissing(((UnaryPlan) plan).child(), missing, failed)));
}
failed.addAll(missing);
return plan;
}
private static UnresolvedAttribute resolveMetadataToMessage(UnresolvedAttribute ua, List<Attribute> attrs, String actionName) {
for (Attribute attr : attrs) {
if (ua.resolutionMetadata() == null && attr.name().equals(ua.name())) {
if (attr instanceof UnresolvedAttribute) {
UnresolvedAttribute fua = (UnresolvedAttribute) attr;
Object metadata = fua.resolutionMetadata();
if (metadata instanceof AggGroupingFailure) {
List<String> names = ((AggGroupingFailure) metadata).expectedGrouping;
return ua.withUnresolvedMessage(
"Cannot " + actionName + " by non-grouped column [" + ua.qualifiedName() + "], expected " + names);
}
}
return new Aggregate(a.location(), a.child(), a.groupings(), combine(a.aggregates(), missing));
}
return plan;
});
}
}
return ua;
};
}
// to avoid creating duplicate functions
@ -709,14 +773,14 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
private List<Function> getList(Map<String, List<Function>> seen, String name) {
List<Function> list = seen.get(name);
if (list == null) {
list = new ArrayList<Function>();
list = new ArrayList<>();
seen.put(name, list);
}
return list;
}
}
private class ResolveAliases extends AnalyzeRule<LogicalPlan> {
private static class ResolveAliases extends AnalyzeRule<LogicalPlan> {
@Override
protected LogicalPlan rule(LogicalPlan plan) {
@ -773,7 +837,7 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
//
// Replace a project with aggregation into an aggregation
//
private class ProjectedAggregations extends AnalyzeRule<Project> {
private static class ProjectedAggregations extends AnalyzeRule<Project> {
@Override
protected LogicalPlan rule(Project p) {
@ -785,12 +849,16 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
};
//
// Handle aggs in HAVING and ORDER BY clause. In both cases, the function argument
// cannot be resolved being masked by the subplan; to get around it the expression is pushed down
// and then projected.
// Handle aggs in HAVING and ORDER BY clause. To help folding any aggs not found in Aggregation
// will be pushed down to the Aggregate and then projected. This also simplifies the Verifier's job.
//
private class ResolveAggsInHavingAndOrderBy extends AnalyzeRule<LogicalPlan> {
@Override
protected boolean skipResolved() {
return false;
}
@Override
protected LogicalPlan rule(LogicalPlan plan) {
// HAVING = Filter followed by an Agg
@ -799,50 +867,38 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
if (f.child() instanceof Aggregate && f.child().resolved()) {
Aggregate agg = (Aggregate) f.child();
// first try to see whether the agg contains the functions in the filter
// and if so point the aliases to them
Set<NamedExpression> missing = null;
Expression condition = f.condition();
Map<Function, Attribute> resolvedFunc = new LinkedHashMap<>();
for (NamedExpression ne : agg.aggregates()) {
if (ne instanceof Alias) {
Alias as = (Alias) ne;
if (as.child() instanceof Function) {
resolvedFunc.put((Function) as.child(), as.toAttribute());
}
// the condition might contain an agg (AVG(salary)) that could have been resolved
// (salary cannot be pushed down to Aggregate since there's no grouping and thus the function wasn't resolved either)
// so try resolving the condition in one go through a 'dummy' aggregate
if (!condition.resolved()) {
// that's why try to resolve the condition
Aggregate tryResolvingCondition = new Aggregate(agg.location(), agg.child(), agg.groupings(),
singletonList(new Alias(f.location(), ".having", condition)));
LogicalPlan conditionResolved = analyze(tryResolvingCondition, false);
// if it got resolved
if (conditionResolved.resolved()) {
// replace the condition with the resolved one
condition = ((Alias) ((Aggregate) conditionResolved).aggregates().get(0)).child();
} else {
// else bail out
return plan;
}
}
if (resolvedFunc.isEmpty()) {
return plan;
missing = findMissingAggregate(agg, condition);
if (!missing.isEmpty()) {
Aggregate newAgg = new Aggregate(agg.location(), agg.child(), agg.groupings(), combine(agg.aggregates(), missing));
Filter newFilter = new Filter(f.location(), newAgg, condition);
// preserve old output
return new Project(f.location(), newFilter, f.output());
}
Expression resolvedCondition = f.condition().transformUp(exp -> {
if (!(exp instanceof Function)) {
return exp;
}
Function func = (Function) exp;
Function match = null;
if (!func.resolved()) {
// if it not resolved try to resolve the arguments
match = resolveFunction(func, resolvedFunc.keySet());
}
else {
// make sure to eliminate total count (it does not make sense to condition by it or does it?)
if (isTotalCount(func)) {
throw new AnalysisException(f, "Global/Total count cannot be used inside the HAVING clause");
}
// if it is resolved, find its equal
match = findResolved(func, resolvedFunc.keySet());
}
return match != null ? resolvedFunc.get(match) : exp;
});
if (resolvedCondition != f.condition()) {
return new Filter(f.location(), f.child(), resolvedCondition);
}
// throw an exception instead of full analysis
}
return plan;
}
@ -850,118 +906,20 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
return plan;
}
private boolean isTotalCount(Function f) {
if (f instanceof Count) {
Count c = (Count) f;
if (!c.distinct()) {
if (c.field() instanceof Literal && c.field().dataType().isInteger()) {
return true;
}
private Set<NamedExpression> findMissingAggregate(Aggregate target, Expression from) {
Set<NamedExpression> missing = new LinkedHashSet<>();
for (Expression filterAgg : from.collect(Functions::isAggregate)) {
if (!Expressions.anyMatch(target.aggregates(),
a -> {
Attribute attr = Expressions.attribute(a);
return attr != null && attr.semanticEquals(Expressions.attribute(filterAgg));
})) {
missing.add(Expressions.wrapAsNamed(filterAgg));
}
}
return false;
}
/**
 * Attempts to match a function against one of the already resolved functions by
 * comparing name, arity and each argument pair-wise. An unresolved attribute argument
 * is matched by resolving it against the corresponding resolved argument; any other
 * argument must be canonically equal to its counterpart.
 *
 * @return the matching resolved function, or {@code null} when none matches
 */
private Function resolveFunction(Function func, Collection<Function> resolvedFunc) {
    for (Function rf : resolvedFunc) {
        if (rf.name().equals(func.name()) && rf.arguments().size() == func.arguments().size()) {
            boolean match = true;
            for (int i = 0; match && i < func.arguments().size(); i++) {
                Expression arg = func.arguments().get(i);
                Expression resolvedArg = rf.arguments().get(i);
                // try to resolve the arg based on the function
                if (arg instanceof UnresolvedAttribute && resolvedArg instanceof NamedExpression) {
                    UnresolvedAttribute u = (UnresolvedAttribute) arg;
                    NamedExpression named = resolveAgainstList(u,
                            singletonList(((NamedExpression) resolvedArg).toAttribute()),
                            true);
                    if (named == null) {
                        // the argument cannot be resolved against this candidate
                        match = false;
                    }
                }
                // fix: compare against the corresponding argument (previously compared the
                // argument against the whole candidate function) and make a failed comparison
                // actually reject the candidate (previously the break fell through to the
                // unconditional success return below)
                else if (!arg.canonicalEquals(resolvedArg)) {
                    match = false;
                }
            }
            if (match) {
                // found a match
                return rf;
            }
        }
    }
    return null;
}
/**
 * Returns the function from {@code resolvedFunc} that is canonically equal to
 * {@code func}, or {@code null} when no such function exists.
 */
private Function findResolved(Function func, Collection<Function> resolvedFunc) {
    return resolvedFunc.stream()
            .filter(candidate -> candidate.canonicalEquals(func))
            .findFirst()
            .orElse(null);
}
// rule to add an agg from a filter to the aggregate below it
// suitable when doing filtering with extra aggs:
// SELECT SUM(g) FROM ... GROUP BY g HAVING AVG(g) > 10
// AVG is used for filtering but it's not declared on the Aggregate
private LogicalPlan fullAnalysis(Filter f, Aggregate agg) {
try {
// try to resolve the expression separately
// first, wrap it in an alias
ExpressionId newId = ExpressionIdGenerator.newId();
Alias wrappedCondition = new Alias(f.location(), "generated#" + newId, null, f.condition(), newId, true);
// pass it to an Agg
Aggregate attempt = new Aggregate(agg.location(), agg.child(), agg.groupings(), singletonList(wrappedCondition));
// then try resolving it
Aggregate analyzedAgg = (Aggregate) execute(attempt);
NamedExpression analyzedCondition = analyzedAgg.aggregates().get(0);
// everything was resolved
if (analyzedCondition.resolved()) {
List<NamedExpression> resolvedAggExp = new ArrayList<>();
// break down the expression into parts
Expression analyzedFilterCondition = analyzedCondition.transformDown(exp -> {
NamedExpression named = null;
if (exp instanceof Function) {
Function func = (Function) exp;
named = new Alias(func.location(), func.name(), func);
}
// if the grouping appears in the filter (and it is not exported out)
if (Expressions.anyMatchInList(agg.groupings(), g -> g.canonicalEquals(exp))
&& !Expressions.anyMatchInList(agg.output(), o -> o.canonicalEquals(exp))) {
named = exp instanceof NamedExpression ? (NamedExpression) exp : new Alias(exp.location(), exp.nodeName(), exp);
}
if (named != null) {
resolvedAggExp.add(named);
return named.toAttribute();
}
return exp;
});
// Replace the resolved expression
if (!resolvedAggExp.isEmpty()) {
// push them down to the agg
Aggregate newAgg = new Aggregate(agg.location(), agg.child(), agg.groupings(),
combine(agg.aggregates(), resolvedAggExp));
// wire it up to the filter with the new condition
Filter newFilter = new Filter(f.location(), newAgg, analyzedFilterCondition);
// and finally project the fluff away
return new Project(f.location(), newFilter, agg.output());
}
}
} catch (AnalysisException ex) {
}
return f;
return missing;
}
}
@ -1036,7 +994,7 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
}
left = l.same(common) ? left : new Cast(left.location(), left, common);
right = r.same(common) ? right : new Cast(right.location(), right, common);
return NodeUtils.copyTree(e, Arrays.asList(left, right));
return e.replaceChildren(Arrays.asList(left, right));
}
}

View File

@ -7,27 +7,31 @@ package org.elasticsearch.xpack.sql.analysis.analyzer;
import org.elasticsearch.xpack.sql.capabilities.Unresolvable;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.AttributeSet;
import org.elasticsearch.xpack.sql.expression.NamedExpression;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.Expressions;
import org.elasticsearch.xpack.sql.expression.UnresolvedAttribute;
import org.elasticsearch.xpack.sql.expression.function.Function;
import org.elasticsearch.xpack.sql.expression.function.FunctionAttribute;
import org.elasticsearch.xpack.sql.expression.function.Functions;
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction;
import org.elasticsearch.xpack.sql.plan.logical.Aggregate;
import org.elasticsearch.xpack.sql.plan.logical.Filter;
import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
import org.elasticsearch.xpack.sql.plan.logical.OrderBy;
import org.elasticsearch.xpack.sql.tree.Node;
import org.elasticsearch.xpack.sql.util.StringUtils;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import static java.lang.String.format;
import static java.util.stream.Collectors.toList;
abstract class Verifier {
@ -141,57 +145,207 @@ abstract class Verifier {
});
});
}
//
// Handle incorrect statement
//
havingContainsOnlyExistingAggs(p, localFailures);
// everything checks out
// mark the plan as analyzed
if (localFailures.isEmpty()) {
p.setAnalyzed();
}
failures.addAll(localFailures);
});
// Concrete verifications
//
// if there are no (major) unresolved failures, do more in-depth analysis
//
if (failures.isEmpty()) {
Map<String, Function> resolvedFunctions = new LinkedHashMap<>();
// collect Function to better reason about encountered attributes
plan.forEachExpressionsDown(e -> {
if (e.resolved() && e instanceof Function) {
Function f = (Function) e;
resolvedFunctions.put(f.functionId(), f);
}
});
// for filtering out duplicated errors
final Set<LogicalPlan> groupingFailures = new LinkedHashSet<>();
plan.forEachDown(p -> {
if (p.analyzed()) {
return;
}
// if the children are unresolved, so will this node; counting it will only add noise
if (!p.childrenResolved()) {
return;
}
Set<Failure> localFailures = new LinkedHashSet<>();
if (!groupingFailures.contains(p)) {
checkGroupBy(p, localFailures, resolvedFunctions, groupingFailures);
}
// everything checks out
// mark the plan as analyzed
if (localFailures.isEmpty()) {
p.setAnalyzed();
}
failures.addAll(localFailures);
});
}
return failures;
}
private static void havingContainsOnlyExistingAggs(LogicalPlan p, Set<Failure> failures) {
/**
 * Check validity of Aggregate/GroupBy.
 * This rule is needed for two reasons:
 * 1. a user might specify an invalid aggregate (SELECT foo GROUP BY bar)
 * 2. the order/having might contain a non-grouped attribute. This is typically caught
 *    by the Analyzer, however if wrapped in a function (such as ABS()) it gets resolved
 *    (because the expression gets resolved little by little without being pushed down,
 *    without the Analyzer modifying anything).
 */
private static boolean checkGroupBy(LogicalPlan p, Set<Failure> localFailures, Map<String, Function> resolvedFunctions, Set<LogicalPlan> groupingFailures) {
    // the three checks short-circuit: the first failing one stops further reporting
    return checkGroupByAgg(p, localFailures, groupingFailures, resolvedFunctions)
        && checkGroupByOrder(p, localFailures, groupingFailures, resolvedFunctions)
        && checkGroupByHaving(p, localFailures, groupingFailures, resolvedFunctions);
}
// check whether an orderBy failed
private static boolean checkGroupByOrder(LogicalPlan p, Set<Failure> localFailures, Set<LogicalPlan> groupingFailures, Map<String, Function> functions) {
if (p instanceof OrderBy) {
OrderBy o = (OrderBy) p;
if (o.child() instanceof Aggregate) {
Aggregate a = (Aggregate) o.child();
Map<Expression, Node<?>> missing = new LinkedHashMap<>();
o.order().forEach(oe -> oe.collectFirstChildren(c -> checkGroupMatch(c, oe, a.groupings(), missing, functions)));
if (!missing.isEmpty()) {
String plural = missing.size() > 1 ? "s" : StringUtils.EMPTY;
// get the location of the first missing expression as the order by might be on a different line
localFailures.add(
fail(missing.values().iterator().next(), "Cannot order by non-grouped column" + plural + " %s, expected %s",
Expressions.names(missing.keySet()),
Expressions.names(a.groupings())));
groupingFailures.add(a);
return false;
}
}
}
return true;
}
private static boolean checkGroupByHaving(LogicalPlan p, Set<Failure> localFailures, Set<LogicalPlan> groupingFailures, Map<String, Function> functions) {
if (p instanceof Filter) {
Filter f = (Filter) p;
if (f.child() instanceof Aggregate) {
Aggregate a = (Aggregate) f.child();
List<Attribute> aggs = new ArrayList<>();
Map<Expression, Node<?>> missing = new LinkedHashMap<>();
Expression condition = f.condition();
condition.collectFirstChildren(c -> checkGroupMatch(c, condition, a.groupings(), missing, functions));
a.aggregates().forEach(ne -> {
AggregateFunction af = Functions.extractAggregate(ne);
if (af != null) {
aggs.add(af.toAttribute());
}
});
final List<Attribute> filterAggs = new ArrayList<>();
f.condition().forEachUp(fa -> filterAggs.add(fa.toAttribute()), AggregateFunction.class);
AttributeSet missing = new AttributeSet(filterAggs).substract(new AttributeSet(aggs));
if (!missing.isEmpty()) {
List<String> missingNames = missing.stream()
.map(NamedExpression::name)
.collect(toList());
List<String> expectedNames = aggs.stream()
.map(NamedExpression::name)
.collect(toList());
failures.add(fail(p, "HAVING contains aggregations %s, expected one of %s ", missingNames, expectedNames));
String plural = missing.size() > 1 ? "s" : StringUtils.EMPTY;
localFailures.add(fail(condition, "Cannot filter by non-grouped column" + plural + " %s, expected %s",
Expressions.names(missing.keySet()),
Expressions.names(a.groupings())));
groupingFailures.add(a);
return false;
}
}
}
return true;
}
// check whether plain columns specified in an agg are mentioned in the group-by
// Also rejects groupings that are themselves aggregate functions (GROUP BY SUM(x)).
private static boolean checkGroupByAgg(LogicalPlan p, Set<Failure> localFailures, Set<LogicalPlan> groupingFailures, Map<String, Function> functions) {
    if (p instanceof Aggregate) {
        Aggregate a = (Aggregate) p;

        // The grouping can not be an aggregate function
        a.groupings().forEach(e -> e.forEachUp(c -> {
            if (Functions.isAggregate(c)) {
                localFailures.add(fail(c, "Cannot use an aggregate [" + c.nodeName().toUpperCase(Locale.ROOT) + "] for grouping"));
            }
        }));

        if (!localFailures.isEmpty()) {
            return false;
        }

        // The agg can be:
        // 1. plain column - in which case, there should be an equivalent in groupings
        // 2. aggregate over non-grouped column
        // 3. scalar function on top of 1 and/or 2. the function needs unfolding to make sure
        //    the 'source' is valid.
        // Note that grouping can be done by a function (GROUP BY YEAR(date)) which means date
        // cannot be used as a plain column, only YEAR(date) or aggs(?) on top of it
        Map<Expression, Node<?>> missing = new LinkedHashMap<>();
        a.aggregates().forEach(ne ->
            ne.collectFirstChildren(c -> checkGroupMatch(c, ne, a.groupings(), missing, functions)));

        if (!missing.isEmpty()) {
            String plural = missing.size() > 1 ? "s" : StringUtils.EMPTY;
            localFailures.add(fail(missing.values().iterator().next(), "Cannot use non-grouped column" + plural + " %s, expected %s",
                    Expressions.names(missing.keySet()),
                    Expressions.names(a.groupings())));
            return false;
        }
    }
    return true;
}
// Validates expression e (reached from source) against the groupings: a semantic match,
// a foldable expression or an aggregate is acceptable; bare leaves matching no grouping
// are recorded in `missing` together with the node they came from.
// NOTE(review): the boolean result drives collectFirstChildren - it appears true means
// "subtree handled, stop descending"; confirm against Node#collectFirstChildren.
private static boolean checkGroupMatch(Expression e, Node<?> source, List<Expression> groupings, Map<Expression, Node<?>> missing, Map<String, Function> functions) {
    // resolve FunctionAttribute to backing functions
    if (e instanceof FunctionAttribute) {
        FunctionAttribute fa = (FunctionAttribute) e;
        Function function = functions.get(fa.functionId());
        // TODO: this should be handled by a different rule
        if (function == null) {
            return false;
        }
        e = function;
    }

    // scalar functions can be a binary tree
    // first test the function against the grouping
    // and if that fails, start unpacking hoping to find matches
    if (e instanceof ScalarFunction) {
        ScalarFunction sf = (ScalarFunction) e;

        // found group for the expression
        if (Expressions.anyMatch(groupings, e::semanticEquals)) {
            return true;
        }

        // unwrap function to find the base
        for (Expression arg : sf.arguments()) {
            arg.collectFirstChildren(c -> checkGroupMatch(c, source, groupings, missing, functions));
        }
        return true;
    }

    // skip literals / foldable
    if (e.foldable()) {
        return true;
    }
    // skip aggs (allowed to refer to non-group columns)
    // TODO: need to check whether it's possible to agg on a field used inside a scalar for grouping
    if (Functions.isAggregate(e)) {
        return true;
    }
    // left without leaves which have to match; if not there's a failure
    final Expression exp = e;
    if (e.children().isEmpty()) {
        // attributes are compared against the grouping's attribute, everything else directly
        if (!Expressions.anyMatch(groupings, c -> exp.semanticEquals(exp instanceof Attribute ? Expressions.attribute(c) : c))) {
            missing.put(e, source);
        }
        return true;
    }
    return false;
}
}

View File

@ -15,6 +15,7 @@ import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.NestedSortBuilder;
import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.sort.SortOrder;
@ -43,7 +44,9 @@ import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;import static java.util.Collections.singletonList;
import java.util.Set;
import static java.util.Collections.singletonList;
import static org.elasticsearch.search.sort.SortBuilders.fieldSort;
import static org.elasticsearch.search.sort.SortBuilders.scriptSort;
@ -73,14 +76,16 @@ public abstract class SourceGenerator {
for (ColumnReference ref : container.columns()) {
collectFields(ref, sourceFields, docFields, scriptFields);
}
}
if (!sourceFields.isEmpty()) {
source.fetchSource(sourceFields.toArray(new String[sourceFields.size()]), null);
}
for (String field : docFields) {
source.docValueField(field);
}
for (String field : docFields) {
source.docValueField(field);
}
for (Entry<String, Script> entry : scriptFields.entrySet()) {
source.scriptField(entry.getKey(), entry.getValue());
}
@ -152,7 +157,18 @@ public abstract class SourceGenerator {
FieldSortBuilder fieldSort = fieldSort(nfa.name());
String nestedPath = nfa.parentPath();
fieldSort.setNestedPath(nestedPath);
NestedSortBuilder newSort = new NestedSortBuilder(nestedPath);
NestedSortBuilder nestedSort = fieldSort.getNestedSort();
if (nestedSort == null) {
fieldSort.setNestedSort(newSort);
} else {
for (; nestedSort.getNestedSort() != null; nestedSort = nestedSort.getNestedSort()) {
}
nestedSort.setNestedSort(newSort);
}
nestedSort = newSort;
List<QueryBuilder> nestedQuery = new ArrayList<>(1);
@ -171,20 +187,21 @@ public abstract class SourceGenerator {
if (nestedQuery.size() > 1) {
throw new SqlIllegalArgumentException("nested query should have been grouped in one place");
}
fieldSort.setNestedFilter(nestedQuery.get(0));
nestedSort.setFilter(nestedQuery.get(0));
}
sortBuilder = fieldSort;
}
sortBuilder.order(as.direction() == Direction.ASC? SortOrder.ASC : SortOrder.DESC);
}
if (sortable instanceof ScriptSort) {
ScriptSort ss = (ScriptSort) sortable;
sortBuilder = scriptSort(ss.script().toPainless(), ss.script().outputType().isNumeric() ? ScriptSortType.NUMBER : ScriptSortType.STRING);
}
source.sort(sortBuilder);
if (sortBuilder != null) {
sortBuilder.order(sortable.direction() == Direction.ASC ? SortOrder.ASC : SortOrder.DESC);
source.sort(sortBuilder);
}
}
}
else {

View File

@ -15,6 +15,12 @@ public class Alias extends NamedExpression {
private final Expression child;
private final String qualifier;
/**
* Postpone attribute creation until it is actually created.
* Being immutable, create only one instance.
*/
private Attribute lazyAttribute;
public Alias(Location location, String name, Expression child) {
this(location, name, null, child, null);
}
@ -53,6 +59,13 @@ public class Alias extends NamedExpression {
@Override
public Attribute toAttribute() {
if (lazyAttribute == null) {
lazyAttribute = createAttribute();
}
return lazyAttribute;
}
private Attribute createAttribute() {
if (resolved()) {
Expression c = child();

View File

@ -52,6 +52,10 @@ public abstract class Attribute extends NamedExpression {
return new AttributeSet(this);
}
public Attribute withLocation(Location location) {
return Objects.equals(location(), location) ? this : clone(location, name(), dataType(), qualifier(), nullable(), id(), synthetic());
}
public Attribute withQualifier(String qualifier) {
return Objects.equals(qualifier(), qualifier) ? this : clone(location(), name(), dataType(), qualifier, nullable(), id(), synthetic());
}
@ -75,6 +79,16 @@ public abstract class Attribute extends NamedExpression {
return this;
}
@Override
public int semanticHash() {
return id().hashCode();
}
@Override
public boolean semanticEquals(Expression other) {
return other instanceof Attribute ? id().equals(((Attribute) other).id()) : false;
}
@Override
public String toString() {
return name() + "{" + label() + "}" + "#" + id();

View File

@ -0,0 +1,325 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression;
import java.util.AbstractSet;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.stream.Stream;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static java.util.Collections.unmodifiableCollection;
import static java.util.Collections.unmodifiableSet;
/**
 * Map keyed by {@link Attribute} using <em>semantic</em> identity
 * ({@code semanticHash()}/{@code semanticEquals()}, i.e. the expression id)
 * instead of plain {@code equals}, so the same attribute is matched across
 * different plan nodes. Iteration follows insertion order (LinkedHashMap).
 */
public class AttributeMap<E> {

    // Adapter that makes the backing map hash/compare attributes by their
    // semantic identity (expression id) rather than full structural equality.
    static class AttributeWrapper {

        private final Attribute attr;

        AttributeWrapper(Attribute attr) {
            this.attr = attr;
        }

        @Override
        public int hashCode() {
            return attr.semanticHash();
        }

        @Override
        public boolean equals(Object obj) {
            if (obj instanceof AttributeWrapper) {
                AttributeWrapper aw = (AttributeWrapper) obj;
                return attr.semanticEquals(aw.attr);
            }
            return false;
        }

        @Override
        public String toString() {
            return attr.toString();
        }
    }

    /**
     * Set that does unwrapping of keys inside the keySet and iterator.
     */
    private abstract static class UnwrappingSet<W, U> extends AbstractSet<U> {
        private final Set<W> set;

        UnwrappingSet(Set<W> originalSet) {
            set = unmodifiableSet(originalSet);
        }

        @Override
        public Iterator<U> iterator() {
            return new Iterator<U>() {
                final Iterator<W> i = set.iterator();

                @Override
                public boolean hasNext() {
                    return i.hasNext();
                }

                @Override
                public U next() {
                    return unwrap(i.next());
                }
            };
        }

        // converts the wrapped element into the exposed one
        protected abstract U unwrap(W next);

        @Override
        public Stream<U> stream() {
            return set.stream().map(this::unwrap);
        }

        @Override
        public Stream<U> parallelStream() {
            return set.parallelStream().map(this::unwrap);
        }

        @Override
        public int size() {
            return set.size();
        }

        @Override
        public boolean equals(Object o) {
            // NOTE(review): compares the *wrapped* set against o, so equality is
            // asymmetric w.r.t. the exposed (unwrapped) elements — confirm intended.
            return set.equals(o);
        }

        @Override
        public int hashCode() {
            return set.hashCode();
        }

        @Override
        public Object[] toArray() {
            // Build the array through unwrap() so this works for any element
            // type (keySet and entrySet alike). The previous implementation
            // cast every element to AttributeWrapper, which threw
            // ClassCastException when called on entrySet() (whose elements
            // are Map.Entry instances, not AttributeWrapper).
            Object[] array = new Object[set.size()];
            int i = 0;
            for (U u : this) {
                array[i++] = u;
            }
            return array;
        }

        @Override
        @SuppressWarnings("unchecked")
        public <A> A[] toArray(A[] a) {
            // collection is immutable so use that to our advantage
            if (a.length < size())
                a = (A[]) java.lang.reflect.Array.newInstance(a.getClass().getComponentType(), size());
            int i = 0;
            Object[] result = a;
            for (U u : this) {
                result[i++] = u;
            }
            // array larger than size, mark the ending element as null
            if (a.length > size()) {
                a[size()] = null;
            }
            return a;
        }

        @Override
        public String toString() {
            return set.toString();
        }
    };

    private final Map<AttributeWrapper, E> delegate;
    // lazily-built, cached views (see keySet()/values()/entrySet())
    private Set<Attribute> keySet = null;
    private Collection<E> values = null;
    private Set<Entry<Attribute, E>> entrySet = null;

    public AttributeMap() {
        delegate = new LinkedHashMap<>();
    }

    public AttributeMap(Map<Attribute, E> attr) {
        if (attr.isEmpty()) {
            delegate = emptyMap();
        }
        else {
            delegate = new LinkedHashMap<>(attr.size());
            for (Entry<Attribute, E> entry : attr.entrySet()) {
                delegate.put(new AttributeWrapper(entry.getKey()), entry.getValue());
            }
        }
    }

    public AttributeMap(Attribute key, E value) {
        delegate = singletonMap(new AttributeWrapper(key), value);
    }

    // NOTE(review): when the map was built via the Map or single-entry
    // constructors the delegate is immutable (emptyMap/singletonMap) and
    // add/addAll throw UnsupportedOperationException — presumably these are
    // only invoked on maps created with the no-arg constructor; confirm.
    void add(Attribute key, E value) {
        delegate.put(new AttributeWrapper(key), value);
    }

    // package protected - merges another map into this one, reusing the
    // other map's wrappers instead of copying them
    void addAll(AttributeMap<E> other) {
        delegate.putAll(other.delegate);
    }

    // returns the entries of this map whose keys are NOT present in other
    // (name is a typo for "subtract", kept for API compatibility)
    public AttributeMap<E> substract(AttributeMap<E> other) {
        AttributeMap<E> diff = new AttributeMap<>();
        for (Entry<AttributeWrapper, E> entry : this.delegate.entrySet()) {
            if (!other.delegate.containsKey(entry.getKey())) {
                diff.delegate.put(entry.getKey(), entry.getValue());
            }
        }
        return diff;
    }

    // returns the entries whose keys appear in both maps; values are taken
    // from the smaller of the two maps
    public AttributeMap<E> intersect(AttributeMap<E> other) {
        AttributeMap<E> smaller = (other.size() > size() ? this : other);
        AttributeMap<E> larger = (smaller == this ? other : this);

        AttributeMap<E> intersect = new AttributeMap<>();
        for (Entry<AttributeWrapper, E> entry : smaller.delegate.entrySet()) {
            if (larger.delegate.containsKey(entry.getKey())) {
                intersect.delegate.put(entry.getKey(), entry.getValue());
            }
        }
        return intersect;
    }

    // true if every key of this map is also a key of other
    public boolean subsetOf(AttributeMap<E> other) {
        if (this.size() > other.size()) {
            return false;
        }
        for (AttributeWrapper aw : delegate.keySet()) {
            if (!other.delegate.containsKey(aw)) {
                return false;
            }
        }
        return true;
    }

    // names of all keys, in insertion order
    public Set<String> attributeNames() {
        Set<String> s = new LinkedHashSet<>(size());
        for (AttributeWrapper aw : delegate.keySet()) {
            s.add(aw.attr.name());
        }
        return s;
    }

    public int size() {
        return delegate.size();
    }

    public boolean isEmpty() {
        return delegate.isEmpty();
    }

    // any NamedExpression is accepted as a key probe (its attribute is used)
    public boolean containsKey(Object key) {
        if (key instanceof NamedExpression) {
            return delegate.keySet().contains(new AttributeWrapper(((NamedExpression) key).toAttribute()));
        }
        return false;
    }

    public boolean containsValue(Object value) {
        return delegate.values().contains(value);
    }

    public E get(Object key) {
        if (key instanceof NamedExpression) {
            return delegate.get(new AttributeWrapper(((NamedExpression) key).toAttribute()));
        }
        return null;
    }

    public E getOrDefault(Object key, E defaultValue) {
        E e;
        // the containsKey check distinguishes an absent key from a key
        // explicitly mapped to null
        return (((e = get(key)) != null) || containsKey(key))
                ? e
                : defaultValue;
    }

    public Set<Attribute> keySet() {
        if (keySet == null) {
            keySet = new UnwrappingSet<AttributeWrapper, Attribute>(delegate.keySet()) {
                @Override
                protected Attribute unwrap(AttributeWrapper next) {
                    return next.attr;
                }
            };
        }
        return keySet;
    }

    public Collection<E> values() {
        if (values == null) {
            values = unmodifiableCollection(delegate.values());
        }
        return values;
    }

    public Set<Entry<Attribute, E>> entrySet() {
        if (entrySet == null) {
            entrySet = new UnwrappingSet<Entry<AttributeWrapper, E>, Entry<Attribute, E>>(delegate.entrySet()) {
                @Override
                protected Entry<Attribute, E> unwrap(final Entry<AttributeWrapper, E> next) {
                    // read-only Entry view exposing the unwrapped attribute key
                    return new Entry<Attribute, E>() {
                        @Override
                        public Attribute getKey() {
                            return next.getKey().attr;
                        }

                        @Override
                        public E getValue() {
                            return next.getValue();
                        }

                        @Override
                        public E setValue(E value) {
                            throw new UnsupportedOperationException();
                        }
                    };
                }
            };
        }
        return entrySet;
    }

    public void forEach(BiConsumer<? super Attribute, ? super E> action) {
        delegate.forEach((k, v) -> action.accept(k.attr, v));
    }

    @Override
    public int hashCode() {
        return delegate.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        // unwrap another AttributeMap to its delegate so two AttributeMaps
        // compare by contents; a plain Map argument is compared directly
        // against the wrapper-keyed delegate
        if (obj instanceof AttributeMap<?>) {
            obj = ((AttributeMap<?>) obj).delegate;
        }
        return delegate.equals(obj);
    }

    @Override
    public String toString() {
        return delegate.toString();
    }
}

View File

@ -7,121 +7,74 @@ package org.elasticsearch.xpack.sql.expression;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.Spliterator;
import java.util.function.Consumer;
import java.util.function.Predicate;
import java.util.stream.Stream;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptySet;
import static java.util.Collections.singleton;
import static java.util.Collections.emptyMap;
public class AttributeSet implements Set<Attribute> {
public static final AttributeSet EMPTY = new AttributeSet(emptyList());
private static final AttributeMap<Object> EMPTY_DELEGATE = new AttributeMap<>(emptyMap());
private static class AttributeWrapper {
public static final AttributeSet EMPTY = new AttributeSet(EMPTY_DELEGATE);
private final Attribute attr;
// use the same name as in HashSet
private static final Object PRESENT = new Object();
AttributeWrapper(Attribute attr) {
this.attr = attr;
}
@Override
public int hashCode() {
return attr.id().hashCode();
}
@Override
public boolean equals(Object obj) {
if (obj instanceof AttributeWrapper) {
AttributeWrapper aw = (AttributeWrapper) obj;
if (attr.getClass() == aw.attr.getClass()) {
return attr.id().equals(aw.attr.id());
}
}
return false;
}
@Override
public String toString() {
return "AttrWrap[" + attr + "]";
}
}
private final Set<AttributeWrapper> delegate;
private final AttributeMap<Object> delegate;
public AttributeSet() {
this.delegate = new LinkedHashSet<>();
delegate = new AttributeMap<>();
}
public AttributeSet(Attribute attr) {
delegate = new AttributeMap<Object>(attr, PRESENT);
}
public AttributeSet(Collection<Attribute> attr) {
if (attr.isEmpty()) {
this.delegate = emptySet();
delegate = EMPTY_DELEGATE;
}
else {
this.delegate = new LinkedHashSet<>(attr.size());
delegate = new AttributeMap<Object>();
for (Attribute a : attr) {
delegate.add(new AttributeWrapper(a));
delegate.add(a, PRESENT);
}
}
}
public AttributeSet(Attribute attr) {
this.delegate = singleton(new AttributeWrapper(attr));
private AttributeSet(AttributeMap<Object> delegate) {
this.delegate = delegate;
}
// package protected - should be called through Expressions to cheaply create
// a set from a collection of sets without too much copying
void addAll(AttributeSet other) {
this.delegate.addAll(other.delegate);
delegate.addAll(other.delegate);
}
public AttributeSet substract(AttributeSet other) {
AttributeSet diff = new AttributeSet();
for (AttributeWrapper aw : this.delegate) {
if (!other.delegate.contains(aw)) {
diff.delegate.add(aw);
}
}
return diff;
return new AttributeSet(delegate.substract(other.delegate));
}
public AttributeSet intersect(AttributeSet other) {
AttributeSet smaller = (other.size() > size() ? this : other);
AttributeSet larger = (smaller == this ? other : this);
AttributeSet intersect = new AttributeSet();
for (AttributeWrapper aw : smaller.delegate) {
if (larger.contains(aw)) {
intersect.delegate.add(aw);
}
}
return intersect;
return new AttributeSet(delegate.intersect(other.delegate));
}
public boolean subsetOf(AttributeSet other) {
if (this.size() > other.size()) {
return false;
}
for (AttributeWrapper aw : delegate) {
if (!other.delegate.contains(aw)) {
return false;
}
}
return delegate.subsetOf(other.delegate);
}
return true;
public Set<String> names() {
return delegate.attributeNames();
}
public void forEach(Consumer<? super Attribute> action) {
delegate.forEach(e -> action.accept(e.attr));
delegate.forEach((k, v) -> action.accept(k));
}
public int size() {
@ -133,15 +86,12 @@ public class AttributeSet implements Set<Attribute> {
}
public boolean contains(Object o) {
if (o instanceof NamedExpression) {
return delegate.contains(new AttributeWrapper(((NamedExpression) o).toAttribute()));
}
return false;
return delegate.containsKey(o);
}
public boolean containsAll(Collection<?> c) {
for (Object o : c) {
if (!delegate.contains(o)) {
if (!delegate.containsKey(o)) {
return false;
}
}
@ -149,42 +99,15 @@ public class AttributeSet implements Set<Attribute> {
}
public Iterator<Attribute> iterator() {
return new Iterator<Attribute>() {
Iterator<AttributeWrapper> internal = delegate.iterator();
@Override
public boolean hasNext() {
return internal.hasNext();
}
@Override
public Attribute next() {
return internal.next().attr;
}
};
return delegate.keySet().iterator();
}
public Object[] toArray() {
Object[] array = delegate.toArray();
for (int i = 0; i < array.length; i++) {
array[i] = ((AttributeWrapper) array[i]).attr;
}
return array;
return delegate.keySet().toArray();
}
@SuppressWarnings("unchecked")
public <T> T[] toArray(T[] a) {
if (a.length < size())
a = (T[]) java.lang.reflect.Array.newInstance(a.getClass().getComponentType(), size());
int i = 0;
Object[] result = a;
for (Attribute attr : this) {
result[i++] = attr;
}
if (a.length > size()) {
a[size()] = null;
}
return a;
return delegate.keySet().toArray(a);
}
public boolean add(Attribute e) {
@ -211,14 +134,6 @@ public class AttributeSet implements Set<Attribute> {
throw new UnsupportedOperationException();
}
public boolean equals(Object o) {
return delegate.equals(o);
}
public int hashCode() {
return delegate.hashCode();
}
public Spliterator<Attribute> spliterator() {
throw new UnsupportedOperationException();
}
@ -228,15 +143,22 @@ public class AttributeSet implements Set<Attribute> {
}
public Stream<Attribute> stream() {
return delegate.stream().map(e -> e.attr);
return delegate.keySet().stream();
}
public Stream<Attribute> parallelStream() {
return delegate.parallelStream().map(e -> e.attr);
return delegate.keySet().parallelStream();
}
public boolean equals(Object o) {
return delegate.equals(o);
}
public int hashCode() {
return delegate.hashCode();
}
@Override
public String toString() {
return delegate.toString();
return delegate.keySet().toString();
}
}

View File

@ -68,6 +68,7 @@ public abstract class Expression extends Node<Expression> implements Resolvable
public abstract boolean nullable();
// the references/inputs/leaves of the expression tree
public AttributeSet references() {
return Expressions.references(children());
}
@ -101,11 +102,11 @@ public abstract class Expression extends Node<Expression> implements Resolvable
return this;
}
public final boolean canonicalEquals(Expression other) {
public boolean semanticEquals(Expression other) {
return canonical().equals(other.canonical());
}
public final int canonicalHash() {
public int semanticHash() {
return canonical().hashCode();
}

View File

@ -9,10 +9,10 @@ import java.util.Objects;
public class ExpressionId {
private final String id;
private final int id;
private final String jvmId;
ExpressionId(String id, String jvmId) {
ExpressionId(int id, String jvmId) {
this.id = id;
this.jvmId = jvmId;
}
@ -33,13 +33,13 @@ public class ExpressionId {
}
ExpressionId other = (ExpressionId) obj;
return Objects.equals(id, other.id)
return id == other.id
&& Objects.equals(jvmId, other.jvmId);
}
@Override
public String toString() {
return id;
return String.valueOf(id);
//#+ jvmId;
}
}

View File

@ -6,7 +6,7 @@
package org.elasticsearch.xpack.sql.expression;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicInteger;
//TODO: this class is thread-safe but used across multiple sessions might cause the id to roll over and potentially generate an already assigned id
// making this session scope would simplify things
@ -15,12 +15,12 @@ import java.util.concurrent.atomic.AtomicLong;
// TODO: hook this into SqlSession#SessionContext
public class ExpressionIdGenerator {
private static final AtomicLong GLOBAL_ID = new AtomicLong();
private static final AtomicInteger GLOBAL_ID = new AtomicInteger();
private static final String JVM_ID = "@" + UUID.randomUUID().toString();
public static final ExpressionId EMPTY = new ExpressionId("<empty>", "@<empty>");
public static final ExpressionId EMPTY = new ExpressionId(-1, "@<empty>");
public static ExpressionId newId() {
return new ExpressionId(String.valueOf(GLOBAL_ID.getAndIncrement()), JVM_ID);
return new ExpressionId(GLOBAL_ID.getAndIncrement(), JVM_ID);
}
}

View File

@ -8,6 +8,7 @@ package org.elasticsearch.xpack.sql.expression;
import org.elasticsearch.xpack.sql.expression.Expression.TypeResolution;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.function.Predicate;
@ -22,6 +23,10 @@ public abstract class Expressions {
.collect(toList());
}
public static NamedExpression wrapAsNamed(Expression exp) {
return exp instanceof NamedExpression ? (NamedExpression) exp : new Alias(exp.location(), exp.nodeName(), exp);
}
public static List<Attribute> asAttributes(List<? extends NamedExpression> named) {
if (named.isEmpty()) {
return emptyList();
@ -33,7 +38,7 @@ public abstract class Expressions {
return list;
}
public static boolean anyMatchInList(List<? extends Expression> exps, Predicate<? super Expression> predicate) {
public static boolean anyMatch(List<? extends Expression> exps, Predicate<? super Expression> predicate) {
for (Expression exp : exps) {
if (exp.anyMatch(predicate)) {
return true;
@ -67,6 +72,15 @@ public abstract class Expressions {
return e instanceof NamedExpression ? ((NamedExpression) e).name() : e.nodeName();
}
public static List<String> names(Collection<Expression> e) {
List<String> names = new ArrayList<>(e.size());
for (Expression ex : e) {
names.add(name(ex));
}
return names;
}
public static Attribute attribute(Expression e) {
return e instanceof NamedExpression ? ((NamedExpression) e).toAttribute() : null;
}
@ -79,5 +93,4 @@ public abstract class Expressions {
return e.dataType().isNumeric()? TypeResolution.TYPE_RESOLVED : new TypeResolution(
"Argument required to be numeric ('%s' of type '%s')", Expressions.name(e), e.dataType().esName());
}
}

View File

@ -13,12 +13,14 @@ import org.elasticsearch.xpack.sql.util.CollectionUtils;
import java.util.List;
import java.util.Locale;
import java.util.Objects;
import static java.lang.String.format;
public class UnresolvedAttribute extends Attribute implements Unresolvable {
private final String unresolvedMsg;
private final Object resolutionMetadata;
public UnresolvedAttribute(Location location, String name) {
this(location, name, null);
@ -29,8 +31,18 @@ public class UnresolvedAttribute extends Attribute implements Unresolvable {
}
public UnresolvedAttribute(Location location, String name, String qualifier, String unresolvedMessage) {
super(location, name, qualifier, null);
this(location, name, qualifier, null, unresolvedMessage, null);
}
public UnresolvedAttribute(Location location, String name, String qualifier, ExpressionId id, String unresolvedMessage, Object resolutionMetadata) {
super(location, name, qualifier, id);
this.unresolvedMsg = unresolvedMessage == null ? errorMessage(qualifiedName(), null) : unresolvedMessage;
this.resolutionMetadata = resolutionMetadata;
}
public Object resolutionMetadata() {
return resolutionMetadata;
}
@Override
@ -43,6 +55,10 @@ public class UnresolvedAttribute extends Attribute implements Unresolvable {
return this;
}
public UnresolvedAttribute withUnresolvedMessage(String unresolvedMsg) {
return new UnresolvedAttribute(location(), name(), qualifier(), id(), unresolvedMsg, resolutionMetadata());
}
@Override
public DataType dataType() {
throw new UnresolvedException("dataType", this);
@ -75,4 +91,13 @@ public class UnresolvedAttribute extends Attribute implements Unresolvable {
}
return msg;
}
@Override
public boolean equals(Object obj) {
if (super.equals(obj)) {
UnresolvedAttribute ua = (UnresolvedAttribute) obj;
return Objects.equals(resolutionMetadata, ua.resolutionMetadata) && Objects.equals(unresolvedMsg, ua.unresolvedMsg);
}
return false;
}
}

View File

@ -54,6 +54,11 @@ public abstract class Function extends NamedExpression {
return functionName;
}
// TODO: ExpressionId might be converted into an Int which could make the String an int as well
public String functionId() {
return id().toString();
}
protected String functionArgs() {
StringJoiner sj = new StringJoiner(",", "(", ")");
for (Expression child : children()) {

View File

@ -0,0 +1,33 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression.function;
import org.elasticsearch.xpack.sql.expression.ExpressionId;
import org.elasticsearch.xpack.sql.expression.TypedAttribute;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
import java.util.Objects;
/**
 * Attribute produced by a function (shared base for scalar and aggregate
 * function attributes). In addition to the usual attribute identity it
 * carries the {@code functionId} of the generating function so the function
 * can be tracked across contexts.
 */
public abstract class FunctionAttribute extends TypedAttribute {

    // id of the function that produced this attribute
    private final String functionId;

    protected FunctionAttribute(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id,
            boolean synthetic, String functionId) {
        super(location, name, dataType, qualifier, nullable, id, synthetic);
        this.functionId = functionId;
    }

    public String functionId() {
        return functionId;
    }

    @Override
    public boolean equals(Object obj) {
        // NOTE(review): the cast is only safe if super.equals rejects objects of a
        // different concrete class — presumably Attribute/NamedExpression compare
        // getClass(); confirm, otherwise this can throw ClassCastException.
        return super.equals(obj) && Objects.equals(functionId, ((FunctionAttribute) obj).functionId());
    }
}

View File

@ -5,45 +5,12 @@
*/
package org.elasticsearch.xpack.sql.expression.function;
import org.elasticsearch.xpack.sql.expression.Alias;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.NamedExpression;
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.BinaryScalarFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction;
public abstract class Functions {
public static boolean isAggregateFunction(Expression e) {
public static boolean isAggregate(Expression e) {
return e instanceof AggregateFunction;
}
public static boolean isUnaryScalarFunction(Expression e) {
if (e instanceof BinaryScalarFunction) {
throw new UnsupportedOperationException("not handled currently");
}
return e instanceof UnaryScalarFunction;
}
public static AggregateFunction extractAggregate(NamedExpression ne) {
Expression e = ne;
while (e != null) {
if (e instanceof Alias) {
e = ((Alias) ne).child();
}
else if (e instanceof UnaryScalarFunction) {
e = ((UnaryScalarFunction) e).field();
}
else if (e instanceof BinaryScalarFunction) {
throw new UnsupportedOperationException();
}
else if (e instanceof AggregateFunction) {
return (AggregateFunction) e;
}
else {
e = null;
}
}
return null;
}
}

View File

@ -20,6 +20,8 @@ public abstract class AggregateFunction extends Function {
private final Expression field;
private final List<Expression> parameters;
private AggregateFunctionAttribute lazyAttribute;
AggregateFunction(Location location, Expression field) {
this(location, field, emptyList());
}
@ -38,13 +40,12 @@ public abstract class AggregateFunction extends Function {
return parameters;
}
public String functionId() {
return id().toString();
}
@Override
public AggregateFunctionAttribute toAttribute() {
// this is highly correlated with QueryFolder$FoldAggregate#addFunction (regarding the function name within the querydsl)
return new AggregateFunctionAttribute(location(), name(), dataType(), id(), functionId(), null);
if (lazyAttribute == null) {
// this is highly correlated with QueryFolder$FoldAggregate#addFunction (regarding the function name within the querydsl)
lazyAttribute = new AggregateFunctionAttribute(location(), name(), dataType(), id(), functionId(), null);
}
return lazyAttribute;
}
}

View File

@ -5,18 +5,17 @@
*/
package org.elasticsearch.xpack.sql.expression.function.aggregate;
import java.util.Objects;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.ExpressionId;
import org.elasticsearch.xpack.sql.expression.TypedAttribute;
import org.elasticsearch.xpack.sql.expression.function.FunctionAttribute;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
public class AggregateFunctionAttribute extends TypedAttribute {
import java.util.Objects;
public class AggregateFunctionAttribute extends FunctionAttribute {
private final String functionId;
private final String propertyPath;
AggregateFunctionAttribute(Location location, String name, DataType dataType, ExpressionId id, String functionId, String propertyPath) {
@ -24,15 +23,10 @@ public class AggregateFunctionAttribute extends TypedAttribute {
}
AggregateFunctionAttribute(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id, boolean synthetic, String functionId, String propertyPath) {
super(location, name, dataType, qualifier, nullable, id, synthetic);
this.functionId = functionId;
super(location, name, dataType, qualifier, nullable, id, synthetic, functionId);
this.propertyPath = propertyPath;
}
public String functionId() {
return functionId;
}
public String propertyPath() {
return propertyPath;
}
@ -46,7 +40,7 @@ public class AggregateFunctionAttribute extends TypedAttribute {
protected Attribute clone(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) {
// this is highly correlated with QueryFolder$FoldAggregate#addFunction (regarding the function name within the querydsl)
// that is the functionId is actually derived from the expression id to easily track it across contexts
return new AggregateFunctionAttribute(location, name, dataType, qualifier, nullable, id, synthetic, functionId, propertyPath);
return new AggregateFunctionAttribute(location, name, dataType, qualifier, nullable, id, synthetic, functionId(), propertyPath);
}
public AggregateFunctionAttribute withFunctionId(String functionId, String propertyPath) {
@ -55,15 +49,11 @@ public class AggregateFunctionAttribute extends TypedAttribute {
@Override
public boolean equals(Object obj) {
if (super.equals(obj)) {
AggregateFunctionAttribute other = (AggregateFunctionAttribute) obj;
return Objects.equals(functionId, other.functionId) && Objects.equals(propertyPath, other.propertyPath);
}
return false;
return super.equals(obj) && Objects.equals(propertyPath(), ((AggregateFunctionAttribute) obj).propertyPath());
}
@Override
protected String label() {
return "a";
return "a->" + functionId();
}
}

View File

@ -5,20 +5,12 @@
*/
package org.elasticsearch.xpack.sql.expression.function.scalar;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.Expressions;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
import org.elasticsearch.xpack.sql.tree.Location;
import java.util.Arrays;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate;
public abstract class BinaryScalarFunction extends ScalarFunction {
private final Expression left, right;
@ -37,16 +29,13 @@ public abstract class BinaryScalarFunction extends ScalarFunction {
return right;
}
@Override
public boolean foldable() {
return left.foldable() && right.foldable();
}
@Override
public ScalarFunctionAttribute toAttribute() {
return new ScalarFunctionAttribute(location(), name(), dataType(), id(), asScript(), orderBy(), asProcessorDefinition());
}
protected ScriptTemplate asScript() {
public ScriptTemplate asScript() {
ScriptTemplate leftScript = asScript(left());
ScriptTemplate rightScript = asScript(right());
@ -54,45 +43,4 @@ public abstract class BinaryScalarFunction extends ScalarFunction {
}
protected abstract ScriptTemplate asScriptFrom(ScriptTemplate leftScript, ScriptTemplate rightScript);
protected ScriptTemplate asScript(Expression exp) {
if (exp.foldable()) {
return asScriptFromFoldable(exp);
}
Attribute attr = Expressions.attribute(exp);
if (attr != null) {
if (attr instanceof ScalarFunctionAttribute) {
return asScriptFrom((ScalarFunctionAttribute) attr);
}
if (attr instanceof AggregateFunctionAttribute) {
return asScriptFrom((AggregateFunctionAttribute) attr);
}
// fall-back to
return asScriptFrom((FieldAttribute) attr);
}
throw new SqlIllegalArgumentException("Cannot evaluate script for field %s", exp);
}
protected ScriptTemplate asScriptFrom(ScalarFunctionAttribute scalar) {
return scalar.script();
}
protected ScriptTemplate asScriptFrom(AggregateFunctionAttribute aggregate) {
return new ScriptTemplate(formatTemplate("{}"),
paramsBuilder().agg(aggregate.functionId(), aggregate.propertyPath()).build(),
aggregate.dataType());
}
protected ScriptTemplate asScriptFrom(FieldAttribute field) {
return new ScriptTemplate(formatTemplate("doc[{}].value"),
paramsBuilder().variable(field.name()).build(),
field.dataType());
}
protected ScriptTemplate asScriptFromFoldable(Expression foldable) {
return new ScriptTemplate(formatTemplate("{}"),
paramsBuilder().variable(foldable.fold()).build(),
foldable.dataType());
}
}

View File

@ -7,7 +7,6 @@ package org.elasticsearch.xpack.sql.expression.function.scalar;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.UnaryProcessorDefinition;
@ -19,9 +18,6 @@ import org.elasticsearch.xpack.sql.type.DataTypeConversion;
import java.util.Objects;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate;
public class Cast extends UnaryScalarFunction {
private final DataType dataType;
@ -66,24 +62,11 @@ public class Cast extends UnaryScalarFunction {
new TypeResolution("Cannot cast %s to %s", from(), to());
}
@Override
protected ScriptTemplate asScriptFrom(AggregateFunctionAttribute aggregate) {
// for aggs, there are no params only bucket paths
Params params = paramsBuilder().agg(aggregate.functionId(), aggregate.propertyPath()).build();
return new ScriptTemplate(formatTemplate("{}"), params, aggregate.dataType());
}
@Override
protected ScriptTemplate asScriptFrom(ScalarFunctionAttribute scalar) {
return scalar.script();
}
@Override
protected String chainScalarTemplate(String template) {
return template;
}
@Override
protected ScriptTemplate asScriptFrom(FieldAttribute field) {
return new ScriptTemplate(field.name(), Params.EMPTY, field.dataType());

View File

@ -5,20 +5,30 @@
*/
package org.elasticsearch.xpack.sql.expression.function.scalar;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.Expressions;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.expression.function.Function;
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.Params;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
import org.elasticsearch.xpack.sql.tree.Location;
import java.util.List;
import static java.util.Collections.emptyList;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate;
public abstract class ScalarFunction extends Function {
private ScalarFunctionAttribute lazyAttribute = null;
private ProcessorDefinition lazyProcessor = null;
protected ScalarFunction(Location location) {
super(location, emptyList());
}
@ -28,9 +38,63 @@ public abstract class ScalarFunction extends Function {
}
@Override
public abstract ScalarFunctionAttribute toAttribute();
public ScalarFunctionAttribute toAttribute() {
if (lazyAttribute == null) {
lazyAttribute = new ScalarFunctionAttribute(location(), name(), dataType(), id(), functionId(), asScript(), orderBy(),
asProcessorDefinition());
}
return lazyAttribute;
}
protected abstract ScriptTemplate asScript();
public abstract ScriptTemplate asScript();
// utility methods for creating the actual scripts
protected ScriptTemplate asScript(Expression exp) {
if (exp.foldable()) {
return asScriptFromFoldable(exp);
}
Attribute attr = Expressions.attribute(exp);
if (attr != null) {
if (attr instanceof ScalarFunctionAttribute) {
return asScriptFrom((ScalarFunctionAttribute) attr);
}
if (attr instanceof AggregateFunctionAttribute) {
return asScriptFrom((AggregateFunctionAttribute) attr);
}
// fall-back to
return asScriptFrom((FieldAttribute) attr);
}
throw new SqlIllegalArgumentException("Cannot evaluate script for expression %s", exp);
}
protected ScriptTemplate asScriptFrom(ScalarFunctionAttribute scalar) {
ScriptTemplate nested = scalar.script();
Params p = paramsBuilder().script(nested.params()).build();
return new ScriptTemplate(formatScript(nested.template()), p, dataType());
}
protected ScriptTemplate asScriptFromFoldable(Expression foldable) {
return new ScriptTemplate(formatScript("{}"),
paramsBuilder().variable(foldable.fold()).build(),
foldable.dataType());
}
protected ScriptTemplate asScriptFrom(FieldAttribute field) {
return new ScriptTemplate(formatScript("doc[{}].value"),
paramsBuilder().variable(field.name()).build(),
field.dataType());
}
protected ScriptTemplate asScriptFrom(AggregateFunctionAttribute aggregate) {
return new ScriptTemplate(formatScript("{}"),
paramsBuilder().agg(aggregate).build(),
aggregate.dataType());
}
protected String formatScript(String scriptTemplate) {
return formatTemplate(scriptTemplate);
}
public ProcessorDefinition asProcessorDefinition() {
if (lazyProcessor == null) {

View File

@ -8,24 +8,28 @@ package org.elasticsearch.xpack.sql.expression.function.scalar;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.ExpressionId;
import org.elasticsearch.xpack.sql.expression.TypedAttribute;
import org.elasticsearch.xpack.sql.expression.function.FunctionAttribute;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
public class ScalarFunctionAttribute extends TypedAttribute {
import java.util.Objects;
public class ScalarFunctionAttribute extends FunctionAttribute {
private final ScriptTemplate script;
private final Expression orderBy;
private final ProcessorDefinition processorDef;
ScalarFunctionAttribute(Location location, String name, DataType dataType, ExpressionId id, ScriptTemplate script, Expression orderBy, ProcessorDefinition processorDef) {
this(location, name, dataType, null, true, id, false, script, orderBy, processorDef);
ScalarFunctionAttribute(Location location, String name, DataType dataType, ExpressionId id, String functionId, ScriptTemplate script,
Expression orderBy, ProcessorDefinition processorDef) {
this(location, name, dataType, null, true, id, false, functionId, script, orderBy, processorDef);
}
ScalarFunctionAttribute(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id, boolean synthetic, ScriptTemplate script, Expression orderBy, ProcessorDefinition processorDef) {
super(location, name, dataType, qualifier, nullable, id, synthetic);
ScalarFunctionAttribute(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id,
boolean synthetic, String functionId, ScriptTemplate script, Expression orderBy, ProcessorDefinition processorDef) {
super(location, name, dataType, qualifier, nullable, id, synthetic, functionId);
this.script = script;
this.orderBy = orderBy;
this.processorDef = processorDef;
@ -45,16 +49,23 @@ public class ScalarFunctionAttribute extends TypedAttribute {
@Override
protected Expression canonicalize() {
return new ScalarFunctionAttribute(location(), "<none>", dataType(), null, true, id(), false, script, orderBy, processorDef);
return new ScalarFunctionAttribute(location(), "<none>", dataType(), null, true, id(), false, functionId(), script, orderBy,
processorDef);
}
@Override
protected Attribute clone(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) {
return new ScalarFunctionAttribute(location, name, dataType, qualifier, nullable, id, synthetic, script, orderBy, processorDef);
return new ScalarFunctionAttribute(location, name, dataType, qualifier, nullable, id, synthetic, functionId(), script, orderBy,
processorDef);
}
@Override
public boolean equals(Object obj) {
return super.equals(obj) && Objects.equals(orderBy, ((ScalarFunctionAttribute) obj).orderBy());
}
@Override
protected String label() {
return "s";
return "s->" + functionId();
}
}

View File

@ -5,19 +5,11 @@
*/
package org.elasticsearch.xpack.sql.expression.function.scalar;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.Expressions;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.Params;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
import org.elasticsearch.xpack.sql.tree.Location;
import static java.util.Collections.singletonList;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate;
public abstract class UnaryScalarFunction extends ScalarFunction {
@ -43,53 +35,7 @@ public abstract class UnaryScalarFunction extends ScalarFunction {
}
@Override
public ScalarFunctionAttribute toAttribute() {
String functionId = null;
Attribute attr = Expressions.attribute(field());
if (attr instanceof AggregateFunctionAttribute) {
AggregateFunctionAttribute afa = (AggregateFunctionAttribute) attr;
functionId = afa.functionId();
}
return new ScalarFunctionAttribute(location(), name(), dataType(), id(), asScript(), orderBy(), asProcessorDefinition());
public ScriptTemplate asScript() {
return asScript(field);
}
protected ScriptTemplate asScript() {
if (field.foldable()) {
return asScriptFromFoldable(field);
}
Attribute attr = Expressions.attribute(field());
if (attr != null) {
if (attr instanceof ScalarFunctionAttribute) {
return asScriptFrom((ScalarFunctionAttribute) attr);
}
if (attr instanceof AggregateFunctionAttribute) {
return asScriptFrom((AggregateFunctionAttribute) attr);
}
// fall-back to
return asScriptFrom((FieldAttribute) attr);
}
throw new SqlIllegalArgumentException("Cannot evaluate script for field %s", field());
}
protected ScriptTemplate asScriptFromFoldable(Expression foldable) {
return new ScriptTemplate(formatTemplate("{}"),
paramsBuilder().variable(foldable.fold()).build(),
foldable.dataType());
}
protected ScriptTemplate asScriptFrom(ScalarFunctionAttribute scalar) {
ScriptTemplate nested = scalar.script();
Params p = paramsBuilder().script(nested.params()).build();
return new ScriptTemplate(chainScalarTemplate(nested.template()), p, dataType());
}
protected abstract ScriptTemplate asScriptFrom(AggregateFunctionAttribute aggregate);
protected abstract ScriptTemplate asScriptFrom(FieldAttribute field);
protected abstract String chainScalarTemplate(String template);
}

View File

@ -61,7 +61,12 @@ public abstract class ArithmeticFunction extends BinaryScalarFunction {
@Override
protected ScriptTemplate asScriptFrom(ScriptTemplate leftScript, ScriptTemplate rightScript) {
return new ScriptTemplate(format(Locale.ROOT, "(%s) %s (%s)", leftScript.template(), operation.symbol(), rightScript.template()),
String op = operation.symbol();
// escape %
if (operation == BinaryArithmeticOperation.MOD) {
op = "%" + op;
}
return new ScriptTemplate(format(Locale.ROOT, "(%s) %s (%s)", leftScript.template(), op, rightScript.template()),
paramsBuilder()
.script(leftScript.params()).script(rightScript.params())
.build(), dataType());
@ -79,9 +84,10 @@ public abstract class ArithmeticFunction extends BinaryScalarFunction {
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("(");
sb.append(left());
if (!(left() instanceof Literal)) {
sb.insert(0, "(");
sb.insert(1, "(");
sb.append(")");
}
sb.append(" ");
@ -93,6 +99,8 @@ public abstract class ArithmeticFunction extends BinaryScalarFunction {
sb.insert(pos, "(");
sb.append(")");
}
sb.append(")#");
sb.append(functionId());
return sb.toString();
}

View File

@ -7,20 +7,14 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.Expressions;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.UnaryArithmeticProcessor.UnaryArithmeticOperation;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.UnaryProcessorDefinition;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate;
public class Neg extends UnaryScalarFunction {
public Neg(Location location, Expression field) {
@ -43,22 +37,9 @@ public class Neg extends UnaryScalarFunction {
}
@Override
protected ScriptTemplate asScriptFrom(AggregateFunctionAttribute aggregate) {
return new ScriptTemplate(formatTemplate("{}"),
paramsBuilder().agg(aggregate.functionId(), aggregate.propertyPath()).build(),
dataType());
}
@Override
protected ScriptTemplate asScriptFrom(FieldAttribute field) {
return new ScriptTemplate(formatTemplate("doc[{}].value"),
paramsBuilder().variable(field.name()).build(),
dataType());
}
@Override
protected String chainScalarTemplate(String template) {
return template;
protected String formatScript(String template) {
// Painless supports negating (and hopefully its corner cases)
return super.formatScript("-" + template);
}
@Override

View File

@ -30,16 +30,24 @@ import static org.elasticsearch.xpack.sql.expression.function.scalar.script.Scri
public abstract class DateTimeFunction extends UnaryScalarFunction implements TimeZoneAware {
private final DateTimeZone timeZone;
private final String name;
DateTimeFunction(Location location, Expression field, DateTimeZone timeZone) {
super(location, field);
this.timeZone = timeZone;
StringBuilder sb = new StringBuilder(super.name());
// add timezone as last argument
sb.insert(sb.length() - 1, " [" + timeZone.getID() + "]");
this.name = sb.toString();
}
public DateTimeZone timeZone() {
return timeZone;
}
@Override
public boolean foldable() {
return field().foldable();
}
@ -51,16 +59,6 @@ public abstract class DateTimeFunction extends UnaryScalarFunction implements Ti
new TypeResolution("Function '%s' cannot be applied on a non-date expression ('%s' of type '%s')", functionName(), Expressions.name(field()), field().dataType().esName());
}
@Override
public ScriptTemplate asScript() {
return super.asScript();
}
@Override
protected ScriptTemplate asScriptFrom(AggregateFunctionAttribute aggregate) {
throw new UnsupportedOperationException();
}
@Override
protected ScriptTemplate asScriptFrom(FieldAttribute field) {
ParamsBuilder params = paramsBuilder();
@ -88,8 +86,9 @@ public abstract class DateTimeFunction extends UnaryScalarFunction implements Ti
return new ScriptTemplate(template, params.build(), dataType());
}
@Override
protected String chainScalarTemplate(String template) {
protected ScriptTemplate asScriptFrom(AggregateFunctionAttribute aggregate) {
throw new UnsupportedOperationException();
}
@ -102,6 +101,7 @@ public abstract class DateTimeFunction extends UnaryScalarFunction implements Ti
*/
protected abstract ChronoField chronoField();
@Override
protected final ProcessorDefinition makeProcessorDefinition() {
return new UnaryProcessorDefinition(this, ProcessorDefinitions.toProcessorDefinition(field()), new DateTimeProcessor(extractor(), timeZone));
}
@ -115,4 +115,10 @@ public abstract class DateTimeFunction extends UnaryScalarFunction implements Ti
// used for applying ranges
public abstract String dateTimeFormat();
// add tz along the rest of the params
@Override
public String name() {
return name;
}
}

View File

@ -6,18 +6,20 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.xpack.sql.expression.Literal;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.Params;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataTypes;
import org.elasticsearch.xpack.sql.util.StringUtils;
public class E extends MathFunction {
public E(Location location) {
super(location);
}
public boolean foldable() {
return true;
private static final ScriptTemplate TEMPLATE = new ScriptTemplate("Math.E", Params.EMPTY, DataTypes.DOUBLE);
public E(Location location) {
super(location, new Literal(location, Math.E, DataTypes.DOUBLE));
}
@Override
@ -26,8 +28,13 @@ public class E extends MathFunction {
}
@Override
protected ScriptTemplate asScript() {
return new ScriptTemplate(StringUtils.EMPTY);
protected String functionArgs() {
return StringUtils.EMPTY;
}
@Override
public ScriptTemplate asScript() {
return TEMPLATE;
}
@Override

View File

@ -6,15 +6,11 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunctionAttribute;
import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.UnaryProcessorDefinition;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.DataTypes;
@ -22,8 +18,6 @@ import org.elasticsearch.xpack.sql.type.DataTypes;
import java.util.Locale;
import static java.lang.String.format;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate;
public abstract class MathFunction extends UnaryScalarFunction {
@ -39,36 +33,8 @@ public abstract class MathFunction extends UnaryScalarFunction {
return field().foldable();
}
@Override
protected String chainScalarTemplate(String template) {
return createTemplate(template);
}
@Override
// TODO: isn't chain Scalar Template enough?
protected ScriptTemplate asScriptFrom(ScalarFunctionAttribute scalar) {
ScriptTemplate nested = scalar.script();
return new ScriptTemplate(createTemplate(nested.template()),
paramsBuilder().script(nested.params()).build(),
dataType());
}
@Override
protected ScriptTemplate asScriptFrom(AggregateFunctionAttribute aggregate) {
return new ScriptTemplate(createTemplate(formatTemplate("{}")),
paramsBuilder().agg(aggregate.functionId(), aggregate.propertyPath()).build(),
dataType());
}
@Override
protected ScriptTemplate asScriptFrom(FieldAttribute field) {
return new ScriptTemplate(createTemplate(formatTemplate("doc[{}].value")),
paramsBuilder().variable(field.name()).build(),
dataType());
}
private String createTemplate(String template) {
return format(Locale.ROOT, "Math.%s(%s)", mathFunction(), template);
protected String formatScript(String template) {
return super.formatScript(format(Locale.ROOT, "Math.%s(%s)", mathFunction(), template));
}
protected String mathFunction() {

View File

@ -6,18 +6,20 @@
package org.elasticsearch.xpack.sql.expression.function.scalar.math;
import org.elasticsearch.xpack.sql.expression.Literal;
import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor.MathOperation;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.Params;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataTypes;
import org.elasticsearch.xpack.sql.util.StringUtils;
public class Pi extends MathFunction {
public Pi(Location location) {
super(location);
}
public boolean foldable() {
return true;
private static final ScriptTemplate TEMPLATE = new ScriptTemplate("Math.PI", Params.EMPTY, DataTypes.DOUBLE);
public Pi(Location location) {
super(location, new Literal(location, Math.PI, DataTypes.DOUBLE));
}
@Override
@ -26,8 +28,13 @@ public class Pi extends MathFunction {
}
@Override
protected ScriptTemplate asScript() {
return new ScriptTemplate(StringUtils.EMPTY);
protected String functionArgs() {
return StringUtils.EMPTY;
}
@Override
public ScriptTemplate asScript() {
return TEMPLATE;
}
@Override

View File

@ -5,17 +5,20 @@
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.script;
class Agg extends Param<String> {
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
private final String aggProperty;
class Agg extends Param<AggregateFunctionAttribute> {
Agg(String aggRef, String aggProperty) {
Agg(AggregateFunctionAttribute aggRef) {
super(aggRef);
this.aggProperty = aggProperty;
}
String aggName() {
return value().functionId();
}
public String aggProperty() {
return aggProperty;
return value().propertyPath();
}
@Override

View File

@ -5,28 +5,31 @@
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.script;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import static java.util.Collections.emptyList;
//
// Parameters for a script
//
// This class mainly exists to handle the different aggregation cases.
// While aggs can appear in scripts like regular parameters, they are not passed
// as parameters but rather as bucket_path.
// However in some cases (like count), it's not the agg path that is relevant but rather
// its property (_count).
// However the agg name still needs to be remembered to properly associate the script with.
//
// Hence why this class supports aggRef (which always returns the agg names) and aggPaths
// (which returns the agg property if it exists or the agg name/reference).
/**
* Parameters for a script
*
* This class mainly exists to handle the different aggregation cases.
* While aggs can appear in scripts like regular parameters, they are not passed
* as parameters but rather as bucket_path.
* However in some cases (like count), it's not the agg path that is relevant but rather
* its property (_count).
* As the agg name still needs to be remembered to properly associate the script with.
*
* Hence why this class supports aggRef (which always returns the agg names) and aggPaths
* (which returns the agg property if it exists or the agg name/reference).
*
* Also the parameter names support late binding/evaluation since the agg reference (like function id)
* can be changed during the optimization phase (for example min agg -&gt; stats.min).
*/
public class Params {
public static final Params EMPTY = new Params(emptyList());
@ -79,7 +82,7 @@ public class Params {
for (Param<?> p : params) {
if (p instanceof Agg) {
Agg a = (Agg) p;
String s = a.aggProperty() != null ? a.aggProperty() : a.value();
String s = a.aggProperty() != null ? a.aggProperty() : a.aggName();
map.put(p.prefix() + aggs++, s);
}
}
@ -93,7 +96,7 @@ public class Params {
for (Param<?> p : params) {
if (p instanceof Agg) {
refs.add(p.value().toString());
refs.add(((Agg) p).aggName());
}
}

View File

@ -5,6 +5,8 @@
*/
package org.elasticsearch.xpack.sql.expression.function.scalar.script;
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
import java.util.ArrayList;
import java.util.List;
@ -21,12 +23,8 @@ public class ParamsBuilder {
return this;
}
public ParamsBuilder agg(String aggName) {
return agg(aggName, null);
}
public ParamsBuilder agg(String aggName, String aggPath) {
params.add(new Agg(aggName, aggPath));
public ParamsBuilder agg(AggregateFunctionAttribute agg) {
params.add(new Agg(agg));
return this;
}

View File

@ -9,6 +9,7 @@ import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.DataTypes;
import org.elasticsearch.xpack.sql.util.StringUtils;
import java.util.List;
import java.util.Locale;
@ -19,6 +20,8 @@ import static java.lang.String.format;
public class ScriptTemplate {
public static final ScriptTemplate EMPTY = new ScriptTemplate(StringUtils.EMPTY);
private final String template;
private final Params params;
// used for sorting based on scripts
@ -89,7 +92,7 @@ public class ScriptTemplate {
return bindTemplate();
}
public static String formatTemplate(String template, Object... params) {
return format(Locale.ROOT, template.replace("{}", "params.%%s"), params);
public static String formatTemplate(String template) {
return template.replace("{}", "params.%s");
}
}

View File

@ -48,7 +48,7 @@ public abstract class Predicates {
List<Expression> common = new ArrayList<>(Math.min(l.size(), r.size()));
for (Expression lExp : l) {
for (Expression rExp : r) {
if (lExp.canonicalEquals(rExp)) {
if (lExp.semanticEquals(rExp)) {
common.add(lExp);
}
}
@ -60,7 +60,7 @@ public abstract class Predicates {
List<Expression> diff = new ArrayList<>(Math.min(from.size(), r.size()));
for (Expression lExp : from) {
for (Expression rExp : r) {
if (!lExp.canonicalEquals(rExp)) {
if (!lExp.semanticEquals(rExp)) {
diff.add(lExp);
}
}

View File

@ -8,10 +8,12 @@ package org.elasticsearch.xpack.sql.optimizer;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.expression.Alias;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.AttributeMap;
import org.elasticsearch.xpack.sql.expression.AttributeSet;
import org.elasticsearch.xpack.sql.expression.BinaryExpression;
import org.elasticsearch.xpack.sql.expression.BinaryOperator.Negateable;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.ExpressionId;
import org.elasticsearch.xpack.sql.expression.ExpressionSet;
import org.elasticsearch.xpack.sql.expression.Expressions;
import org.elasticsearch.xpack.sql.expression.Literal;
@ -19,6 +21,7 @@ import org.elasticsearch.xpack.sql.expression.NamedExpression;
import org.elasticsearch.xpack.sql.expression.NestedFieldAttribute;
import org.elasticsearch.xpack.sql.expression.Order;
import org.elasticsearch.xpack.sql.expression.function.Function;
import org.elasticsearch.xpack.sql.expression.function.FunctionAttribute;
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunction;
import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
import org.elasticsearch.xpack.sql.expression.function.aggregate.ExtendedStats;
@ -32,6 +35,8 @@ import org.elasticsearch.xpack.sql.expression.function.aggregate.PercentileRanks
import org.elasticsearch.xpack.sql.expression.function.aggregate.Percentiles;
import org.elasticsearch.xpack.sql.expression.function.aggregate.Stats;
import org.elasticsearch.xpack.sql.expression.function.scalar.Cast;
import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunctionAttribute;
import org.elasticsearch.xpack.sql.expression.predicate.And;
import org.elasticsearch.xpack.sql.expression.predicate.BinaryComparison;
import org.elasticsearch.xpack.sql.expression.predicate.Equals;
@ -98,15 +103,17 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
Batch aggregate = new Batch("Aggregation",
new PruneDuplicatesInGroupBy(),
new ReplaceDuplicateAggsWithReferences(),
new CombineAggsToMatrixStats(),
new CombineAggsToExtendedStats(),
new CombineAggsToStats(),
new ReplaceAggsWithMatrixStats(),
new ReplaceAggsWithExtendedStats(),
new ReplaceAggsWithStats(),
new PromoteStatsToExtendedStats(),
new CombineAggsToPercentiles(),
new CombineAggsToPercentileRanks()
new ReplaceAggsWithPercentiles(),
new ReplceAggsWithPercentileRanks()
);
Batch operators = new Batch("Operator Optimization",
// combining
new CombineProjections(),
// folding
new ReplaceFoldableAttributes(),
new ConstantFolding(),
@ -165,7 +172,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
if (plan instanceof Aggregate) {
Aggregate a = (Aggregate) plan;
// clean group expressions
List<Expression> cleanedGroups = a.groupings().stream().map(this::trimAliases).collect(toList());
List<Expression> cleanedGroups = a.groupings().stream().map(CleanAliases::trimAliases).collect(toList());
return new Aggregate(a.location(), a.child(), cleanedGroups, cleanExpressions(a.aggregates()));
}
@ -178,11 +185,11 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
}
private List<NamedExpression> cleanExpressions(List<? extends NamedExpression> args) {
return args.stream().map(this::trimNonTopLevelAliases).map(NamedExpression.class::cast)
return args.stream().map(CleanAliases::trimNonTopLevelAliases).map(NamedExpression.class::cast)
.collect(toList());
}
private Expression trimNonTopLevelAliases(Expression e) {
static Expression trimNonTopLevelAliases(Expression e) {
if (e instanceof Alias) {
Alias a = (Alias) e;
return new Alias(a.location(), a.name(), a.qualifier(), trimAliases(a.child()), a.id());
@ -190,7 +197,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
return trimAliases(e);
}
private Expression trimAliases(Expression e) {
private static Expression trimAliases(Expression e) {
return e.transformDown(Alias::child, Alias.class);
}
}
@ -245,7 +252,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
}
}
static class CombineAggsToMatrixStats extends Rule<LogicalPlan, LogicalPlan> {
static class ReplaceAggsWithMatrixStats extends Rule<LogicalPlan, LogicalPlan> {
@Override
public LogicalPlan apply(LogicalPlan p) {
@ -253,7 +260,13 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
Map<String, AggregateFunctionAttribute> promotedFunctionIds = new LinkedHashMap<>();
p = p.transformExpressionsUp(e -> rule(e, seen, promotedFunctionIds));
return p.transformExpressionsDown(e -> CombineAggsToStats.updateFunctionAttrs(e, promotedFunctionIds));
// nothing found
if (seen.isEmpty()) {
return p;
}
return ReplaceAggsWithStats.updateAggAttributes(p, promotedFunctionIds);
}
@Override
@ -282,15 +295,21 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
}
}
static class CombineAggsToExtendedStats extends Rule<LogicalPlan, LogicalPlan> {
static class ReplaceAggsWithExtendedStats extends Rule<LogicalPlan, LogicalPlan> {
@Override
public LogicalPlan apply(LogicalPlan p) {
Map<String, AggregateFunctionAttribute> promotedFunctionIds = new LinkedHashMap<>();
Map<Expression, ExtendedStats> seen = new LinkedHashMap<>();
p = p.transformExpressionsUp(e -> rule(e, seen, promotedFunctionIds));
// nothing found
if (seen.isEmpty()) {
return p;
}
// update old agg attributes
return p.transformExpressionsDown(e -> CombineAggsToStats.updateFunctionAttrs(e, promotedFunctionIds));
return ReplaceAggsWithStats.updateAggAttributes(p, promotedFunctionIds);
}
@Override
@ -319,29 +338,44 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
}
}
static class CombineAggsToStats extends Rule<LogicalPlan, LogicalPlan> {
static class ReplaceAggsWithStats extends Rule<LogicalPlan, LogicalPlan> {
private static class Counter {
private static class Match {
final Stats stats;
int count = 1;
final Set<Class<? extends AggregateFunction>> functionTypes = new LinkedHashSet<>();
Counter(Stats stats) {
Match(Stats stats) {
this.stats = stats;
}
@Override
public String toString() {
return stats.toString();
}
}
@Override
public LogicalPlan apply(LogicalPlan p) {
Map<Expression, Counter> potentialPromotions = new LinkedHashMap<>();
Map<Expression, Match> potentialPromotions = new LinkedHashMap<>();
p.forEachExpressionsUp(e -> collect(e, potentialPromotions));
// no promotions found - skip
if (potentialPromotions.isEmpty()) {
return p;
}
// start promotion
// old functionId to new function attribute
Map<String, AggregateFunctionAttribute> promotedFunctionIds = new LinkedHashMap<>();
p.forEachExpressionsUp(e -> count(e, potentialPromotions));
// promote aggs to InnerAggs
// 1. promote aggs to InnerAggs
p = p.transformExpressionsUp(e -> promote(e, potentialPromotions, promotedFunctionIds));
// update old agg attributes (TODO: this might be applied while updating the InnerAggs since the promotion happens bottom-up (and thus any attributes should be only in higher nodes)
return p.transformExpressionsDown(e -> updateFunctionAttrs(e, promotedFunctionIds));
// 2. update the old agg attrs to the promoted agg functions
return updateAggAttributes(p, promotedFunctionIds);
}
@Override
@ -349,21 +383,21 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
return e;
}
private Expression count(Expression e, Map<Expression, Counter> seen) {
private Expression collect(Expression e, Map<Expression, Match> seen) {
if (Stats.isTypeCompatible(e)) {
AggregateFunction f = (AggregateFunction) e;
Expression argument = f.field();
Counter counter = seen.get(argument);
Match match = seen.get(argument);
if (counter == null) {
counter = new Counter(new Stats(f.location(), argument));
counter.functionTypes.add(f.getClass());
seen.put(argument, counter);
if (match == null) {
match = new Match(new Stats(f.location(), argument));
match.functionTypes.add(f.getClass());
seen.put(argument, match);
}
else {
if (counter.functionTypes.add(f.getClass())) {
counter.count++;
if (match.functionTypes.add(f.getClass())) {
match.count++;
}
}
}
@ -371,12 +405,12 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
return e;
}
private Expression promote(Expression e, Map<Expression, Counter> seen, Map<String, AggregateFunctionAttribute> attrs) {
private static Expression promote(Expression e, Map<Expression, Match> seen, Map<String, AggregateFunctionAttribute> attrs) {
if (Stats.isTypeCompatible(e)) {
AggregateFunction f = (AggregateFunction) e;
Expression argument = f.field();
Counter counter = seen.get(argument);
Match counter = seen.get(argument);
// if the stat has at least two different functions for it, promote it as stat
if (counter != null && counter.count > 1) {
@ -388,7 +422,74 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
return e;
}
static Expression updateFunctionAttrs(Expression e, Map<String, AggregateFunctionAttribute> promotedIds) {
static LogicalPlan updateAggAttributes(LogicalPlan p, Map<String, AggregateFunctionAttribute> promotedFunctionIds) {
// 1. update old agg function attributes
p = p.transformExpressionsUp(e -> updateAggFunctionAttrs(e, promotedFunctionIds));
// 2. update all scalar function consumers of the promoted aggs
// since they contain the old ids in scrips and processorDefinitions that need regenerating
// 2a. collect ScalarFunctions that unwrapped refer to any of the updated aggregates
// 2b. replace any of the old ScalarFunction attributes
final Set<String> newAggIds = new LinkedHashSet<>(promotedFunctionIds.size());
for (AggregateFunctionAttribute afa : promotedFunctionIds.values()) {
newAggIds.add(afa.functionId());
}
final Map<String, ScalarFunctionAttribute> updatedScalarAttrs = new LinkedHashMap<>();
final Map<ExpressionId, ScalarFunctionAttribute> updatedScalarAliases = new LinkedHashMap<>();
p = p.transformExpressionsUp(e -> {
// replace scalar attributes of the old replaced functions
if (e instanceof ScalarFunctionAttribute) {
ScalarFunctionAttribute sfa = (ScalarFunctionAttribute) e;
// check aliases
sfa = updatedScalarAttrs.getOrDefault(sfa.functionId(), sfa);
// check scalars
sfa = updatedScalarAliases.getOrDefault(sfa.id(), sfa);
return sfa;
}
// unwrap aliases as they 'hide' functions under their own attributes
if (e instanceof Alias) {
Attribute att = Expressions.attribute(e);
if (att instanceof ScalarFunctionAttribute) {
ScalarFunctionAttribute sfa = (ScalarFunctionAttribute) att;
// the underlying function has been updated
// thus record the alias as well
if (updatedScalarAttrs.containsKey(sfa.functionId())) {
updatedScalarAliases.put(sfa.id(), sfa);
}
}
}
else if (e instanceof ScalarFunction) {
ScalarFunction sf = (ScalarFunction) e;
// if it's a unseen function check if the function children/arguments refers to any of the promoted aggs
if (!updatedScalarAttrs.containsKey(sf.functionId()) && e.anyMatch(c -> {
Attribute a = Expressions.attribute(c);
if (a instanceof FunctionAttribute) {
return newAggIds.contains(((FunctionAttribute) a).functionId());
}
return false;
})) {
// if so, record its attribute
updatedScalarAttrs.put(sf.functionId(), sf.toAttribute());
}
}
return e;
});
return p;
}
private static Expression updateAggFunctionAttrs(Expression e, Map<String, AggregateFunctionAttribute> promotedIds) {
if (e instanceof AggregateFunctionAttribute) {
AggregateFunctionAttribute ae = (AggregateFunctionAttribute) e;
AggregateFunctionAttribute promoted = promotedIds.get(ae.functionId());
@ -443,7 +544,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
}
}
static class CombineAggsToPercentiles extends Rule<LogicalPlan, LogicalPlan> {
static class ReplaceAggsWithPercentiles extends Rule<LogicalPlan, LogicalPlan> {
@Override
public LogicalPlan apply(LogicalPlan p) {
@ -463,7 +564,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
Map<String, AggregateFunctionAttribute> promotedFunctionIds = new LinkedHashMap<>();
p = p.transformExpressionsUp(e -> rule(e, percentilesPerField, promotedFunctionIds));
// finally update all the function references as well
return p.transformExpressionsDown(e -> CombineAggsToStats.updateFunctionAttrs(e, promotedFunctionIds));
return p.transformExpressionsDown(e -> ReplaceAggsWithStats.updateAggFunctionAttrs(e, promotedFunctionIds));
}
private void count(Expression e, Map<Expression, Set<Expression>> percentsPerField) {
@ -500,7 +601,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
}
}
static class CombineAggsToPercentileRanks extends Rule<LogicalPlan, LogicalPlan> {
static class ReplceAggsWithPercentileRanks extends Rule<LogicalPlan, LogicalPlan> {
@Override
public LogicalPlan apply(LogicalPlan p) {
@ -520,7 +621,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
Map<String, AggregateFunctionAttribute> promotedFunctionIds = new LinkedHashMap<>();
p = p.transformExpressionsUp(e -> rule(e, ranksPerField, promotedFunctionIds));
// finally update all the function references as well
return p.transformExpressionsDown(e -> CombineAggsToStats.updateFunctionAttrs(e, promotedFunctionIds));
return p.transformExpressionsDown(e -> ReplaceAggsWithStats.updateAggFunctionAttrs(e, promotedFunctionIds));
}
private void count(Expression e, Map<Expression, Set<Expression>> ranksPerField) {
@ -810,15 +911,55 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
}
}
static class CombineFilters extends OptimizerRule<Filter> {
static class CombineProjections extends OptimizerRule<Project> {
CombineProjections() {
super(TransformDirection.UP);
}
@Override
protected LogicalPlan rule(Filter filter) {
if (filter.child() instanceof Filter) {
Filter child = (Filter) filter.child();
throw new UnsupportedOperationException("not implemented yet");
protected LogicalPlan rule(Project project) {
LogicalPlan child = project.child();
if (child instanceof Project) {
Project p = (Project) child;
// eliminate lower project but first replace the aliases in the upper one
return new Project(p.location(), p.child(), combineProjections(project.projections(), p.projections()));
}
throw new UnsupportedOperationException("not implemented yet");
if (child instanceof Aggregate) {
Aggregate a = (Aggregate) child;
return new Aggregate(a.location(), a.child(), a.groupings(), combineProjections(project.projections(), a.aggregates()));
}
return project;
}
// normally only the upper projections should survive but since the lower list might have aliases definitions
// that might be reused by the upper one, these need to be replaced.
// for example an alias defined in the lower list might be referred in the upper - without replacing it the alias becomes invalid
private List<NamedExpression> combineProjections(List<? extends NamedExpression> upper, List<? extends NamedExpression> lower) {
// collect aliases in the lower list
Map<Attribute, Alias> map = new LinkedHashMap<>();
for (NamedExpression ne : lower) {
if (ne instanceof Alias) {
Alias a = (Alias) ne;
map.put(a.toAttribute(), a);
}
}
AttributeMap<Alias> aliases = new AttributeMap<>(map);
List<NamedExpression> replaced = new ArrayList<>();
// replace any matching attribute with a lower alias (if there's a match)
// but clean-up non-top aliases at the end
for (NamedExpression ne : upper) {
NamedExpression replacedExp = (NamedExpression) ne.transformUp(a -> {
Alias as = aliases.get(a);
return as != null ? as : a;
}, Attribute.class);
replaced.add((NamedExpression) CleanAliases.trimNonTopLevelAliases(replacedExp));
}
return replaced;
}
}
@ -963,7 +1104,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
if (FALSE.equals(l) || FALSE.equals(r)) {
return FALSE;
}
if (l.canonicalEquals(r)) {
if (l.semanticEquals(r)) {
return l;
}
@ -1001,7 +1142,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
return l;
}
if (l.canonicalEquals(r)) {
if (l.semanticEquals(r)) {
return l;
}
@ -1070,14 +1211,14 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
// true for equality
if (bc instanceof Equals || bc instanceof GreaterThanOrEqual || bc instanceof LessThanOrEqual) {
if (!l.nullable() && !r.nullable() && l.canonicalEquals(r)) {
if (!l.nullable() && !r.nullable() && l.semanticEquals(r)) {
return TRUE;
}
}
// false for equality
if (bc instanceof GreaterThan || bc instanceof LessThan) {
if (!l.nullable() && !r.nullable() && l.canonicalEquals(r)) {
if (!l.nullable() && !r.nullable() && l.semanticEquals(r)) {
return FALSE;
}
}
@ -1220,7 +1361,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
this(TransformDirection.DOWN);
}
OptimizerRule(TransformDirection direction) {
protected OptimizerRule(TransformDirection direction) {
this.direction = direction;
}

View File

@ -27,10 +27,10 @@ import org.elasticsearch.xpack.sql.plan.TableIdentifier;
import org.elasticsearch.xpack.sql.plan.logical.Aggregate;
import org.elasticsearch.xpack.sql.plan.logical.Distinct;
import org.elasticsearch.xpack.sql.plan.logical.Filter;
import org.elasticsearch.xpack.sql.plan.logical.LocalRelation;
import org.elasticsearch.xpack.sql.plan.logical.Join;
import org.elasticsearch.xpack.sql.plan.logical.Join.JoinType;
import org.elasticsearch.xpack.sql.plan.logical.Limit;
import org.elasticsearch.xpack.sql.plan.logical.LocalRelation;
import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
import org.elasticsearch.xpack.sql.plan.logical.OrderBy;
import org.elasticsearch.xpack.sql.plan.logical.Project;
@ -76,11 +76,11 @@ abstract class LogicalPlanBuilder extends ExpressionBuilder {
LogicalPlan plan = plan(ctx.queryTerm());
if (!ctx.orderBy().isEmpty()) {
plan = new OrderBy(source(ctx), plan, visitList(ctx.orderBy(), Order.class));
plan = new OrderBy(source(ctx.ORDER()), plan, visitList(ctx.orderBy(), Order.class));
}
if (ctx.limit != null && ctx.INTEGER_VALUE() != null) {
plan = new Limit(source(ctx), new Literal(source(ctx), Integer.parseInt(ctx.limit.getText()), DataTypes.INTEGER), plan);
plan = new Limit(source(ctx.limit), new Literal(source(ctx), Integer.parseInt(ctx.limit.getText()), DataTypes.INTEGER), plan);
}
return plan;

View File

@ -30,7 +30,7 @@ public class Project extends UnaryPlan {
@Override
public boolean resolved() {
return super.resolved() && !Expressions.anyMatchInList(projections, Functions::isAggregateFunction);
return super.resolved() && !Expressions.anyMatch(projections, Functions::isAggregate);
}
@Override

View File

@ -115,9 +115,8 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
if (e instanceof ScalarFunction) {
processors.put(attr, ProcessorDefinitions.toProcessorDefinition(e));
}
}
else {
throw new PlanningException("Don't know how to translate expression %s", e);
} else {
processors.put(aliasAttr, ProcessorDefinitions.toProcessorDefinition(e));
}
}
else {
@ -208,6 +207,9 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
throw new PlanningException("Aggregation filtering not supported (yet) without explicit grouping");
//aggs = aggs.addAgg(null, filter);
}
if (targetGroup == null) {
throw new PlanningException("Cannot determine group column; likely an invalid query - please report");
}
else {
aggs = aggs.updateGroup(targetGroup.withPipelines(combine(targetGroup.subPipelines(), filter)));
}
@ -312,7 +314,7 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
}
// or found an aggregate expression (which has to work on an attribute used for grouping)
// (can happen when dealing with a root group)
if (Functions.isAggregateFunction(exp)) {
if (Functions.isAggregate(exp)) {
Tuple<QueryContainer, AggPathInput> withFunction = addAggFunction(matchingGroup, (AggregateFunction) exp, compoundAggMap, qC.get());
qC.set(withFunction.v1());
return withFunction.v2();
@ -347,7 +349,7 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
}
else {
// the only thing left is agg function
Check.isTrue(Functions.isAggregateFunction(child), "Expected aggregate function inside alias; got %s", child.nodeString());
Check.isTrue(Functions.isAggregate(child), "Expected aggregate function inside alias; got %s", child.nodeString());
Tuple<QueryContainer, AggPathInput> withAgg = addAggFunction(matchingGroup, (AggregateFunction) child, compoundAggMap, queryC);
//FIXME: what about inner key
queryC = withAgg.v1().addAggColumn(withAgg.v2().context());
@ -435,6 +437,8 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
QueryContainer qContainer = exec.queryContainer();
for (Order order : plan.order()) {
Direction direction = Direction.from(order.direction());
// check whether sorting is on an group (and thus nested agg) or field
Attribute attr = ((NamedExpression) order.child()).toAttribute();
// check whether there's an alias (occurs with scalar functions which are not named)
@ -442,8 +446,6 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
String lookup = attr.id().toString();
GroupingAgg group = qContainer.findGroupForAgg(lookup);
Direction direction = Direction.from(order.direction());
// TODO: might need to validate whether the target field or group actually exist
if (group != null && group != GroupingAgg.DEFAULT_GROUP) {
// check whether the lookup matches a group

View File

@ -31,6 +31,7 @@ import org.elasticsearch.xpack.sql.expression.function.aggregate.PercentileRanks
import org.elasticsearch.xpack.sql.expression.function.aggregate.Percentiles;
import org.elasticsearch.xpack.sql.expression.function.aggregate.Stats;
import org.elasticsearch.xpack.sql.expression.function.aggregate.Sum;
import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunctionAttribute;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeHistogramFunction;
@ -90,6 +91,7 @@ import org.elasticsearch.xpack.sql.type.DataTypes;
import org.elasticsearch.xpack.sql.util.Check;
import org.elasticsearch.xpack.sql.util.ReflectionUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.LinkedHashMap;
@ -100,7 +102,6 @@ import java.util.Map.Entry;
import static java.lang.String.format;
import static java.util.Collections.singletonList;
import static java.util.stream.Collectors.toList;
import static org.elasticsearch.xpack.sql.expression.Foldables.doubleValuesOf;
import static org.elasticsearch.xpack.sql.expression.Foldables.stringValueOf;
import static org.elasticsearch.xpack.sql.expression.Foldables.valueOf;
@ -109,7 +110,7 @@ import static org.elasticsearch.xpack.sql.expression.function.scalar.script.Scri
abstract class QueryTranslator {
static final List<ExppressionTranslator<?>> QUERY_TRANSLATORS = Arrays.asList(
static final List<ExpressionTranslator<?>> QUERY_TRANSLATORS = Arrays.asList(
new BinaryComparisons(),
new Ranges(),
new BinaryLogic(),
@ -156,7 +157,7 @@ abstract class QueryTranslator {
static QueryTranslation toQuery(Expression e, boolean onAggs) {
QueryTranslation translation = null;
for (ExppressionTranslator<?> translator : QUERY_TRANSLATORS) {
for (ExpressionTranslator<?> translator : QUERY_TRANSLATORS) {
translation = translator.translate(e, onAggs);
if (translation != null) {
return translation;
@ -191,9 +192,10 @@ abstract class QueryTranslator {
this.groupMap = groupMap;
this.groupPath = propertyPath;
aggNames = groupMap.values().stream()
.map(i -> i.id())
.collect(toList());
aggNames = new ArrayList<>(groupMap.size());
for (GroupingAgg gAgg : groupMap.values()) {
aggNames.add(gAgg.id());
}
Iterator<Entry<ExpressionId, GroupingAgg>> iterator = groupMap.entrySet().iterator();
@ -211,7 +213,7 @@ abstract class QueryTranslator {
GroupingAgg groupFor(Expression exp) {
if (Functions.isAggregateFunction(exp)) {
if (Functions.isAggregate(exp)) {
AggregateFunction f = (AggregateFunction) exp;
// if there's at least one agg in the tree
if (groupPath != null) {
@ -256,6 +258,7 @@ abstract class QueryTranslator {
if (exp instanceof NamedExpression) {
NamedExpression ne = (NamedExpression) exp;
// change analyzed to non non-analyzed attributes
if (exp instanceof FieldAttribute) {
FieldAttribute fa = (FieldAttribute) exp;
if (fa.isAnalyzed()) {
@ -268,15 +271,20 @@ abstract class QueryTranslator {
GroupingAgg agg = null;
// dates are handled differently because of date histograms
if (exp instanceof DateTimeFunction) {
DateTimeFunction dtf = (DateTimeFunction) exp;
if (dtf instanceof DateTimeHistogramFunction) {
DateTimeHistogramFunction dthf = (DateTimeHistogramFunction) dtf;
// handle functions differently
if (exp instanceof Function) {
// dates are handled differently because of date histograms
if (exp instanceof DateTimeHistogramFunction) {
DateTimeHistogramFunction dthf = (DateTimeHistogramFunction) exp;
agg = new GroupByDateAgg(aggId, AggPath.bucketValue(propertyPath), nameOf(exp), dthf.interval(), dthf.timeZone());
}
// all other scalar functions become a script
else if (exp instanceof ScalarFunction) {
ScalarFunction sf = (ScalarFunction) exp;
agg = new GroupByScriptAgg(aggId, AggPath.bucketValue(propertyPath), nameOf(exp), sf.asScript());
}
else {
agg = new GroupByScriptAgg(aggId, AggPath.bucketValue(propertyPath), nameOf(exp), dtf.asScript());
throw new SqlIllegalArgumentException("Cannot GROUP BY function %s", exp);
}
}
else {
@ -418,7 +426,7 @@ abstract class QueryTranslator {
// TODO: need to optimize on ngram
// TODO: see whether escaping is needed
static class Likes extends ExppressionTranslator<BinaryExpression> {
static class Likes extends ExpressionTranslator<BinaryExpression> {
@Override
protected QueryTranslation asQuery(BinaryExpression e, boolean onAggs) {
@ -459,7 +467,7 @@ abstract class QueryTranslator {
}
}
static class StringQueries extends ExppressionTranslator<StringQueryPredicate> {
static class StringQueries extends ExpressionTranslator<StringQueryPredicate> {
@Override
protected QueryTranslation asQuery(StringQueryPredicate q, boolean onAggs) {
@ -467,7 +475,7 @@ abstract class QueryTranslator {
}
}
static class Matches extends ExppressionTranslator<MatchQueryPredicate> {
static class Matches extends ExpressionTranslator<MatchQueryPredicate> {
@Override
protected QueryTranslation asQuery(MatchQueryPredicate q, boolean onAggs) {
@ -475,7 +483,7 @@ abstract class QueryTranslator {
}
}
static class MultiMatches extends ExppressionTranslator<MultiMatchQueryPredicate> {
static class MultiMatches extends ExpressionTranslator<MultiMatchQueryPredicate> {
@Override
protected QueryTranslation asQuery(MultiMatchQueryPredicate q, boolean onAggs) {
@ -483,7 +491,7 @@ abstract class QueryTranslator {
}
}
static class BinaryLogic extends ExppressionTranslator<BinaryExpression> {
static class BinaryLogic extends ExpressionTranslator<BinaryExpression> {
@Override
protected QueryTranslation asQuery(BinaryExpression e, boolean onAggs) {
@ -498,7 +506,7 @@ abstract class QueryTranslator {
}
}
static class Nots extends ExppressionTranslator<Not> {
static class Nots extends ExpressionTranslator<Not> {
@Override
protected QueryTranslation asQuery(Not not, boolean onAggs) {
@ -507,7 +515,7 @@ abstract class QueryTranslator {
}
}
static class Nulls extends ExppressionTranslator<UnaryExpression> {
static class Nulls extends ExpressionTranslator<UnaryExpression> {
@Override
protected QueryTranslation asQuery(UnaryExpression ue, boolean onAggs) {
@ -520,11 +528,13 @@ abstract class QueryTranslator {
}
// assume the Optimizer properly orders the predicates to ease the translation
static class BinaryComparisons extends ExppressionTranslator<BinaryComparison> {
static class BinaryComparisons extends ExpressionTranslator<BinaryComparison> {
@Override
protected QueryTranslation asQuery(BinaryComparison bc, boolean onAggs) {
Check.isTrue(bc.right().foldable(), "don't know how to translate right %s in %s", bc.right().nodeString(), bc);
Check.isTrue(bc.right().foldable(),
"Line %d:%d - Comparisons against variables are not (currently) supported; offender %s in %s",
bc.right().location().getLineNumber(), bc.right().location().getColumnNumber(), bc.right().nodeName(), bc.nodeName());
if (bc.left() instanceof NamedExpression) {
NamedExpression ne = (NamedExpression) bc.left();
@ -541,7 +551,7 @@ abstract class QueryTranslator {
ScalarFunctionAttribute sfa = (ScalarFunctionAttribute) at;
ScriptTemplate scriptTemplate = sfa.script();
String template = formatTemplate("%s %s {}", scriptTemplate.template(), bc.symbol());
String template = formatTemplate(format(Locale.ROOT, "%s %s {}", scriptTemplate.template(), bc.symbol()));
// no need to bind the wrapped/target agg - it is already available through the nested script (needed to create the script itself)
Params params = paramsBuilder().script(scriptTemplate.params()).variable(valueOf(bc.right())).build();
ScriptTemplate script = new ScriptTemplate(template, params, DataTypes.BOOLEAN);
@ -565,10 +575,10 @@ abstract class QueryTranslator {
AggregateFunctionAttribute fa = (AggregateFunctionAttribute) at;
// TODO: handle case where both sides of the comparison are functions
template = formatTemplate("{} %s {}", bc.symbol());
template = formatTemplate(format(Locale.ROOT, "{} %s {}", bc.symbol()));
// bind the agg and the variable to the script
params = paramsBuilder().agg(fa.functionId(), fa.propertyPath()).variable(valueOf(bc.right())).build();
params = paramsBuilder().agg(fa).variable(valueOf(bc.right())).build();
}
aggFilter = new AggFilter(at.id().toString(), new ScriptTemplate(template, params, DataTypes.BOOLEAN));
@ -624,7 +634,7 @@ abstract class QueryTranslator {
}
}
static class Ranges extends ExppressionTranslator<Range> {
static class Ranges extends ExpressionTranslator<Range> {
@Override
protected QueryTranslation asQuery(Range r, boolean onAggs) {
@ -649,11 +659,11 @@ abstract class QueryTranslator {
ScalarFunctionAttribute sfa = (ScalarFunctionAttribute) at;
ScriptTemplate scriptTemplate = sfa.script();
String template = formatTemplate("({} %s %s) && (%s %s {})",
String template = formatTemplate(format(Locale.ROOT, "({} %s %s) && (%s %s {})",
r.includeLower() ? "<=" : "<",
scriptTemplate.template(),
scriptTemplate.template(),
r.includeUpper() ? "<=" : "<");
scriptTemplate.template(),
scriptTemplate.template(),
r.includeUpper() ? "<=" : "<"));
// no need to bind the wrapped/target - it is already available through the nested script (needed to create the script itself)
Params params = paramsBuilder().variable(lower)
@ -683,13 +693,13 @@ abstract class QueryTranslator {
if (at instanceof AggregateFunctionAttribute) {
AggregateFunctionAttribute fa = (AggregateFunctionAttribute) at;
template = formatTemplate("{} %s {} && {} %s {}",
r.includeLower() ? "<=" : "<",
r.includeUpper() ? "<=" : "<");
template = formatTemplate(format(Locale.ROOT, "{} %s {} && {} %s {}",
r.includeLower() ? "<=" : "<",
r.includeUpper() ? "<=" : "<"));
params = paramsBuilder().variable(lower)
.agg(fa.functionId(), fa.propertyPath())
.agg(fa.functionId(), fa.propertyPath())
.agg(fa)
.agg(fa)
.variable(upper)
.build();
@ -848,7 +858,7 @@ abstract class QueryTranslator {
}
abstract static class ExppressionTranslator<E extends Expression> {
abstract static class ExpressionTranslator<E extends Expression> {
private final Class<E> typeToken = ReflectionUtils.detectSuperTypeForRuleLike(getClass());

View File

@ -5,13 +5,13 @@
*/
package org.elasticsearch.xpack.sql.querydsl.agg;
import java.util.Locale;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.Params;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
import org.elasticsearch.xpack.sql.type.DataTypes;
import java.util.Locale;
import static java.lang.String.format;
public class AndAggFilter extends AggFilter {

View File

@ -5,13 +5,13 @@
*/
package org.elasticsearch.xpack.sql.querydsl.agg;
import java.util.Locale;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.Params;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder;
import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
import org.elasticsearch.xpack.sql.type.DataTypes;
import java.util.Locale;
import static java.lang.String.format;
public class OrAggFilter extends AggFilter {

View File

@ -12,6 +12,8 @@ import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
import static java.util.Collections.emptyList;
/**
* Immutable tree structure.
* The traversal is done depth-first, pre-order (first the node then its children), that is seeks up and then goes down.
@ -102,7 +104,47 @@ public abstract class Node<T extends Node<T>> {
@SuppressWarnings("unchecked")
public boolean anyMatch(Predicate<? super T> predicate) {
boolean result = predicate.test((T) this);
return result ? true : children().stream().anyMatch(c -> c.anyMatch(predicate));
if (!result) {
for (T child : children) {
if (child.anyMatch(predicate)) {
return true;
}
}
}
return result;
}
public List<T> collect(Predicate<? super T> predicate) {
List<T> l = new ArrayList<>();
forEachDown(n -> {
if (predicate.test(n)) {
l.add(n);
}
});
return l.isEmpty() ? emptyList() : l;
}
public List<T> collectLeaves() {
return collect(n -> n.children().isEmpty());
}
// parse the list in pre-order and on match, skip the child/branch and move on to the next child/branch
public List<T> collectFirstChildren(Predicate<? super T> predicate) {
List<T> matches = new ArrayList<>();
doCollectFirst(predicate, matches);
return matches;
}
@SuppressWarnings("unchecked")
protected void doCollectFirst(Predicate<? super T> predicate, List<T> matches) {
T t = (T) this;
if (predicate.test(t)) {
matches.add(t);
} else {
for (T child : children()) {
child.doCollectFirst(predicate, matches);
}
}
}
// TODO: maybe add a flatMap (need to double check the Stream bit)
@ -163,7 +205,11 @@ public abstract class Node<T extends Node<T>> {
transformedChildren.add(next);
}
return (childrenChanged ? NodeUtils.copyTree(this, transformedChildren) : (T) this);
return (childrenChanged ? replaceChildren(transformedChildren) : (T) this);
}
public T replaceChildren(List<T> newChildren) {
return NodeUtils.copyTree(this, newChildren);
}
//
@ -202,6 +248,7 @@ public abstract class Node<T extends Node<T>> {
return changed ? NodeUtils.cloneNode(this, props) : (T) this;
}
@Override
public int hashCode() {
return Objects.hash(children);

View File

@ -41,7 +41,7 @@ public abstract class NodeUtils {
private static final String TO_STRING_IGNORE_PROP = "location";
private static final int TO_STRING_MAX_PROP = 10;
private static final int TO_STRING_MAX_WIDTH = 100;
private static final int TO_STRING_MAX_WIDTH = 110;
private static final Map<Class<?>, NodeInfo> CACHE = new LinkedHashMap<>();
@ -68,7 +68,7 @@ public abstract class NodeUtils {
// public Literal left() { return left; }
// public Literal right() { return right; }
// }
public static <T extends Node<T>> T copyTree(Node<T> tree, List<T> newChildren) {
static <T extends Node<T>> T copyTree(Node<T> tree, List<T> newChildren) {
Check.notNull(tree, "Non-null tree expected");
// basic sanity check
@ -190,7 +190,7 @@ public abstract class NodeUtils {
return props;
}
public static Object[] properties(Node<?> tree) {
static Object[] properties(Node<?> tree) {
return properties(tree, info(tree.getClass()));
}
@ -244,7 +244,11 @@ public abstract class NodeUtils {
}
String stringValue = Objects.toString(object);
if (maxWidth + stringValue.length() > TO_STRING_MAX_WIDTH) {
stringValue = stringValue.substring(0, Math.max(0, TO_STRING_MAX_WIDTH - maxWidth)) + "~";
int cutoff = Math.max(0, TO_STRING_MAX_WIDTH - maxWidth);
sb.append(stringValue.substring(0, cutoff));
sb.append("\n");
stringValue = stringValue.substring(cutoff);
maxWidth = 0;
}
maxWidth += stringValue.length();
sb.append(stringValue);

View File

@ -9,6 +9,7 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import static java.util.Collections.emptyList;
@ -46,7 +47,15 @@ public abstract class CollectionUtils {
List<T> list = new ArrayList<>();
for (Collection<? extends T> col : collections) {
list.addAll(col);
// typically AttributeSet which ends up iterating anyway plus creating a redundant array
if (col instanceof Set) {
for (T t : col) {
list.add(t);
}
}
else {
list.addAll(col);
}
}
return list;
}

View File

@ -6,6 +6,7 @@
package org.elasticsearch.xpack.sql.analysis.analyzer;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.xpack.sql.analysis.AnalysisException;
import org.elasticsearch.xpack.sql.analysis.catalog.Catalog;
import org.elasticsearch.xpack.sql.analysis.catalog.EsIndex;
@ -24,6 +25,7 @@ import java.util.Map;
import static java.util.Collections.singletonList;
@TestLogging("org.elasticsearch.xpack.sql:TRACE")
public class VerifierErrorMessagesTests extends ESTestCase {
private SqlParser parser;
@ -94,19 +96,51 @@ public class VerifierErrorMessagesTests extends ESTestCase {
assertEquals("1:26: Unknown column [xxx]", verify("SELECT * FROM test WHERE xxx = 1"));
}
public void testMissingColumnInOrderby() {
public void testMissingColumnInOrderBy() {
// xxx offset is that of the order by field
assertEquals("1:29: Unknown column [xxx]", verify("SELECT * FROM test ORDER BY xxx"));
}
public void testMissingColumnFunctionInOrderby() {
public void testMissingColumnFunctionInOrderBy() {
// xxx offset is that of the order by field
assertEquals("1:41: Unknown column [xxx]", verify("SELECT * FROM test ORDER BY DAY_oF_YEAR(xxx)"));
}
public void testMultipleColumns() {
// xxx offset is that of the order by field
assertEquals("1:43: Unknown column [xxx]\nline 1:8: Unknown column [xxx]",
verify("SELECT xxx FROM test GROUP BY DAY_oF_YEAR(xxx)"));
}
// GROUP BY
public void testGroupBySelectNonGrouped() {
assertEquals("1:8: Cannot use non-grouped column [text], expected [int]",
verify("SELECT text, int FROM test GROUP BY int"));
}
public void testGroupByOrderByNonGrouped() {
assertEquals("1:50: Cannot order by non-grouped column [bool], expected [text]",
verify("SELECT MAX(int) FROM test GROUP BY text ORDER BY bool"));
}
public void testGroupByOrderByScalarOverNonGrouped() {
assertEquals("1:50: Cannot order by non-grouped column [bool], expected [text]",
verify("SELECT MAX(int) FROM test GROUP BY text ORDER BY ABS(bool)"));
}
public void testGroupByHavingNonGrouped() {
assertEquals("1:48: Cannot filter by non-grouped column [int], expected [text]",
verify("SELECT AVG(int) FROM test GROUP BY text HAVING int > 10"));
}
public void testGroupByAggregate() {
assertEquals("1:36: Cannot use an aggregate [AVG] for grouping",
verify("SELECT AVG(int) FROM test GROUP BY AVG(int)"));
}
public void testGroupByScalarFunctionWithAggOnTarget() {
assertEquals("1:31: Cannot use an aggregate [AVG] for grouping",
verify("SELECT int FROM test GROUP BY AVG(int) + 2"));
}
}

View File

@ -0,0 +1,181 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.expression;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.sql.tree.Location;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import static java.util.stream.Collectors.toList;
import static org.hamcrest.Matchers.arrayContaining;
import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
public class AttributeMapTests extends ESTestCase {
private static Attribute a(String name) {
return new UnresolvedAttribute(Location.EMPTY, name);
}
private static AttributeMap<String> threeMap() {
Map<Attribute, String> map = new LinkedHashMap<>();
map.put(a("one"), "one");
map.put(a("two"), "two");
map.put(a("three"), "three");
return new AttributeMap<>(map);
}
public void testEmptyConstructor() {
AttributeMap<Object> m = new AttributeMap<>();
assertThat(m.size(), is(0));
assertThat(m.isEmpty(), is(true));
}
public void testMapConstructor() {
Map<Attribute, String> map = new LinkedHashMap<>();
map.put(a("one"), "one");
map.put(a("two"), "two");
map.put(a("three"), "three");
AttributeMap<String> m = new AttributeMap<>(map);
assertThat(m.size(), is(3));
assertThat(m.isEmpty(), is(false));
Attribute one = m.keySet().iterator().next();
assertThat(m.containsKey(one), is(true));
assertThat(m.containsKey(a("one")), is(false));
assertThat(m.containsValue("one"), is(true));
assertThat(m.containsValue("on"), is(false));
assertThat(m.attributeNames(), contains("one", "two", "three"));
assertThat(m.values(), contains(map.values().toArray()));
// defensive copying
map.put(a("four"), "four");
assertThat(m.size(), is(3));
assertThat(m.isEmpty(), is(false));
}
public void testSingleItemConstructor() {
Attribute one = a("one");
AttributeMap<String> m = new AttributeMap<>(one, "one");
assertThat(m.size(), is(1));
assertThat(m.isEmpty(), is(false));
assertThat(m.containsKey(one), is(true));
assertThat(m.containsKey(a("one")), is(false));
assertThat(m.containsValue("one"), is(true));
assertThat(m.containsValue("on"), is(false));
}
public void testSubstract() {
AttributeMap<String> m = threeMap();
AttributeMap<String> mo = new AttributeMap<>(m.keySet().iterator().next(), "one");
AttributeMap<String> empty = new AttributeMap<>();
assertThat(m.substract(empty), is(m));
assertThat(m.substract(m), is(empty));
assertThat(mo.substract(m), is(empty));
AttributeMap<String> substract = m.substract(mo);
assertThat(substract.size(), is(2));
assertThat(substract.attributeNames(), contains("two", "three"));
}
public void testIntersect() {
AttributeMap<String> m = threeMap();
AttributeMap<String> mo = new AttributeMap<>(m.keySet().iterator().next(), "one");
AttributeMap<String> empty = new AttributeMap<>();
assertThat(m.intersect(empty), is(empty));
assertThat(m.intersect(m), is(m));
assertThat(mo.intersect(m), is(mo));
}
public void testSubsetOf() {
AttributeMap<String> m = threeMap();
AttributeMap<String> mo = new AttributeMap<>(m.keySet().iterator().next(), "one");
AttributeMap<String> empty = new AttributeMap<>();
assertThat(m.subsetOf(empty), is(false));
assertThat(m.subsetOf(m), is(true));
assertThat(mo.subsetOf(m), is(true));
assertThat(empty.subsetOf(m), is(true));
assertThat(mo.subsetOf(m), is(true));
}
public void testKeySet() {
Attribute one = a("one");
Attribute two = a("two");
Attribute three = a("three");
Map<Attribute, String> map = new LinkedHashMap<>();
map.put(one, "one");
map.put(two, "two");
map.put(three, "three");
Set<Attribute> keySet = new AttributeMap<>(map).keySet();
assertThat(keySet, contains(one, two, three));
// toObject
Object[] array = keySet.toArray();
assertThat(array, arrayWithSize(3));
assertThat(array, arrayContaining(one, two, three));
}
public void testValues() {
AttributeMap<String> m = threeMap();
Collection<String> values = m.values();
assertThat(values, hasSize(3));
assertThat(values, contains("one", "two", "three"));
}
public void testEntrySet() {
Attribute one = a("one");
Attribute two = a("two");
Attribute three = a("three");
Map<Attribute, String> map = new LinkedHashMap<>();
map.put(one, "one");
map.put(two, "two");
map.put(three, "three");
Set<Entry<Attribute, String>> set = new AttributeMap<>(map).entrySet();
assertThat(set, hasSize(3));
List<Attribute> keys = set.stream().map(Map.Entry::getKey).collect(toList());
List<String> values = set.stream().map(Map.Entry::getValue).collect(toList());
assertThat(keys, hasSize(3));
assertThat(values, hasSize(3));
assertThat(values, contains("one", "two", "three"));
}
public void testForEach() {
AttributeMap<String> m = threeMap();
Map<Attribute, String> collect = new LinkedHashMap<>();
m.forEach(collect::put);
AttributeMap<String> copy = new AttributeMap<>(collect);
assertThat(m, is(copy));
}
}