SQL: update support for nested docs (elastic/x-pack-elasticsearch#3546)
* SQL: update support for nested docs
  - Expand sample data with nested docs
  - Optimize source generation for nested parents: nested object fields are specified in inner hits and don't need to be specified in the top-level query. Furthermore, parent source fetching is disabled when not needed.
  - ComputingFieldRef needs to return the hitName of its wrapped children, otherwise nested values cannot be extracted
  - Disable GROUP BY/HAVING on nested fields
  - Update FieldAttribute requirements in the random tests

Original commit: elastic/x-pack-elasticsearch@e44951b5f6
This commit is contained in: parent c4474f8574 · commit 7d3c6a778f
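Before the diff itself: the core idea is that a nested field's values come back inside a search hit's inner hits rather than the top-level _source, so extractors have to carry the inner-hit name along. A minimal stand-alone sketch of that lookup (illustrative names only, not the x-pack classes; the real ones are FieldHitExtractor/ComputingHitExtractor below):

    import java.util.Map;

    // Sketch: resolve a nested field's value relative to its inner hit.
    class InnerHitLookup {
        static Object firstValue(Map<String, Object> innerHitSource, String fieldName, String hitName) {
            // for hitName "dep" and fieldName "dep.dep_name", only the relative
            // part "dep_name" is looked up inside the inner hit's source
            String relative = hitName == null ? fieldName : fieldName.substring(hitName.length() + 1);
            return innerHitSource.get(relative);
        }
    }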
@@ -48,7 +48,8 @@ public abstract class CsvSpecTestCase extends SpecBaseIntegrationTestCase {
                 readScriptSpec("/columns.csv-spec", parser),
                 readScriptSpec("/datetime.csv-spec", parser),
                 readScriptSpec("/alias.csv-spec", parser),
-                readScriptSpec("/nulls.csv-spec", parser)
+                readScriptSpec("/nulls.csv-spec", parser),
+                readScriptSpec("/nested.csv-spec", parser)
                 );
     }
@@ -21,8 +21,11 @@ import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.net.URL;
 import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;

 import static java.util.Collections.emptyMap;
+import static java.util.Collections.singletonMap;

@@ -43,6 +46,13 @@ public class DataLoader {
         makeAlias(client, "test_alias_emp", "test_emp", "test_emp_copy");
     }

+    private static void createString(String name, XContentBuilder builder) throws Exception {
+        builder.startObject(name).field("type", "text")
+            .startObject("fields")
+                .startObject("keyword").field("type", "keyword").endObject()
+            .endObject()
+        .endObject();
+    }
     protected static void loadDatasetIntoEs(RestClient client, String index) throws Exception {
         XContentBuilder createIndex = JsonXContent.contentBuilder().startObject();
         createIndex.startObject("settings");

@@ -57,33 +67,87 @@ public class DataLoader {
         createIndex.startObject("properties");
         {
             createIndex.startObject("emp_no").field("type", "integer").endObject();
-            createIndex.startObject("first_name").field("type", "text").endObject();
-            createIndex.startObject("last_name").field("type", "text").endObject();
+            createString("first_name", createIndex);
+            createString("last_name", createIndex);
             createIndex.startObject("gender").field("type", "keyword").endObject();
             createIndex.startObject("birth_date").field("type", "date").endObject();
             createIndex.startObject("hire_date").field("type", "date").endObject();
             createIndex.startObject("salary").field("type", "integer").endObject();
             createIndex.startObject("languages").field("type", "byte").endObject();
+            {
+                createIndex.startObject("dep").field("type", "nested");
+                createIndex.startObject("properties");
+                createIndex.startObject("dep_id").field("type", "keyword").endObject();
+                createString("dep_name", createIndex);
+                createIndex.startObject("from_date").field("type", "date").endObject();
+                createIndex.startObject("to_date").field("type", "date").endObject();
+                createIndex.endObject();
+                createIndex.endObject();
+            }
         }
         createIndex.endObject();
     }
     createIndex.endObject();
 }
 createIndex.endObject().endObject();

 client.performRequest("PUT", "/" + index, emptyMap(), new StringEntity(createIndex.string(), ContentType.APPLICATION_JSON));

+        Map<String, String> deps = new LinkedHashMap<>();
+        csvToLines("departments", (titles, fields) -> deps.put(fields.get(0), fields.get(1)));
+
+        Map<String, List<List<String>>> dep_emp = new LinkedHashMap<>();
+        csvToLines("dep_emp", (titles, fields) -> {
+            String emp_no = fields.get(0);
+            List<List<String>> list = dep_emp.get(emp_no);
+            if (list == null) {
+                list = new ArrayList<>();
+                dep_emp.put(emp_no, list);
+            }
+            List<String> dep = new ArrayList<>();
+            // dep_id
+            dep.add(fields.get(1));
+            // dep_name (from departments)
+            dep.add(deps.get(fields.get(1)));
+            // from
+            dep.add(fields.get(2));
+            // to
+            dep.add(fields.get(3));
+            list.add(dep);
+        });
+
         StringBuilder bulk = new StringBuilder();
         csvToLines("employees", (titles, fields) -> {
             bulk.append("{\"index\":{}}\n");
             bulk.append('{');
+            String emp_no = fields.get(1);
             for (int f = 0; f < fields.size(); f++) {
                 if (f != 0) {
                     bulk.append(',');
                 }
                 bulk.append('"').append(titles.get(f)).append("\":\"").append(fields.get(f)).append('"');
             }
+            // append department
+            List<List<String>> list = dep_emp.get(emp_no);
+            if (!list.isEmpty()) {
+                bulk.append(", \"dep\" : [");
+                for (List<String> dp : list) {
+                    bulk.append("{");
+                    bulk.append("\"dep_id\":\"" + dp.get(0) + "\",");
+                    bulk.append("\"dep_name\":\"" + dp.get(1) + "\",");
+                    bulk.append("\"from_date\":\"" + dp.get(2) + "\",");
+                    bulk.append("\"to_date\":\"" + dp.get(3) + "\"");
+                    bulk.append("},");
+                }
+                // remove last ,
+                bulk.setLength(bulk.length() - 1);
+                bulk.append("]");
+            }
+
             bulk.append("}\n");
         });

         client.performRequest("POST", "/" + index + "/emp/_bulk", singletonMap("refresh", "true"),
                 new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON));
     }

@@ -119,5 +183,4 @@ public class DataLoader {
     public static InputStream readFromJarUrl(URL source) throws IOException {
         return source.openStream();
     }
-
 }
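For reference, a sketch of what createString(...) above contributes to the index mapping (reconstructed from the builder calls; the fragment, not an exact dump):

    // "first_name": { "type": "text",
    //                 "fields": { "keyword": { "type": "keyword" } } }
    // - this is why both first_name and first_name.keyword show up in the
    //   DESCRIBE output further down in this diff.
    XContentBuilder mapping = JsonXContent.contentBuilder().startObject();
    createString("first_name", mapping);
    mapping.endObject();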
@@ -103,41 +103,50 @@ public class JdbcAssert {
         int columns = metaData.getColumnCount();

         long count = 0;
-        for (count = 0; expected.next(); count++) {
-            assertTrue("Expected more data but no more entries found after [" + count + "]", actual.next());
+        try {
+            for (count = 0; expected.next(); count++) {
+                assertTrue("Expected more data but no more entries found after [" + count + "]", actual.next());

-            if (logger != null) {
-                logger.info(JdbcTestUtils.resultSetCurrentData(actual));
-            }
+                if (logger != null) {
+                    logger.info(JdbcTestUtils.resultSetCurrentData(actual));
+                }

-            for (int column = 1; column <= columns; column++) {
-                Object expectedObject = expected.getObject(column);
-                Object actualObject = actual.getObject(column);
-
-                int type = metaData.getColumnType(column);
-
-                String msg = "Different result for column [" + metaData.getColumnName(column) + "], entry [" + count + "]";
-
-                // handle nulls first
-                if (expectedObject == null || actualObject == null) {
-                    assertEquals(expectedObject, actualObject);
-                }
-                // then timestamp
-                else if (type == Types.TIMESTAMP || type == Types.TIMESTAMP_WITH_TIMEZONE) {
-                    assertEquals(getTime(expected, column), getTime(actual, column));
-                }
-                // and floats/doubles
-                else if (type == Types.DOUBLE) {
-                    // the 1d/1f difference is used due to rounding/flooring
-                    assertEquals(msg, (double) expectedObject, (double) actualObject, 1d);
-                } else if (type == Types.FLOAT) {
-                    assertEquals(msg, (float) expectedObject, (float) actualObject, 1f);
-                }
-                // finally the actual comparison
-                else {
-                    assertEquals(msg, expectedObject, actualObject);
-                }
-            }
-        }
+                for (int column = 1; column <= columns; column++) {
+                    Object expectedObject = expected.getObject(column);
+                    Object actualObject = actual.getObject(column);
+
+                    int type = metaData.getColumnType(column);
+
+                    String msg = format(Locale.ROOT, "Different result for column [" + metaData.getColumnName(column) + "], entry [" + count
+                            + "]; " + "expected %s but was %s", expectedObject, actualObject);
+
+                    // handle nulls first
+                    if (expectedObject == null || actualObject == null) {
+                        assertEquals(expectedObject, actualObject);
+                    }
+                    // then timestamp
+                    else if (type == Types.TIMESTAMP || type == Types.TIMESTAMP_WITH_TIMEZONE) {
+                        assertEquals(getTime(expected, column), getTime(actual, column));
+                    }
+                    // and floats/doubles
+                    else if (type == Types.DOUBLE) {
+                        // the 1d/1f difference is used due to rounding/flooring
+                        assertEquals(msg, (double) expectedObject, (double) actualObject, 1d);
+                    } else if (type == Types.FLOAT) {
+                        assertEquals(msg, (float) expectedObject, (float) actualObject, 1f);
+                    }
+                    // finally the actual comparison
+                    else {
+                        assertEquals(msg, expectedObject, actualObject);
+                    }
+                }
+            }
+        } catch (AssertionError ae) {
+            if (logger != null && actual.next()) {
+                logger.info("^^^ Assertion failure ^^^");
+                logger.info(JdbcTestUtils.resultSetCurrentData(actual));
+            }
+            throw ae;
+        }

         if (actual.next()) {
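The 1d/1f deltas above make floating-point comparisons tolerant of rounding/flooring differences between the expected and actual result sets. In plain JUnit terms (illustrative values, not from the test data):

    // passes: |10.0 - 10.6| <= 1d, the delta allowed for DOUBLE columns above
    assertEquals("Different result for column [salary], entry [3]", 10.0d, 10.6d, 1d);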
@@ -39,14 +39,6 @@ public abstract class SqlSpecTestCase extends SpecBaseIntegrationTestCase {
                 );
     }

-    // TODO: add tests for nested docs when interplug communication is enabled
-    // "DESCRIBE emp.emp",
-    // "SELECT dep FROM emp.emp",
-    // "SELECT dep.dept_name, first_name, last_name FROM emp.emp WHERE emp_no = 10020",
-    // "SELECT first_name f, last_name l, dep.from_date FROM emp.emp WHERE dep.dept_name = 'Production' ORDER BY dep.from_date",
-    // "SELECT first_name f, last_name l, YEAR(dep.from_date) start "
-    //     + "FROM emp.emp WHERE dep.dept_name = 'Production' AND tenure > 30 ORDER BY start"
-
     private static class SqlSpecParser implements Parser {
         @Override
         public Object parse(String line) {
@@ -28,14 +28,22 @@ DESCRIBE test_alias;

 column:s             | type:s

-birth_date           | TIMESTAMP
-emp_no               | INTEGER
-first_name           | VARCHAR
-gender               | VARCHAR
-hire_date            | TIMESTAMP
-languages            | TINYINT
-last_name            | VARCHAR
-salary               | INTEGER
+birth_date           | TIMESTAMP
+dep                  | STRUCT
+dep.dep_id           | VARCHAR
+dep.dep_name         | VARCHAR
+dep.dep_name.keyword | VARCHAR
+dep.from_date        | TIMESTAMP
+dep.to_date          | TIMESTAMP
+emp_no               | INTEGER
+first_name           | VARCHAR
+first_name.keyword   | VARCHAR
+gender               | VARCHAR
+hire_date            | TIMESTAMP
+languages            | TINYINT
+last_name            | VARCHAR
+last_name.keyword    | VARCHAR
+salary               | INTEGER
 ;

 describePattern

@@ -43,14 +51,22 @@ DESCRIBE test_*;

 column:s             | type:s

-birth_date           | TIMESTAMP
-emp_no               | INTEGER
-first_name           | VARCHAR
-gender               | VARCHAR
-hire_date            | TIMESTAMP
-languages            | TINYINT
-last_name            | VARCHAR
-salary               | INTEGER
+birth_date           | TIMESTAMP
+dep                  | STRUCT
+dep.dep_id           | VARCHAR
+dep.dep_name         | VARCHAR
+dep.dep_name.keyword | VARCHAR
+dep.from_date        | TIMESTAMP
+dep.to_date          | TIMESTAMP
+emp_no               | INTEGER
+first_name           | VARCHAR
+first_name.keyword   | VARCHAR
+gender               | VARCHAR
+hire_date            | TIMESTAMP
+languages            | TINYINT
+last_name            | VARCHAR
+last_name.keyword    | VARCHAR
+salary               | INTEGER
 ;

 showAlias
@@ -118,13 +118,21 @@ test_emp |INDEX
 describe
 DESCRIBE "test_emp";

-column:s       | type:s
-birth_date     |TIMESTAMP
-emp_no         |INTEGER
-first_name     |VARCHAR
-gender         |VARCHAR
-hire_date      |TIMESTAMP
-languages      |TINYINT
-last_name      |VARCHAR
-salary         |INTEGER
-;
+column:s             | type:s
+birth_date           | TIMESTAMP
+dep                  | STRUCT
+dep.dep_id           | VARCHAR
+dep.dep_name         | VARCHAR
+dep.dep_name.keyword | VARCHAR
+dep.from_date        | TIMESTAMP
+dep.to_date          | TIMESTAMP
+emp_no               | INTEGER
+first_name           | VARCHAR
+first_name.keyword   | VARCHAR
+gender               | VARCHAR
+hire_date            | TIMESTAMP
+languages            | TINYINT
+last_name            | VARCHAR
+last_name.keyword    | VARCHAR
+salary               | INTEGER
+;
@@ -3,10 +3,18 @@
 //

 debug
 //SHOW TABLES 'test_emp';
 //SELECT * FROM t*;
-SHOW TABLES LIKE 'test\_alias' ESCAPE '\';
+SELECT first_name f, last_name l, dep.from_date d FROM test_emp WHERE dep.dep_name = 'Production' ORDER BY f LIMIT 5;

-table:s
-test_emp
-;
+f:s          | l:s        | d:ts
+
+Alain        | Chappelet  | 589420800000
+Chirstian    | Koblick    | 533779200000
+Duangkaew    | Piveteau   | 848793600000
+Elvis        | Demeyer    | 761443200000
+Gino         | Leonhardt  | 607996800000
+;
+
+//SELECT YEAR(dep.from_date) start FROM test_emp WHERE dep.dep_name = 'Production' GROUP BY start LIMIT 5;
+//table:s
+//test_emp
+//;
@@ -0,0 +1,111 @@
emp_no,dep_id,from_date,to_date
10001,d005,1986-06-26,9999-01-01
10002,d007,1996-08-03,9999-01-01
10003,d004,1995-12-03,9999-01-01
10004,d004,1986-12-01,9999-01-01
10005,d003,1989-09-12,9999-01-01
10006,d005,1990-08-05,9999-01-01
10007,d008,1989-02-10,9999-01-01
10008,d005,1998-03-11,2000-07-31
10009,d006,1985-02-18,9999-01-01
10010,d004,1996-11-24,2000-06-26
10010,d006,2000-06-26,9999-01-01
10011,d009,1990-01-22,1996-11-09
10012,d005,1992-12-18,9999-01-01
10013,d003,1985-10-20,9999-01-01
10014,d005,1993-12-29,9999-01-01
10015,d008,1992-09-19,1993-08-22
10016,d007,1998-02-11,9999-01-01
10017,d001,1993-08-03,9999-01-01
10018,d004,1992-07-29,9999-01-01
10018,d005,1987-04-03,1992-07-29
10019,d008,1999-04-30,9999-01-01
10020,d004,1997-12-30,9999-01-01
10021,d005,1988-02-10,2002-07-15
10022,d005,1999-09-03,9999-01-01
10023,d005,1999-09-27,9999-01-01
10024,d004,1998-06-14,9999-01-01
10025,d005,1987-08-17,1997-10-15
10026,d004,1995-03-20,9999-01-01
10027,d005,1995-04-02,9999-01-01
10028,d005,1991-10-22,1998-04-06
10029,d004,1991-09-18,1999-07-08
10029,d006,1999-07-08,9999-01-01
10030,d004,1994-02-17,9999-01-01
10031,d005,1991-09-01,9999-01-01
10032,d004,1990-06-20,9999-01-01
10033,d006,1987-03-18,1993-03-24
10034,d007,1995-04-12,1999-10-31
10035,d004,1988-09-05,9999-01-01
10036,d003,1992-04-28,9999-01-01
10037,d005,1990-12-05,9999-01-01
10038,d009,1989-09-20,9999-01-01
10039,d003,1988-01-19,9999-01-01
10040,d005,1993-02-14,2002-01-22
10040,d008,2002-01-22,9999-01-01
10041,d007,1989-11-12,9999-01-01
10042,d002,1993-03-21,2000-08-10
10043,d005,1990-10-20,9999-01-01
10044,d004,1994-05-21,9999-01-01
10045,d004,1996-11-16,9999-01-01
10046,d008,1992-06-20,9999-01-01
10047,d004,1989-03-31,9999-01-01
10048,d005,1985-02-24,1987-01-27
10049,d009,1992-05-04,9999-01-01
10050,d002,1990-12-25,1992-11-05
10050,d007,1992-11-05,9999-01-01
10051,d004,1992-10-15,9999-01-01
10052,d008,1997-01-31,9999-01-01
10053,d007,1994-11-13,9999-01-01
10054,d003,1995-07-29,9999-01-01
10055,d001,1992-04-27,1995-07-22
10056,d005,1990-02-01,9999-01-01
10057,d005,1992-01-15,9999-01-01
10058,d001,1988-04-25,9999-01-01
10059,d002,1991-06-26,9999-01-01
10060,d007,1989-05-28,1992-11-11
10060,d009,1992-11-11,9999-01-01
10061,d007,1989-12-02,9999-01-01
10062,d005,1991-08-30,9999-01-01
10063,d004,1989-04-08,9999-01-01
10064,d008,1985-11-20,1992-03-02
10065,d005,1998-05-24,9999-01-01
10066,d005,1986-02-26,9999-01-01
10067,d006,1987-03-04,9999-01-01
10068,d007,1987-08-07,9999-01-01
10069,d004,1992-06-14,9999-01-01
10070,d005,1985-10-14,1995-10-18
10070,d008,1995-10-18,9999-01-01
10071,d003,1995-08-05,9999-01-01
10072,d005,1989-05-21,9999-01-01
10073,d006,1998-02-02,1998-02-22
10074,d005,1990-08-13,9999-01-01
10075,d005,1988-05-17,2001-01-15
10076,d005,1996-07-15,9999-01-01
10077,d003,1994-12-23,9999-01-01
10078,d005,1994-09-29,9999-01-01
10079,d005,1995-12-13,9999-01-01
10080,d002,1994-09-28,1997-07-09
10080,d003,1997-07-09,9999-01-01
10081,d004,1986-10-30,9999-01-01
10082,d008,1990-01-03,1990-01-15
10083,d004,1987-03-31,9999-01-01
10084,d004,1995-12-15,9999-01-01
10085,d004,1994-04-09,9999-01-01
10086,d003,1992-02-19,9999-01-01
10087,d007,1997-05-08,2001-01-09
10088,d007,1988-09-02,1992-03-21
10088,d009,1992-03-21,9999-01-01
10089,d007,1989-01-10,9999-01-01
10090,d005,1986-03-14,1999-05-07
10091,d005,1992-11-18,9999-01-01
10092,d005,1996-04-22,9999-01-01
10093,d007,1997-06-08,9999-01-01
10094,d008,1987-04-18,1997-11-08
10095,d007,1994-03-10,9999-01-01
10096,d004,1999-01-23,9999-01-01
10097,d008,1990-09-15,9999-01-01
10098,d004,1985-05-13,1989-06-29
10098,d009,1989-06-29,1992-12-11
10099,d007,1988-10-18,9999-01-01
10100,d003,1987-09-21,9999-01-01
@@ -0,0 +1,10 @@
dep_id,dep_name
d001,Marketing
d002,Finance
d003,Human Resources
d004,Production
d005,Development
d006,Quality Management
d007,Sales
d008,Research
d009,Customer Service
@@ -0,0 +1,112 @@
//
// Nested documents
//
// CsvJdbc has issues with foo.bar so most fields are aliases or wrapped inside a function

describeParent
DESCRIBE test_emp;

column               | type

birth_date           | TIMESTAMP
dep                  | STRUCT
dep.dep_id           | VARCHAR
dep.dep_name         | VARCHAR
dep.dep_name.keyword | VARCHAR
dep.from_date        | TIMESTAMP
dep.to_date          | TIMESTAMP
emp_no               | INTEGER
first_name           | VARCHAR
first_name.keyword   | VARCHAR
gender               | VARCHAR
hire_date            | TIMESTAMP
languages            | TINYINT
last_name            | VARCHAR
last_name.keyword    | VARCHAR
salary               | INTEGER
;

// disable until we figure out how to use field names with . in their name
//nestedStar
//SELECT dep.* FROM test_emp ORDER BY dep.dep_id LIMIT 5;

//dep.dep_id:s | dep.dep_name:s | dep.from_date:ts | dep.to_date:ts

//d001 | Marketing | 744336000000 | 253370764800000
//d001 | Marketing | 704332800000 | 806371200000
//d001 | Marketing | 577929600000 | 253370764800000
//d002 | Finance   | 732672000000 | 965865600000
//d007 | Sales     | 720921600000 | 253370764800000
//;

filterPerNestedWithOrderByTopLevel
SELECT first_name f, last_name l, YEAR(dep.from_date) d FROM test_emp WHERE dep.dep_name = 'Production' ORDER BY f LIMIT 5;

f:s          | l:s        | d:i

Alain        | Chappelet  | 1988
Chirstian    | Koblick    | 1986
Duangkaew    | Piveteau   | 1996
Elvis        | Demeyer    | 1994
Gino         | Leonhardt  | 1989
;

filterPerNestedWithOrderByNested
SELECT first_name f, last_name l, YEAR(dep.from_date) d FROM test_emp WHERE dep.dep_name = 'Production' ORDER BY dep.from_date LIMIT 5;

f:s          | l:s        | d:i

Sreekrishna  | Servieres  | 1985
Zhongwei     | Rosen      | 1986
Chirstian    | Koblick    | 1986
Vishv        | Zockler    | 1987
Alain        | Chappelet  | 1988
;

filterPerNestedWithOrderByNestedWithAlias
SELECT first_name f, dep.dep_id i, MONTH(dep.from_date) d FROM test_emp WHERE dep.dep_name = 'Production' ORDER BY i LIMIT 5;

f:s          | i:s   | d:i

Parto        | d004  | 12
Chirstian    | d004  | 12
Duangkaew    | d004  | 11
Kazuhide     | d004  | 7
Mayuko       | d004  | 12
;

filterPerNestedWithOrderByNestedWithoutProjection
SELECT first_name f, MONTH(dep.from_date) d FROM test_emp WHERE dep.dep_name = 'Production' ORDER BY dep.dep_id LIMIT 5;

f:s          | d:i

Parto        | 12
Chirstian    | 12
Duangkaew    | 11
Kazuhide     | 7
Mayuko       | 12
;

selectWithScalarOnNested
SELECT first_name f, last_name l, YEAR(dep.from_date) start FROM test_emp WHERE dep.dep_name = 'Production' AND languages > 1 ORDER BY start LIMIT 5;

f:s          | l:s        | start:i

Sreekrishna  | Servieres  | 1985
Zhongwei     | Rosen      | 1986
Chirstian    | Koblick    | 1986
Alain        | Chappelet  | 1988
Zvonko       | Nyanchama  | 1989
;

selectWithScalarOnNestedWithoutProjection
SELECT first_name f, last_name l FROM test_emp WHERE dep.dep_name = 'Production' AND languages > 1 ORDER BY YEAR(dep.from_date) LIMIT 5;

f:s          | l:s

Sreekrishna  | Servieres
Zhongwei     | Rosen
Chirstian    | Koblick
Alain        | Chappelet
Zvonko       | Nyanchama
;
@@ -47,7 +47,7 @@ import org.elasticsearch.xpack.sql.rule.RuleExecutor;
 import org.elasticsearch.xpack.sql.tree.Node;
 import org.elasticsearch.xpack.sql.type.DataType;
 import org.elasticsearch.xpack.sql.type.DataTypeConversion;
-import org.elasticsearch.xpack.sql.type.UnsupportedDataType;
+import org.elasticsearch.xpack.sql.type.DataTypes;
 import org.elasticsearch.xpack.sql.util.StringUtils;
 import org.joda.time.DateTimeZone;

@@ -352,13 +352,13 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
                 // if it's an object/compound type, keep it unresolved with a nice error message
                 if (named instanceof FieldAttribute) {
                     FieldAttribute fa = (FieldAttribute) named;
-                    if (fa.dataType() instanceof UnsupportedDataType) {
+                    if (DataTypes.isUnsupported(fa.dataType())) {
                         named = u.withUnresolvedMessage(
-                                "Cannot use field [" + fa.name() + "], its type [" + fa.dataType().esName() + "] is unsupported");
+                                "Cannot use field [" + fa.name() + "] type [" + fa.dataType().esName() + "] as is unsupported");
                     }
                     else if (!fa.dataType().isPrimitive()) {
                         named = u.withUnresolvedMessage(
-                                "Cannot use field [" + fa.name() + "], type [" + fa.dataType().esName() + "] only its subfields");
+                                "Cannot use field [" + fa.name() + "] type [" + fa.dataType().esName() + "] only its subfields");
                     }
                 }

@@ -407,11 +407,11 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
             // filter the attributes that match based on their path
             if (attr instanceof FieldAttribute) {
                 FieldAttribute fa = (FieldAttribute) attr;
-                if (fa.dataType() instanceof UnsupportedDataType) {
+                if (DataTypes.isUnsupported(fa.dataType())) {
                     continue;
                 }
                 if (q.qualifier() != null) {
-                    if (Objects.equals(q.qualifiedName(), fa.qualifiedName())) {
+                    if (Objects.equals(q.qualifiedName(), fa.qualifiedPath())) {
                         expanded.add(fa.withLocation(attr.location()));
                     }
                 } else {

@@ -424,14 +424,15 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
             }
         } else {
             // add only primitives
-            // but filter out multi fields
+            // but filter out multi fields (allow only the top-level value)
             Set<Attribute> seenMultiFields = new LinkedHashSet<>();

             for (Attribute a : output) {
-                if (!(a.dataType() instanceof UnsupportedDataType) && a.dataType().isPrimitive()) {
+                if (!DataTypes.isUnsupported(a.dataType()) && a.dataType().isPrimitive()) {
                     if (a instanceof FieldAttribute) {
                         FieldAttribute fa = (FieldAttribute) a;
-                        if (!seenMultiFields.contains(fa.parent())) {
+                        // skip nested fields and seen multi-fields
+                        if (!fa.isNested() && !seenMultiFields.contains(fa.parent())) {
                             expanded.add(a);
                             seenMultiFields.add(a);
                         }
@@ -7,9 +7,11 @@ package org.elasticsearch.xpack.sql.analysis.analyzer;

 import org.elasticsearch.xpack.sql.capabilities.Unresolvable;
 import org.elasticsearch.xpack.sql.expression.Attribute;
+import org.elasticsearch.xpack.sql.expression.AttributeSet;
 import org.elasticsearch.xpack.sql.expression.Exists;
 import org.elasticsearch.xpack.sql.expression.Expression;
 import org.elasticsearch.xpack.sql.expression.Expressions;
+import org.elasticsearch.xpack.sql.expression.FieldAttribute;
 import org.elasticsearch.xpack.sql.expression.UnresolvedAttribute;
 import org.elasticsearch.xpack.sql.expression.function.Function;
 import org.elasticsearch.xpack.sql.expression.function.FunctionAttribute;

@@ -34,6 +36,7 @@ import java.util.Locale;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
+import java.util.function.Consumer;

 import static java.lang.String.format;

@@ -167,15 +170,8 @@ abstract class Verifier {
         // if there are no (major) unresolved failures, do more in-depth analysis

         if (failures.isEmpty()) {
-            Map<String, Function> resolvedFunctions = new LinkedHashMap<>();
-
-            // collect Function to better reason about encountered attributes
-            plan.forEachExpressionsDown(e -> {
-                if (e.resolved() && e instanceof Function) {
-                    Function f = (Function) e;
-                    resolvedFunctions.put(f.functionId(), f);
-                }
-            });
+            Map<String, Function> resolvedFunctions = Functions.collectFunctions(plan);

             // for filtering out duplicated errors
             final Set<LogicalPlan> groupingFailures = new LinkedHashSet<>();

@@ -198,6 +194,8 @@ abstract class Verifier {

                 checkForScoreInsideFunctions(p, localFailures);

+                checkNestedUsedInGroupByOrHaving(p, localFailures);
+
                 // everything checks out
                 // mark the plan as analyzed
                 if (localFailures.isEmpty()) {

@@ -390,4 +388,34 @@ abstract class Verifier {
                 .forEach(exp -> localFailures.add(fail(exp, "[SCORE()] cannot be an argument to a function"))),
             Function.class));
         }
     }
+
+    private static void checkNestedUsedInGroupByOrHaving(LogicalPlan p, Set<Failure> localFailures) {
+        List<FieldAttribute> nested = new ArrayList<>();
+        Consumer<FieldAttribute> match = fa -> {
+            if (fa.isNested()) {
+                nested.add(fa);
+            }
+        };
+
+        // nested fields shouldn't be used in aggregates or having (yet)
+        p.forEachDown(a -> a.groupings().forEach(agg -> agg.forEachUp(match, FieldAttribute.class)), Aggregate.class);
+
+        if (!nested.isEmpty()) {
+            localFailures.add(
+                fail(nested.get(0), "Grouping isn't (yet) compatible with nested fields " + new AttributeSet(nested).names()));
+            nested.clear();
+        }
+
+        // check in having
+        p.forEachDown(f -> {
+            if (f.child() instanceof Aggregate) {
+                f.condition().forEachUp(match, FieldAttribute.class);
+            }
+        }, Filter.class);
+
+        if (!nested.isEmpty()) {
+            localFailures.add(
+                fail(nested.get(0), "HAVING isn't (yet) compatible with nested fields " + new AttributeSet(nested).names()));
+        }
+    }
 }
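Concretely, the new check turns GROUP BY/HAVING on nested fields into verification failures; the exact messages are pinned down in VerifierErrorMessagesTests near the end of this diff. A condensed view (same queries as those tests):

    // "SELECT dep.dep_id FROM test GROUP BY dep.dep_id"
    //   -> 1:38: Grouping isn't (yet) compatible with nested fields [dep.dep_id]
    // "SELECT int FROM test GROUP BY int HAVING AVG(YEAR(dep.start_date)) > 1980"
    //   -> 1:51: HAVING isn't (yet) compatible with nested fields [dep.start_date]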
@@ -48,7 +48,9 @@ import org.elasticsearch.xpack.sql.type.Schema;
 import org.elasticsearch.xpack.sql.util.StringUtils;

 import java.util.ArrayList;
+import java.util.LinkedHashSet;
 import java.util.List;
+import java.util.Set;
 import java.util.function.Supplier;
 // TODO: add retry/back-off
 public class Scroller {

@@ -276,9 +278,23 @@ public class Scroller {

         if (ref instanceof ComputedRef) {
             ProcessorDefinition proc = ((ComputedRef) ref).processor();
-            proc = proc.transformDown(l -> new HitExtractorInput(l.expression(),
-                    createExtractor(l.context())), ReferenceInput.class);
-            return new ComputingHitExtractor(proc.asProcessor());
+            // collect hitNames
+            Set<String> hitNames = new LinkedHashSet<>();
+            proc = proc.transformDown(l -> {
+                HitExtractor he = createExtractor(l.context());
+                hitNames.add(he.hitName());
+
+                if (hitNames.size() > 1) {
+                    throw new SqlIllegalArgumentException("Multi-level nested fields [%s] not supported yet", hitNames);
+                }
+
+                return new HitExtractorInput(l.expression(), he);
+            }, ReferenceInput.class);
+            String hitName = null;
+            if (hitNames.size() == 1) {
+                hitName = hitNames.iterator().next();
+            }
+            return new ComputingHitExtractor(proc.asProcessor(), hitName);
         }

         throw new SqlIllegalArgumentException("Unexpected ValueReference %s", ref.getClass());
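The guard above means a computed column may reference fields from at most one nested parent. A stand-alone sketch of the same collect-and-cap pattern (illustrative types, not the x-pack classes):

    import java.util.LinkedHashSet;
    import java.util.Set;

    class HitNameCollector {
        // returns the single hit name, or null when nothing nested is referenced
        static String single(Iterable<String> referencedHitNames) {
            Set<String> hitNames = new LinkedHashSet<>();
            for (String name : referencedHitNames) {
                hitNames.add(name);
                if (hitNames.size() > 1) {
                    // mirrors the SqlIllegalArgumentException thrown above
                    throw new IllegalArgumentException("Multi-level nested fields " + hitNames + " not supported yet");
                }
            }
            return hitNames.isEmpty() ? null : hitNames.iterator().next();
        }
    }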
@@ -62,6 +62,7 @@ public abstract class SourceGenerator {
         // need to be retrieved from the result documents
         container.columns().forEach(cr -> cr.collectFields(sortBuilder));
         sortBuilder.build(source);
+        optimize(sortBuilder, source);

         // add the aggs
         Aggs aggs = container.aggs();

@@ -127,12 +128,9 @@ public abstract class SourceGenerator {
             fa = fa.isInexact() ? fa.exactAttribute() : fa;

-            if (!fa.isNested()) {
-                sortBuilder = fieldSort(fa.name());
-            } else {
+            sortBuilder = fieldSort(fa.name());
+            if (fa.isNested()) {
                 FieldSortBuilder fieldSort = fieldSort(fa.name());
-                String nestedPath = fa.nestedParent().path();
-                NestedSortBuilder newSort = new NestedSortBuilder(nestedPath);
+                NestedSortBuilder newSort = new NestedSortBuilder(fa.nestedParent().name());
                 NestedSortBuilder nestedSort = fieldSort.getNestedSort();

                 if (nestedSort == null) {

@@ -166,13 +164,25 @@ public abstract class SourceGenerator {
         }
     }

-    private static void optimize(QueryContainer query, SearchSourceBuilder source) {
-        // if only aggs are needed, don't retrieve any docs
-        if (query.isAggsOnly()) {
-            source.size(0);
-            // disable source fetching (only doc values are used)
-            source.fetchSource(FetchSourceContext.DO_NOT_FETCH_SOURCE);
-            source.storedFields(NO_STORED_FIELD);
+    private static void optimize(SqlSourceBuilder sqlSource, SearchSourceBuilder builder) {
+        if (sqlSource.sourceFields.isEmpty()) {
+            disableSource(builder);
+        }
+    }
+
+    private static void optimize(QueryContainer query, SearchSourceBuilder builder) {
+        // if only aggs are needed, don't retrieve any docs
+        if (query.isAggsOnly()) {
+            builder.size(0);
+            // disable source fetching (only doc values are used)
+            disableSource(builder);
+        }
+    }
+
+    private static void disableSource(SearchSourceBuilder builder) {
+        builder.fetchSource(FetchSourceContext.DO_NOT_FETCH_SOURCE);
+        if (builder.storedFields() == null) {
+            builder.storedFields(NO_STORED_FIELD);
         }
     }
 }
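With the change above, a sort on a nested field is scoped to its nested parent via NestedSortBuilder. Roughly (an assumed-equivalent sketch using the standard Elasticsearch sort builders):

    import org.elasticsearch.search.sort.FieldSortBuilder;
    import org.elasticsearch.search.sort.NestedSortBuilder;
    import org.elasticsearch.search.sort.SortBuilders;

    // ORDER BY dep.from_date -> a field sort nested under the "dep" parent
    FieldSortBuilder sort = SortBuilders.fieldSort("dep.from_date")
            .setNestedSort(new NestedSortBuilder("dep"));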
@@ -8,10 +8,7 @@ package org.elasticsearch.xpack.sql.execution.search;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
-import org.elasticsearch.xpack.sql.execution.search.FieldExtraction;

-import java.util.HashMap;
-import java.util.HashSet;
 import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;
 import java.util.Map;

@@ -67,8 +64,10 @@ public class SqlSourceBuilder {
      */
     public void build(SearchSourceBuilder sourceBuilder) {
         sourceBuilder.trackScores(this.trackScores);
-        sourceBuilder.fetchSource(sourceFields.toArray(Strings.EMPTY_ARRAY), null);
-        docFields.forEach(dvf -> sourceBuilder.docValueField(dvf));
-        scriptFields.forEach((k, v) -> sourceBuilder.scriptField(k, v));
+        if (!sourceFields.isEmpty()) {
+            sourceBuilder.fetchSource(sourceFields.toArray(Strings.EMPTY_ARRAY), null);
+        }
+        docFields.forEach(sourceBuilder::docValueField);
+        scriptFields.forEach(sourceBuilder::scriptField);
     }
 }
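The effect of the guarded fetchSource call: when no _source fields were collected (everything comes from doc values or inner hits), build() leaves fetchSource untouched so SourceGenerator.optimize(...) above can disable _source fetching outright. A sketch of the interplay (assumes the fields and methods shown in this diff are accessible from the caller):

    SqlSourceBuilder sqlSource = new SqlSourceBuilder();
    sqlSource.addDocField("emp_no");            // doc values only, no _source field
    SearchSourceBuilder search = new SearchSourceBuilder();
    sqlSource.build(search);                    // fetchSource(...) is NOT set here
    // SourceGenerator.optimize(sqlSource, search) then disables _source entirely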
|
@ -32,18 +32,22 @@ public class ComputingHitExtractor implements HitExtractor {
|
|||
*/
|
||||
static final String NAME = "p";
|
||||
private final Processor processor;
|
||||
private final String hitName;
|
||||
|
||||
public ComputingHitExtractor(Processor processor) {
|
||||
public ComputingHitExtractor(Processor processor, String hitName) {
|
||||
this.processor = processor;
|
||||
this.hitName = hitName;
|
||||
}
|
||||
|
||||
ComputingHitExtractor(StreamInput in) throws IOException {
|
||||
processor = in.readNamedWriteable(Processor.class);
|
||||
hitName = in.readOptionalString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeNamedWriteable(processor);
|
||||
out.writeOptionalString(hitName);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -62,7 +66,7 @@ public class ComputingHitExtractor implements HitExtractor {
|
|||
|
||||
@Override
|
||||
public String hitName() {
|
||||
return null;
|
||||
return hitName;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -71,16 +75,17 @@ public class ComputingHitExtractor implements HitExtractor {
|
|||
return false;
|
||||
}
|
||||
ComputingHitExtractor other = (ComputingHitExtractor) obj;
|
||||
return processor.equals(other.processor);
|
||||
return Objects.equals(processor, other.processor)
|
||||
&& Objects.equals(hitName, other.hitName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(processor);
|
||||
return Objects.hash(processor, hitName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return processor.toString();
|
||||
}
|
||||
}
|
||||
}
|
|
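Note that hitName now participates in wire serialization and equality, so it round-trips with the extractor. A hedged sketch (someProcessor is a placeholder for any Processor instance):

    import org.elasticsearch.common.io.stream.BytesStreamOutput;

    ComputingHitExtractor extractor = new ComputingHitExtractor(someProcessor, "dep");
    BytesStreamOutput out = new BytesStreamOutput();
    extractor.writeTo(out); // writes the processor first, then the optional hitName
    // the StreamInput constructor above restores both fields on the reading side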
@@ -10,6 +10,7 @@ import org.elasticsearch.common.document.DocumentField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
 import org.elasticsearch.xpack.sql.execution.ExecutionException;
 import org.joda.time.ReadableDateTime;

@@ -32,6 +33,14 @@ public class FieldHitExtractor implements HitExtractor {
      */
     static final String NAME = "f";

+    /**
+     * Source extraction requires only the (relative) field name, without its parent path.
+     */
+    private static String[] sourcePath(String name, boolean useDocValue, String hitName) {
+        return useDocValue ? Strings.EMPTY_ARRAY : Strings
+                .tokenizeToStringArray(hitName == null ? name : name.substring(hitName.length() + 1), ".");
+    }
+
     private final String fieldName, hitName;
     private final boolean useDocValue;
     private final String[] path;

@@ -44,14 +53,21 @@ public class FieldHitExtractor implements HitExtractor {
         this.fieldName = name;
         this.useDocValue = useDocValue;
         this.hitName = hitName;
-        this.path = useDocValue ? Strings.EMPTY_ARRAY : Strings.tokenizeToStringArray(fieldName, ".");
+
+        if (hitName != null) {
+            if (!name.contains(hitName)) {
+                throw new SqlIllegalArgumentException("Hitname [%s] specified but not part of the name [%s]", hitName, name);
+            }
+        }
+
+        this.path = sourcePath(fieldName, useDocValue, hitName);
     }

     FieldHitExtractor(StreamInput in) throws IOException {
         fieldName = in.readString();
         useDocValue = in.readBoolean();
         hitName = in.readOptionalString();
-        path = useDocValue ? Strings.EMPTY_ARRAY : Strings.tokenizeToStringArray(fieldName, ".");
+        path = sourcePath(fieldName, useDocValue, hitName);
     }

     @Override
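To make the relative-path behaviour of sourcePath concrete (expected values, derived directly from the code above):

    // nested field: strip the inner-hit prefix, then tokenize
    sourcePath("dep.dep_name", false, "dep");     // -> { "dep_name" }
    // top-level field: the full dotted path is kept
    sourcePath("some.dotted.field", false, null); // -> { "some", "dotted", "field" }
    // doc values: no source lookup at all
    sourcePath("gender", true, null);             // -> { }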
|
@ -31,15 +31,15 @@ public class AttributeSet implements Set<Attribute> {
|
|||
}
|
||||
|
||||
public AttributeSet(Attribute attr) {
|
||||
delegate = new AttributeMap<Object>(attr, PRESENT);
|
||||
delegate = new AttributeMap<>(attr, PRESENT);
|
||||
}
|
||||
|
||||
public AttributeSet(Collection<Attribute> attr) {
|
||||
public AttributeSet(Collection<? extends Attribute> attr) {
|
||||
if (attr.isEmpty()) {
|
||||
delegate = EMPTY_DELEGATE;
|
||||
}
|
||||
else {
|
||||
delegate = new AttributeMap<Object>();
|
||||
delegate = new AttributeMap<>();
|
||||
|
||||
for (Attribute a : attr) {
|
||||
delegate.add(a, PRESENT);
|
||||
|
@ -51,7 +51,7 @@ public class AttributeSet implements Set<Attribute> {
|
|||
this.delegate = delegate;
|
||||
}
|
||||
|
||||
// package protected - should be called through Expressions to cheaply create
|
||||
// package protected - should be called through Expressions to cheaply create
|
||||
// a set from a collection of sets without too much copying
|
||||
void addAll(AttributeSet other) {
|
||||
delegate.addAll(other.delegate);
|
||||
|
@ -73,22 +73,27 @@ public class AttributeSet implements Set<Attribute> {
|
|||
return delegate.attributeNames();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void forEach(Consumer<? super Attribute> action) {
|
||||
delegate.forEach((k, v) -> action.accept(k));
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return delegate.size();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isEmpty() {
|
||||
return delegate.isEmpty();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean contains(Object o) {
|
||||
return delegate.containsKey(o);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean containsAll(Collection<?> c) {
|
||||
for (Object o : c) {
|
||||
if (!delegate.containsKey(o)) {
|
||||
|
@ -98,62 +103,77 @@ public class AttributeSet implements Set<Attribute> {
|
|||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator<Attribute> iterator() {
|
||||
return delegate.keySet().iterator();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object[] toArray() {
|
||||
return delegate.keySet().toArray();
|
||||
}
|
||||
|
||||
@Override
|
||||
public <T> T[] toArray(T[] a) {
|
||||
return delegate.keySet().toArray(a);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean add(Attribute e) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean remove(Object o) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean addAll(Collection<? extends Attribute> c) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean retainAll(Collection<?> c) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean removeAll(Collection<?> c) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clear() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Spliterator<Attribute> spliterator() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean removeIf(Predicate<? super Attribute> filter) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Stream<Attribute> stream() {
|
||||
return delegate.keySet().stream();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Stream<Attribute> parallelStream() {
|
||||
return delegate.keySet().parallelStream();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
return delegate.equals(o);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return delegate.hashCode();
|
||||
}
|
||||
|
|
|
@@ -5,6 +5,7 @@
  */
 package org.elasticsearch.xpack.sql.expression;

+import org.elasticsearch.common.Strings;
 import org.elasticsearch.xpack.sql.analysis.index.MappingException;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.type.DataType;

@@ -65,7 +66,8 @@ public class FieldAttribute extends TypedAttribute {
     }

     public String qualifiedPath() {
-        return qualifier() != null ? qualifier() + "." + path : path;
+        // return only the qualifier if there's no path
+        return qualifier() != null ? qualifier() + (Strings.hasText(path) ? "." + path : StringUtils.EMPTY) : path;
     }

     public boolean isNested() {

@@ -88,11 +90,13 @@ public class FieldAttribute extends TypedAttribute {
             return innerField(entry.getKey(), entry.getValue());
         }
         if (exactFields.isEmpty()) {
-            throw new MappingException("No docValue multi-field defined for %s", name());
+            throw new MappingException(
+                "No keyword/multi-field defined exact matches for [%s]; define one or use MATCH/QUERY instead",
+                name());
         }
         // pick the default - keyword
         if (exactFields.size() > 1) {
-            throw new MappingException("Multiple exact keyword candidates %s available for %s; specify which one to use",
+            throw new MappingException("Multiple exact keyword candidates %s available for [%s]; specify which one to use",
                 exactFields.keySet(), name());
         }
     }
@@ -7,10 +7,25 @@ package org.elasticsearch.xpack.sql.expression.function;

 import org.elasticsearch.xpack.sql.expression.Expression;
 import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunction;
+import org.elasticsearch.xpack.sql.plan.QueryPlan;
+
+import java.util.LinkedHashMap;
+import java.util.Map;

 public abstract class Functions {

     public static boolean isAggregate(Expression e) {
         return e instanceof AggregateFunction;
     }
+
+    public static Map<String, Function> collectFunctions(QueryPlan<?> plan) {
+        Map<String, Function> resolvedFunctions = new LinkedHashMap<>();
+        plan.forEachExpressionsDown(e -> {
+            if (e.resolved() && e instanceof Function) {
+                Function f = (Function) e;
+                resolvedFunctions.put(f.functionId(), f);
+            }
+        });
+        return resolvedFunctions;
+    }
 }
@@ -15,6 +15,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.type.DataType;
 import org.elasticsearch.xpack.sql.type.DataTypeConversion;
+import org.elasticsearch.xpack.sql.type.DataTypes;

 import java.util.Objects;

@@ -52,7 +53,7 @@ public class Cast extends UnaryScalarFunction {

     @Override
     public boolean nullable() {
-        return field().nullable() || DataTypeConversion.nullable(from());
+        return field().nullable() || DataTypes.isNull(from());
     }

     @Override

@@ -92,4 +93,4 @@ public class Cast extends UnaryScalarFunction {
     public String toString() {
         return functionName() + "(" + field().toString() + " AS " + to().sqlName() + ")#" + id();
     }
 }
@@ -106,7 +106,7 @@ public abstract class ScalarFunction extends Function {
     protected abstract ProcessorDefinition makeProcessorDefinition();

     // used if the function is monotonic and thus does not have to be computed for ordering purposes
-    // null means the script needs to be used; expression the field/expression to be used instead
+    // null means the script needs to be used; expression means the field/expression to be used instead
     public Expression orderBy() {
         return null;
     }
@@ -22,6 +22,7 @@ import org.elasticsearch.xpack.sql.expression.NamedExpression;
 import org.elasticsearch.xpack.sql.expression.Order;
 import org.elasticsearch.xpack.sql.expression.function.Function;
 import org.elasticsearch.xpack.sql.expression.function.FunctionAttribute;
+import org.elasticsearch.xpack.sql.expression.function.Functions;
 import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunction;
 import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
 import org.elasticsearch.xpack.sql.expression.function.aggregate.ExtendedStats;

@@ -70,6 +71,7 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.function.Consumer;

 import static java.util.stream.Collectors.toList;
 import static org.elasticsearch.xpack.sql.expression.Literal.FALSE;

@@ -704,23 +706,39 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {

     static class PruneOrderByNestedFields extends OptimizerRule<Project> {

+        private void findNested(Expression exp, Map<String, Function> functions, Consumer<FieldAttribute> onFind) {
+            exp.forEachUp(e -> {
+                if (e instanceof FunctionAttribute) {
+                    FunctionAttribute sfa = (FunctionAttribute) e;
+                    Function f = functions.get(sfa.functionId());
+                    if (f != null) {
+                        findNested(f, functions, onFind);
+                    }
+                }
+                if (e instanceof FieldAttribute) {
+                    FieldAttribute fa = (FieldAttribute) e;
+                    if (fa.isNested()) {
+                        onFind.accept(fa);
+                    }
+                }
+            });
+        }
+
         @Override
         protected LogicalPlan rule(Project project) {
             // check whether OrderBy relies on nested fields which are not used higher up
             if (project.child() instanceof OrderBy) {
                 OrderBy ob = (OrderBy) project.child();

-                // count the direct parents
+                // resolve function aliases (that are hiding the target)
+                Map<String, Function> functions = Functions.collectFunctions(project);
+
+                // track the direct parents
                 Map<String, Order> nestedOrders = new LinkedHashMap<>();

                 for (Order order : ob.order()) {
-                    Attribute attr = ((NamedExpression) order.child()).toAttribute();
-                    if (attr instanceof FieldAttribute) {
-                        FieldAttribute fa = (FieldAttribute) attr;
-                        if (fa.isNested()) {
-                            nestedOrders.put(fa.nestedParent().name(), order);
-                        }
-                    }
+                    // traverse the tree since the field might be wrapped in a function
+                    findNested(order.child(), functions, fa -> nestedOrders.put(fa.nestedParent().name(), order));
                 }

                 // no nested fields in sort

@@ -731,13 +749,9 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
                 // count the nested parents (if any) inside the parents
                 List<String> nestedTopFields = new ArrayList<>();

-                for (Attribute attr : project.output()) {
-                    if (attr instanceof FieldAttribute) {
-                        FieldAttribute fa = (FieldAttribute) attr;
-                        if (fa.isNested()) {
-                            nestedTopFields.add(fa.nestedParent().name());
-                        }
-                    }
+                for (NamedExpression ne : project.projections()) {
+                    // traverse the tree since the field might be wrapped in a function
+                    findNested(ne, functions, fa -> nestedTopFields.add(fa.nestedParent().name()));
                 }

                 List<Order> orders = new ArrayList<>(ob.order());
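Why findNested resolves through the functions map: in a query like the debug spec's SELECT with YEAR(dep.from_date), the nested field is hidden behind a function attribute, so a plain FieldAttribute check on the sort key would miss it. The recursion follows FunctionAttribute -> Function -> children until a FieldAttribute surfaces:

    // Illustrative call, mirroring the rule body above:
    // order.child() is YEAR(dep.from_date) exposed as a FunctionAttribute
    findNested(order.child(), functions,
            fa -> nestedOrders.put(fa.nestedParent().name(), order)); // records parent "dep"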
@@ -6,7 +6,6 @@
 package org.elasticsearch.xpack.sql.planner;

 import org.elasticsearch.common.collect.Tuple;
-import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.xpack.sql.expression.Alias;
 import org.elasticsearch.xpack.sql.expression.Attribute;
 import org.elasticsearch.xpack.sql.expression.Expression;

@@ -488,9 +487,10 @@ class QueryFolder extends RuleExecutor<PhysicalPlan> {
                     // ignore constant
                     throw new PlanningException("does not know how to order by expression %s", sfa.orderBy());
                 }
-            } else {
-                // nope, use scripted sorting
-                qContainer = qContainer.sort(new ScriptSort(sfa.script(), direction));
-            }
+                // nope, use scripted sorting
+                qContainer = qContainer.sort(new ScriptSort(sfa.script(), direction));
+            } else if (attr instanceof ScoreAttribute) {
+                qContainer = qContainer.sort(new ScoreSort(direction));
             } else {
@@ -18,9 +18,7 @@ import org.elasticsearch.xpack.sql.expression.FieldAttribute;
 import org.elasticsearch.xpack.sql.expression.LiteralAttribute;
 import org.elasticsearch.xpack.sql.expression.function.ScoreAttribute;
 import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunctionAttribute;
 import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.AttributeInput;
 import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;
 import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ReferenceInput;
 import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ScoreProcessorDefinition;
 import org.elasticsearch.xpack.sql.querydsl.agg.AggPath;
 import org.elasticsearch.xpack.sql.querydsl.agg.Aggs;

@@ -31,6 +29,7 @@ import org.elasticsearch.xpack.sql.querydsl.query.MatchAll;
 import org.elasticsearch.xpack.sql.querydsl.query.NestedQuery;
 import org.elasticsearch.xpack.sql.querydsl.query.Query;
 import org.elasticsearch.xpack.sql.tree.Location;

 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;

@@ -40,8 +39,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicReference;

 import static java.util.Collections.emptyList;
 import static java.util.Collections.emptyMap;

@@ -180,18 +177,19 @@ public class QueryContainer {
     //
     // reference methods
     //
-    private FieldExtraction searchHitFieldRef(FieldAttribute fieldAttr) {
+    private FieldExtraction topHitFieldRef(FieldAttribute fieldAttr) {
         return new SearchHitFieldRef(aliasName(fieldAttr), fieldAttr.dataType().hasDocValues());
     }

-    private Tuple<QueryContainer, FieldExtraction> nestedFieldRef(FieldAttribute attr) {
+    private Tuple<QueryContainer, FieldExtraction> nestedHitFieldRef(FieldAttribute attr) {
         // Find the nested query for this field. If there isn't one then create it
         List<FieldExtraction> nestedRefs = new ArrayList<>();

+        String name = aliasName(attr);
         Query q = rewriteToContainNestedField(query, attr.location(),
-                attr.nestedParent().path(), aliasName(attr), attr.dataType().hasDocValues());
+                attr.nestedParent().name(), name, attr.dataType().hasDocValues());

-        SearchHitFieldRef nestedFieldRef = new SearchHitFieldRef(attr.name(), attr.dataType().hasDocValues(), attr.parent().name());
+        SearchHitFieldRef nestedFieldRef = new SearchHitFieldRef(name, attr.dataType().hasDocValues(), attr.parent().name());
         nestedRefs.add(nestedFieldRef);

         return new Tuple<>(new QueryContainer(q, aggs, columns, aliases, pseudoFunctions, scalarFunctions, sort, limit), nestedFieldRef);

@@ -271,9 +269,9 @@ public class QueryContainer {
         if (attr instanceof FieldAttribute) {
             FieldAttribute fa = (FieldAttribute) attr;
             if (fa.isNested()) {
-                return nestedFieldRef(fa);
+                return nestedHitFieldRef(fa);
             } else {
-                return new Tuple<>(this, searchHitFieldRef(fa));
+                return new Tuple<>(this, topHitFieldRef(fa));
             }
         }
         if (attr instanceof ScalarFunctionAttribute) {
@@ -37,6 +37,10 @@ public class SearchHitFieldRef extends FieldReference {

     @Override
     public void collectFields(SqlSourceBuilder sourceBuilder) {
+        // nested fields are handled by inner hits
+        if (hitName != null) {
+            return;
+        }
         if (docValue) {
             sourceBuilder.addDocField(name);
         } else {
@@ -28,10 +28,10 @@ public abstract class DataTypeConversion {
         if (left.same(right)) {
             return left;
         }
-        if (nullable(left)) {
+        if (DataTypes.isNull(left)) {
             return right;
         }
-        if (nullable(right)) {
+        if (DataTypes.isNull(right)) {
             return left;
         }
         if (left.isNumeric() && right.isNumeric()) {

@@ -65,10 +65,6 @@ public abstract class DataTypeConversion {
         return null;
     }

-    public static boolean nullable(DataType from) {
-        return from instanceof NullType;
-    }
-
     public static boolean canConvert(DataType from, DataType to) { // TODO it'd be cleaner and more right to fetch the conversion
         // only primitives are supported so far
         if (!from.isPrimitive() || !to.isPrimitive()) {

@@ -317,7 +313,7 @@ public abstract class DataTypeConversion {
      * is important because it is used for serialization.
      */
    public enum Conversion {
-        DATE_TO_STRING(fromLong(UTC_DATE_FORMATTER::print)),
+        DATE_TO_STRING(Object::toString),
         OTHER_TO_STRING(String::valueOf),
         RATIONAL_TO_LONG(fromDouble(DataTypeConversion::safeToLong)),
         INTEGER_TO_LONG(fromLong(value -> value)),
@@ -77,6 +77,14 @@ public abstract class DataTypes {
         ES_PRIMITIVES_NO_DOC_VALUES.put(type.esName(), type);
     }

+    public static boolean isNull(DataType from) {
+        return from instanceof NullType;
+    }
+
+    public static boolean isUnsupported(DataType from) {
+        return from instanceof UnsupportedDataType;
+    }
+
     public static DataType fromJava(Object value) {
         if (value == null) {
             return NULL;
|
@ -106,7 +106,7 @@ public class FieldAttributeTests extends ESTestCase {
|
|||
assertThat(attr.isInexact(), is(true));
|
||||
MappingException me = expectThrows(MappingException.class, () -> attr.exactAttribute());
|
||||
assertThat(me.getMessage(),
|
||||
is("Multiple exact keyword candidates [one, two] available for some.ambiguous; specify which one to use"));
|
||||
is("Multiple exact keyword candidates [one, two] available for [some.ambiguous]; specify which one to use"));
|
||||
}
|
||||
|
||||
public void testNormalizedKeyword() {
|
||||
|
@ -118,12 +118,12 @@ public class FieldAttributeTests extends ESTestCase {
|
|||
}
|
||||
|
||||
public void testDottedFieldPath() {
|
||||
assertThat(error("some"), is("Found 1 problem(s)\nline 1:8: Cannot use field [some], type [object] only its subfields"));
|
||||
assertThat(error("some"), is("Found 1 problem(s)\nline 1:8: Cannot use field [some] type [object] only its subfields"));
|
||||
}
|
||||
|
||||
public void testDottedFieldPathDeeper() {
|
||||
assertThat(error("some.dotted"),
|
||||
is("Found 1 problem(s)\nline 1:8: Cannot use field [some.dotted], type [object] only its subfields"));
|
||||
is("Found 1 problem(s)\nline 1:8: Cannot use field [some.dotted] type [object] only its subfields"));
|
||||
}
|
||||
|
||||
public void testDottedFieldPathTypo() {
|
||||
|
|
|
@@ -21,7 +21,7 @@ public class VerifierErrorMessagesTests extends ESTestCase {
     private SqlParser parser = new SqlParser(DateTimeZone.UTC);

     private String verify(String sql) {
-        Map<String, DataType> mapping = TypesTests.loadMapping("mapping-multi-field-variation.json");
+        Map<String, DataType> mapping = TypesTests.loadMapping("mapping-multi-field-with-nested.json");
         EsIndex test = new EsIndex("test", mapping);
         return verify(IndexResolution.valid(test), sql);
     }

@@ -108,13 +108,23 @@ public class VerifierErrorMessagesTests extends ESTestCase {
             verify("SELECT AVG(int) FROM test GROUP BY AVG(int)"));
     }

+    public void testGroupByOnNested() {
+        assertEquals("1:38: Grouping isn't (yet) compatible with nested fields [dep.dep_id]",
+            verify("SELECT dep.dep_id FROM test GROUP BY dep.dep_id"));
+    }
+
+    public void testHavingOnNested() {
+        assertEquals("1:51: HAVING isn't (yet) compatible with nested fields [dep.start_date]",
+            verify("SELECT int FROM test GROUP BY int HAVING AVG(YEAR(dep.start_date)) > 1980"));
+    }
+
     public void testGroupByScalarFunctionWithAggOnTarget() {
         assertEquals("1:31: Cannot use an aggregate [AVG] for grouping",
             verify("SELECT int FROM test GROUP BY AVG(int) + 2"));
     }

     public void testUnsupportedType() {
-        assertEquals("1:8: Cannot use field [unsupported], its type [ip_range] is unsupported",
+        assertEquals("1:8: Cannot use field [unsupported] type [ip_range] as is unsupported",
             verify("SELECT unsupported FROM test"));
     }
 }
@@ -29,7 +29,9 @@ import static org.hamcrest.Matchers.is;

 public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<FieldHitExtractor> {
     public static FieldHitExtractor randomFieldHitExtractor() {
-        return new FieldHitExtractor(randomAlphaOfLength(5), randomBoolean(), randomAlphaOfLength(5));
+        String hitName = randomAlphaOfLength(5);
+        String name = randomAlphaOfLength(5) + "." + hitName;
+        return new FieldHitExtractor(name, randomBoolean(), hitName);
     }

     @Override

@@ -162,7 +164,7 @@ public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<FieldHitExtractor> {
     }

     public void testToString() {
-        assertEquals("field@hit", new FieldHitExtractor("field", true, "hit").toString());
+        assertEquals("hit.field@hit", new FieldHitExtractor("hit.field", true, "hit").toString());
     }

     public void testMultiValuedDocValue() {
@@ -31,7 +31,7 @@ import static java.util.Collections.singletonMap;

 public class ProcessingHitExtractorTests extends AbstractWireSerializingTestCase<ComputingHitExtractor> {
     public static ComputingHitExtractor randomProcessingHitExtractor(int depth) {
-        return new ComputingHitExtractor(randomProcessor(0));
+        return new ComputingHitExtractor(randomProcessor(0), randomAlphaOfLength(10));
     }

     public static Processor randomProcessor(int depth) {

@@ -64,7 +64,9 @@ public class ProcessingHitExtractorTests extends AbstractWireSerializingTestCase<ComputingHitExtractor> {
     @Override
     protected ComputingHitExtractor mutateInstance(ComputingHitExtractor instance) throws IOException {
         return new ComputingHitExtractor(
-                randomValueOtherThan(instance.processor(), () -> randomProcessor(0)));
+                randomValueOtherThan(instance.processor(), () -> randomProcessor(0)),
+                randomValueOtherThan(instance.hitName(), () -> randomAlphaOfLength(10))
+        );
     }

     public void testGet() {
@@ -8,6 +8,8 @@ package org.elasticsearch.xpack.sql.type;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
 import org.elasticsearch.xpack.sql.type.DataTypeConversion.Conversion;
+import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;

 public class DataTypeConversionTests extends ESTestCase {
     public void testConversionToString() {

@@ -17,11 +19,11 @@ public class DataTypeConversionTests extends ESTestCase {

         conversion = DataTypeConversion.conversionFor(new DateType(true), KeywordType.DEFAULT);
         assertNull(conversion.convert(null));
-        assertEquals("1970-01-01T00:00:00Z", conversion.convert(0));
+        assertEquals("1970-01-01T00:00:00.000Z", conversion.convert(new DateTime(0, DateTimeZone.UTC)));
     }

     /**
      * Test conversion to a date or long. These are almost the same.
      */
     public void testConversionToLongOrDate() {
         DataType to = randomBoolean() ? new LongType(true) : new DateType(true);
@@ -0,0 +1,70 @@
{
    "properties" : {
        "bool" : { "type" : "boolean" },
        "int" : { "type" : "integer" },
        "text" : { "type" : "text" },
        "keyword" : { "type" : "keyword" },
        "unsupported" : { "type" : "ip_range" },
        "some" : {
            "properties" : {
                "dotted" : {
                    "properties" : {
                        "field" : {
                            "type" : "keyword"
                        }
                    }
                },
                "string" : {
                    "type" : "text",
                    "fields" : {
                        "normalized" : {
                            "type" : "keyword",
                            "normalizer" : "some_normalizer"
                        },
                        "typical" : {
                            "type" : "keyword"
                        }
                    }
                },
                "ambiguous" : {
                    "type" : "text",
                    "fields" : {
                        "one" : {
                            "type" : "keyword"
                        },
                        "two" : {
                            "type" : "keyword"
                        },
                        "normalized" : {
                            "type" : "keyword",
                            "normalizer" : "some_normalizer"
                        }
                    }
                }
            }
        },
        "dep" : {
            "type" : "nested",
            "properties" : {
                "dep_name" : {
                    "type" : "text"
                },
                "dep_id" : {
                    "type" : "text",
                    "fields" : {
                        "keyword" : {
                            "type" : "keyword",
                            "ignore_above" : 256
                        }
                    }
                },
                "end_date" : {
                    "type" : "date"
                },
                "start_date" : {
                    "type" : "date"
                }
            }
        }
    }
}