SQL: Add support for object/inner/dotted fields (elastic/x-pack-elasticsearch#3368)
* SQL: Add support for object/inner/dotted fields
* Improve validation when selecting compound fields
* Improve fallback to exact field for text fields
* Consolidate various field attributes into one
* Consolidate various field HitExtractors into one
* Improve grammar for column declaration
* Update response of embedded server
* Improve index validation to allow use of internal indices
* Add clean-up of scroll in case of server errors
* By default throw an exception on multi-valued fields

Original commit: elastic/x-pack-elasticsearch@45b73fe0dc
This commit is contained in:
parent e18c7e92fa
commit 368099dcbd
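Before the diff itself, a hedged illustration of what the headline change enables: selecting object subfields by their dotted name. The JDBC endpoint, index name (`emp`) and column names below are illustrative assumptions, not taken from this commit — a minimal sketch only:

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class DottedFieldsExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical ES SQL JDBC endpoint; host, port and index are assumptions.
        try (Connection con = DriverManager.getConnection("jdbc:es://localhost:9200/");
             Statement st = con.createStatement();
             // 'address.city' is an object subfield - the dotted-name support this commit adds
             ResultSet rs = st.executeQuery("SELECT first_name, address.city FROM emp")) {
            while (rs.next()) {
                System.out.println(rs.getString(1) + " -> " + rs.getString(2));
            }
        }
    }
}
```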
@@ -25,15 +25,15 @@ public class CliExplainIT extends CliIntegrationTestCase {
         assertThat(command("EXPLAIN " + (randomBoolean() ? "" : "(PLAN ANALYZED) ") + "SELECT * FROM test"), containsString("plan"));
         assertThat(readLine(), startsWith("----------"));
-        assertThat(readLine(), startsWith("Project[[test_field{r}#"));
+        assertThat(readLine(), startsWith("Project[[test_field{f}#"));
         assertThat(readLine(), startsWith("\\_SubQueryAlias[test]"));
-        assertThat(readLine(), startsWith(" \\_EsRelation[test][test_field{r}#"));
+        assertThat(readLine(), startsWith(" \\_EsRelation[test][test_field{f}#"));
         assertEquals("", readLine());

         assertThat(command("EXPLAIN (PLAN OPTIMIZED) SELECT * FROM test"), containsString("plan"));
         assertThat(readLine(), startsWith("----------"));
-        assertThat(readLine(), startsWith("Project[[test_field{r}#"));
-        assertThat(readLine(), startsWith("\\_EsRelation[test][test_field{r}#"));
+        assertThat(readLine(), startsWith("Project[[test_field{f}#"));
+        assertThat(readLine(), startsWith("\\_EsRelation[test][test_field{f}#"));
         assertEquals("", readLine());

         // TODO in this case we should probably remove the source filtering entirely. Right? It costs but we don't need it.
@@ -72,17 +72,17 @@ public class CliExplainIT extends CliIntegrationTestCase {
         assertThat(command("EXPLAIN " + (randomBoolean() ? "" : "(PLAN ANALYZED) ") + "SELECT * FROM test WHERE i = 2"),
                 containsString("plan"));
         assertThat(readLine(), startsWith("----------"));
-        assertThat(readLine(), startsWith("Project[[i{r}#"));
-        assertThat(readLine(), startsWith("\\_Filter[i{r}#"));
+        assertThat(readLine(), startsWith("Project[[i{f}#"));
+        assertThat(readLine(), startsWith("\\_Filter[i{f}#"));
         assertThat(readLine(), startsWith(" \\_SubQueryAlias[test]"));
-        assertThat(readLine(), startsWith(" \\_EsRelation[test][i{r}#"));
+        assertThat(readLine(), startsWith(" \\_EsRelation[test][i{f}#"));
         assertEquals("", readLine());

         assertThat(command("EXPLAIN (PLAN OPTIMIZED) SELECT * FROM test WHERE i = 2"), containsString("plan"));
         assertThat(readLine(), startsWith("----------"));
-        assertThat(readLine(), startsWith("Project[[i{r}#"));
-        assertThat(readLine(), startsWith("\\_Filter[i{r}#"));
-        assertThat(readLine(), startsWith(" \\_EsRelation[test][i{r}#"));
+        assertThat(readLine(), startsWith("Project[[i{f}#"));
+        assertThat(readLine(), startsWith("\\_Filter[i{f}#"));
+        assertThat(readLine(), startsWith(" \\_EsRelation[test][i{f}#"));
         assertEquals("", readLine());

         assertThat(command("EXPLAIN (PLAN EXECUTABLE) SELECT * FROM test WHERE i = 2"), containsString("plan"));
@@ -132,13 +132,13 @@ public class CliExplainIT extends CliIntegrationTestCase {
         assertThat(readLine(), startsWith("----------"));
         assertThat(readLine(), startsWith("Aggregate[[],[COUNT(1)#"));
         assertThat(readLine(), startsWith("\\_SubQueryAlias[test]"));
-        assertThat(readLine(), startsWith(" \\_EsRelation[test][i{r}#"));
+        assertThat(readLine(), startsWith(" \\_EsRelation[test][i{f}#"));
         assertEquals("", readLine());

         assertThat(command("EXPLAIN (PLAN OPTIMIZED) SELECT COUNT(*) FROM test"), containsString("plan"));
         assertThat(readLine(), startsWith("----------"));
         assertThat(readLine(), startsWith("Aggregate[[],[COUNT(1)#"));
-        assertThat(readLine(), startsWith("\\_EsRelation[test][i{r}#"));
+        assertThat(readLine(), startsWith("\\_EsRelation[test][i{f}#"));
         assertEquals("", readLine());

         assertThat(command("EXPLAIN (PLAN EXECUTABLE) SELECT COUNT(*) FROM test"), containsString("plan"));
@@ -0,0 +1,32 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.qa.sql.embed;
+
+import org.elasticsearch.client.Client;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.rest.RestChannel;
+import org.elasticsearch.rest.RestController;
+import org.elasticsearch.xpack.sql.cli.net.protocol.Proto;
+import org.elasticsearch.xpack.sql.plugin.RestSqlCliAction;
+
+import java.io.DataInput;
+import java.io.IOException;
+
+import static org.mockito.Mockito.mock;
+
+class CliProtoHandler extends ProtoHandler {
+    private final RestSqlCliAction action;
+
+    CliProtoHandler(Client client) {
+        super(client);
+        action = new RestSqlCliAction(Settings.EMPTY, mock(RestController.class));
+    }
+
+    @Override
+    protected void handle(RestChannel channel, DataInput in) throws IOException {
+        action.operation(Proto.INSTANCE.readRequest(in), client).accept(channel);
+    }
+}
@@ -5,12 +5,10 @@
  */
 package org.elasticsearch.xpack.qa.sql.embed;

-import com.sun.net.httpserver.HttpExchange;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.rest.RestChannel;
 import org.elasticsearch.rest.RestController;
-import org.elasticsearch.test.rest.FakeRestChannel;
-import org.elasticsearch.test.rest.FakeRestRequest;
 import org.elasticsearch.xpack.sql.analysis.index.IndexResolver;
 import org.elasticsearch.xpack.sql.jdbc.net.protocol.Proto;
 import org.elasticsearch.xpack.sql.plugin.RestSqlJdbcAction;

@@ -31,14 +29,7 @@ class JdbcProtoHandler extends ProtoHandler {
     }

     @Override
-    protected void handle(HttpExchange http, DataInput in) throws IOException {
-        FakeRestChannel channel = new FakeRestChannel(new FakeRestRequest(), true, 1);
-        try {
-            action.operation(Proto.INSTANCE.readRequest(in), client).accept(channel);
-            while (false == channel.await()) {}
-            sendHttpResponse(http, channel.capturedResponse().content());
-        } catch (Exception e) {
-            fail(http, e);
-        }
-    }
+    protected void handle(RestChannel channel, DataInput in) throws IOException {
+        action.operation(Proto.INSTANCE.readRequest(in), client).accept(channel);
+    }
 }
@@ -5,6 +5,9 @@
  */
 package org.elasticsearch.xpack.qa.sql.embed;

+import io.netty.handler.codec.http.HttpHeaderNames;
+
+import com.sun.net.httpserver.Headers;
 import com.sun.net.httpserver.HttpExchange;
 import com.sun.net.httpserver.HttpHandler;

@@ -12,10 +15,15 @@ import org.apache.logging.log4j.Logger;
 import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
 import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.logging.ESLoggerFactory;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.rest.BytesRestResponse;
+import org.elasticsearch.rest.RestChannel;
+import org.elasticsearch.rest.RestResponse;
 import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.test.rest.FakeRestChannel;
+import org.elasticsearch.test.rest.FakeRestRequest;
 import org.elasticsearch.xpack.sql.analysis.index.IndexResolver;
 import org.elasticsearch.xpack.sql.execution.PlanExecutor;

@@ -23,6 +31,9 @@ import java.io.DataInput;
 import java.io.DataInputStream;
 import java.io.IOException;

+import static java.util.Collections.singletonList;
+import static java.util.Collections.singletonMap;
+
 public abstract class ProtoHandler implements HttpHandler, AutoCloseable {

     private static PlanExecutor planExecutor(EmbeddedModeFilterClient client) {

@@ -38,8 +49,7 @@ public abstract class ProtoHandler implements HttpHandler, AutoCloseable {

     protected ProtoHandler(Client client) {
         NodesInfoResponse niResponse = client.admin().cluster().prepareNodesInfo("_local").clear().get(TV);
-        this.client = !(client instanceof EmbeddedModeFilterClient) ? new EmbeddedModeFilterClient(
-                client) : (EmbeddedModeFilterClient) client;
+        this.client = client instanceof EmbeddedModeFilterClient ? (EmbeddedModeFilterClient) client : new EmbeddedModeFilterClient(client);
         this.client.setPlanExecutor(planExecutor(this.client));
         info = niResponse.getNodes().get(0);
         clusterName = niResponse.getClusterName().value();

@@ -55,32 +65,34 @@ public abstract class ProtoHandler implements HttpHandler, AutoCloseable {
             return;
         }

+        FakeRestChannel channel = new FakeRestChannel(
+                new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(singletonMap("error_trace", "")).build(), true, 1);
         try (DataInputStream in = new DataInputStream(http.getRequestBody())) {
-            handle(http, in);
-        } catch (Exception ex) {
-            fail(http, ex);
+            handle(channel, in);
+            while (false == channel.await()) {
+            }
+            sendHttpResponse(http, channel.capturedResponse());
+        } catch (Exception e) {
+            sendHttpResponse(http, new BytesRestResponse(channel, e));
         }
     }

-    protected abstract void handle(HttpExchange http, DataInput in) throws IOException;
+    protected abstract void handle(RestChannel channel, DataInput in) throws IOException;

-    protected void sendHttpResponse(HttpExchange http, BytesReference response) throws IOException {
-        // first do the conversion in case an exception is triggered
-        if (http.getResponseHeaders().isEmpty()) {
-            http.sendResponseHeaders(RestStatus.OK.getStatus(), 0);
-        }
-        response.writeTo(http.getResponseBody());
-        http.close();
-    }
-
-    protected void fail(HttpExchange http, Exception ex) {
-        log.error("Caught error while transmitting response", ex);
-        try {
-            // the error conversion has failed, halt
-            if (http.getResponseHeaders().isEmpty()) {
-                http.sendResponseHeaders(RestStatus.INTERNAL_SERVER_ERROR.getStatus(), -1);
-            }
-        } catch (IOException ioEx) {
+    protected void sendHttpResponse(HttpExchange http, RestResponse response) throws IOException {
+        try {
+            // first do the conversion in case an exception is triggered
+            if (http.getResponseHeaders().isEmpty()) {
+                http.sendResponseHeaders(response.status().getStatus(), response.content().length());
+
+                Headers headers = http.getResponseHeaders();
+                headers.putIfAbsent(HttpHeaderNames.CONTENT_TYPE.toString(), singletonList(response.contentType()));
+                if (response.getHeaders() != null) {
+                    headers.putAll(response.getHeaders());
+                }
+            }
+            response.content().writeTo(http.getResponseBody());
+        } catch (IOException ex) {
             log.error("Caught error while trying to catch error", ex);
         } finally {
             http.close();
@@ -93,7 +93,7 @@ public class DataLoader {
         if (titlesString == null) {
             throw new IllegalArgumentException("[" + location + "] must contain at least a title row");
         }
-        List<String> titles = Arrays.asList(titlesString.split(","));
+        List<String> titles = Arrays.asList(titlesString.split(","));

         String line;
         while ((line = reader.readLine()) != null) {

@@ -106,4 +106,5 @@ public class DataLoader {
     public static InputStream readFromJarUrl(URL source) throws IOException {
         return source.openStream();
     }
+
 }
@@ -3,7 +3,7 @@
 //

 debug
-SELECT int FROM test GROUP BY AVG(int) + 2;
+SELECT 5 + 2 AS a;

 table:s
 test_emp
@@ -3,4 +3,4 @@
 //

 debug
-SELECT int FROM test GROUP BY AVG(int) + 2;
+SELECT 5 + 2 AS a;
@@ -188,18 +188,15 @@ primaryExpression
     | EXTRACT '(' field=identifier FROM valueExpression ')' #extract
     | constant #constantDefault
-    | ASTERISK #star
-    | (qualifier=columnExpression '.')? ASTERISK #star
+    | (qualifiedName '.')? ASTERISK #star
     | identifier '(' (setQuantifier? expression (',' expression)*)? ')' #functionCall
     | '(' query ')' #subqueryExpression
-    | columnExpression #columnReference
-    | base=columnExpression '.' fieldName=identifier #dereference
+    | identifier #columnReference
+    | qualifiedName #dereference
     | '(' expression ')' #parenthesizedExpression
     ;

-columnExpression
-    : ((alias=identifier | table=tableIdentifier) '.' )? name=identifier
-    ;
-
 constant
     : NULL #nullLiteral
     | identifier STRING #typeConstructor
@@ -221,7 +218,7 @@ dataType
     ;

 qualifiedName
-    : identifier ('.' identifier)*
+    : (path=identifier '.')* name=identifier
     ;

 tableIdentifier
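The reworked `qualifiedName` rule above labels every identifier before a dot as `path` and the final identifier as `name`. A minimal sketch of that decomposition in plain Java (outside ANTLR, purely illustrative):

```java
public class QualifiedNameSplit {
    public static void main(String[] args) {
        String qualifiedName = "foo.bar.tar";
        int lastDot = qualifiedName.lastIndexOf('.');
        // path = everything before the last identifier, name = the last identifier,
        // mirroring "(path=identifier '.')* name=identifier" in the grammar
        String path = lastDot == -1 ? "" : qualifiedName.substring(0, lastDot);
        String name = qualifiedName.substring(lastDot + 1);
        System.out.println("path=" + path + ", name=" + name); // path=foo.bar, name=tar
    }
}
```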
@@ -251,16 +248,16 @@ number

 // http://developer.mimer.se/validator/sql-reserved-words.tml
 nonReserved
-    : ANALYZE | ANALYZED
-    | COLUMNS
-    | DEBUG
-    | EXECUTABLE | EXPLAIN
-    | FORMAT | FUNCTIONS | FROM
-    | GRAPHVIZ
-    | MAPPED
-    | OPTIMIZED
-    | PARSED | PHYSICAL | PLAN
-    | QUERY
+    : ANALYZE | ANALYZED
+    | COLUMNS
+    | DEBUG
+    | EXECUTABLE | EXPLAIN
+    | FORMAT | FUNCTIONS | FROM
+    | GRAPHVIZ
+    | MAPPED
+    | OPTIMIZED
+    | PARSED | PHYSICAL | PLAN
+    | QUERY
+    | RLIKE
     | SCHEMAS | SHOW
     | TABLES | TEXT
@@ -325,7 +322,6 @@ TEXT: 'TEXT';
 TRUE: 'TRUE';
 USING: 'USING';
 VERIFY: 'VERIFY';
-WHEN: 'WHEN';
 WHERE: 'WHERE';
 WITH: 'WITH';
@@ -60,33 +60,32 @@ TEXT=59
 TRUE=60
 USING=61
 VERIFY=62
-WHEN=63
-WHERE=64
-WITH=65
-EQ=66
-NEQ=67
-LT=68
-LTE=69
-GT=70
-GTE=71
-PLUS=72
-MINUS=73
-ASTERISK=74
-SLASH=75
-PERCENT=76
-CONCAT=77
-STRING=78
-INTEGER_VALUE=79
-DECIMAL_VALUE=80
-IDENTIFIER=81
-DIGIT_IDENTIFIER=82
-QUOTED_IDENTIFIER=83
-BACKQUOTED_IDENTIFIER=84
-SIMPLE_COMMENT=85
-BRACKETED_COMMENT=86
-WS=87
-UNRECOGNIZED=88
-DELIMITER=89
+WHERE=63
+WITH=64
+EQ=65
+NEQ=66
+LT=67
+LTE=68
+GT=69
+GTE=70
+PLUS=71
+MINUS=72
+ASTERISK=73
+SLASH=74
+PERCENT=75
+CONCAT=76
+STRING=77
+INTEGER_VALUE=78
+DECIMAL_VALUE=79
+IDENTIFIER=80
+DIGIT_IDENTIFIER=81
+QUOTED_IDENTIFIER=82
+BACKQUOTED_IDENTIFIER=83
+SIMPLE_COMMENT=84
+BRACKETED_COMMENT=85
+WS=86
+UNRECOGNIZED=87
+DELIMITER=88
 '('=1
 ')'=2
 ','=3
@@ -149,17 +148,16 @@ DELIMITER=89
 'TRUE'=60
 'USING'=61
 'VERIFY'=62
-'WHEN'=63
-'WHERE'=64
-'WITH'=65
-'='=66
-'<'=68
-'<='=69
-'>'=70
-'>='=71
-'+'=72
-'-'=73
-'*'=74
-'/'=75
-'%'=76
-'||'=77
+'WHERE'=63
+'WITH'=64
+'='=65
+'<'=67
+'<='=68
+'>'=69
+'>='=70
+'+'=71
+'-'=72
+'*'=73
+'/'=74
+'%'=75
+'||'=76
@@ -60,32 +60,31 @@ TEXT=59
 TRUE=60
 USING=61
 VERIFY=62
-WHEN=63
-WHERE=64
-WITH=65
-EQ=66
-NEQ=67
-LT=68
-LTE=69
-GT=70
-GTE=71
-PLUS=72
-MINUS=73
-ASTERISK=74
-SLASH=75
-PERCENT=76
-CONCAT=77
-STRING=78
-INTEGER_VALUE=79
-DECIMAL_VALUE=80
-IDENTIFIER=81
-DIGIT_IDENTIFIER=82
-QUOTED_IDENTIFIER=83
-BACKQUOTED_IDENTIFIER=84
-SIMPLE_COMMENT=85
-BRACKETED_COMMENT=86
-WS=87
-UNRECOGNIZED=88
+WHERE=63
+WITH=64
+EQ=65
+NEQ=66
+LT=67
+LTE=68
+GT=69
+GTE=70
+PLUS=71
+MINUS=72
+ASTERISK=73
+SLASH=74
+PERCENT=75
+CONCAT=76
+STRING=77
+INTEGER_VALUE=78
+DECIMAL_VALUE=79
+IDENTIFIER=80
+DIGIT_IDENTIFIER=81
+QUOTED_IDENTIFIER=82
+BACKQUOTED_IDENTIFIER=83
+SIMPLE_COMMENT=84
+BRACKETED_COMMENT=85
+WS=86
+UNRECOGNIZED=87
 '('=1
 ')'=2
 ','=3
@@ -148,17 +147,16 @@ UNRECOGNIZED=88
 'TRUE'=60
 'USING'=61
 'VERIFY'=62
-'WHEN'=63
-'WHERE'=64
-'WITH'=65
-'='=66
-'<'=68
-'<='=69
-'>'=70
-'>='=71
-'+'=72
-'-'=73
-'*'=74
-'/'=75
-'%'=76
-'||'=77
+'WHERE'=63
+'WITH'=64
+'='=65
+'<'=67
+'<='=68
+'>'=69
+'>='=70
+'+'=71
+'-'=72
+'*'=73
+'/'=74
+'%'=75
+'||'=76
@@ -14,12 +14,11 @@ import org.elasticsearch.xpack.sql.expression.Attribute;
 import org.elasticsearch.xpack.sql.expression.AttributeSet;
 import org.elasticsearch.xpack.sql.expression.Expression;
 import org.elasticsearch.xpack.sql.expression.Expressions;
+import org.elasticsearch.xpack.sql.expression.FieldAttribute;
 import org.elasticsearch.xpack.sql.expression.Literal;
 import org.elasticsearch.xpack.sql.expression.NamedExpression;
-import org.elasticsearch.xpack.sql.expression.NestedFieldAttribute;
 import org.elasticsearch.xpack.sql.expression.Order;
 import org.elasticsearch.xpack.sql.expression.SubQueryExpression;
-import org.elasticsearch.xpack.sql.expression.TypedAttribute;
 import org.elasticsearch.xpack.sql.expression.UnresolvedAlias;
 import org.elasticsearch.xpack.sql.expression.UnresolvedAttribute;
 import org.elasticsearch.xpack.sql.expression.UnresolvedStar;

@@ -46,7 +45,6 @@ import org.elasticsearch.xpack.sql.plan.logical.With;
 import org.elasticsearch.xpack.sql.rule.Rule;
 import org.elasticsearch.xpack.sql.rule.RuleExecutor;
 import org.elasticsearch.xpack.sql.tree.Node;
-import org.elasticsearch.xpack.sql.type.CompoundDataType;
 import org.elasticsearch.xpack.sql.type.DataType;
 import org.elasticsearch.xpack.sql.type.DataTypeConversion;
 import org.elasticsearch.xpack.sql.util.StringUtils;

@@ -62,7 +60,6 @@ import java.util.Locale;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
-import java.util.stream.Stream;

 import static java.util.Collections.emptyList;
 import static java.util.Collections.singletonList;
@@ -163,27 +160,15 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
     private static Attribute resolveAgainstList(UnresolvedAttribute u, List<Attribute> attrList, boolean lenient) {
         List<Attribute> matches = new ArrayList<>();

-        // use the qualifier if present
-        if (u.qualifier() != null) {
-            for (Attribute attribute : attrList) {
-                if (!attribute.synthetic()) {
-                    if (Objects.equals(u.qualifiedName(), attribute.qualifiedName())) {
-                        matches.add(attribute);
-                    }
-                    if (attribute instanceof NestedFieldAttribute) {
-                        // since u might be unqualified but the parent shows up as a qualifier
-                        if (Objects.equals(u.qualifiedName(), attribute.name())) {
-                            matches.add(attribute.withLocation(u.location()));
-                        }
-                    }
-                }
-            }
-        }
+        // first try the qualified version
+        boolean qualified = u.qualifier() != null;

-        // if none is found, try to do a match just on the name (to filter out missing qualifiers)
-        if (matches.isEmpty()) {
-            for (Attribute attribute : attrList) {
-                if (!attribute.synthetic() && Objects.equals(u.name(), attribute.name())) {
-                    matches.add(attribute.withLocation(u.location()));
-                }
-            }
-        }
+        for (Attribute attribute : attrList) {
+            if (!attribute.synthetic()) {
+                boolean match = qualified ?
+                        Objects.equals(u.qualifiedName(), attribute.qualifiedName()) :
+                        Objects.equals(u.name(), attribute.name());
+                if (match) {
+                    matches.add(attribute.withLocation(u.location()));
+                }
+            }
+        }
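The rewritten `resolveAgainstList` collapses the two earlier loops into a single pass that compares either the qualified name or the bare name, depending on whether the unresolved attribute carries a qualifier. A self-contained sketch of that matching rule, with simplified types standing in for the SQL module's Attribute classes:

```java
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;

public class AttributeMatch {
    static class Attr {
        final String name;
        final String qualifiedName;
        final boolean synthetic;

        Attr(String name, String qualifiedName, boolean synthetic) {
            this.name = name;
            this.qualifiedName = qualifiedName;
            this.synthetic = synthetic;
        }
    }

    // Single pass: qualified lookups compare qualified names, bare lookups compare bare names.
    static List<Attr> resolve(String name, String qualifiedName, boolean qualified, List<Attr> attrs) {
        List<Attr> matches = new ArrayList<>();
        for (Attr a : attrs) {
            if (!a.synthetic) {
                boolean match = qualified
                        ? Objects.equals(qualifiedName, a.qualifiedName)
                        : Objects.equals(name, a.name);
                if (match) {
                    matches.add(a);
                }
            }
        }
        return matches;
    }
}
```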
@@ -356,17 +341,18 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
         return plan.transformExpressionsUp(e -> {
             if (e instanceof UnresolvedAttribute) {
                 UnresolvedAttribute u = (UnresolvedAttribute) e;
-                NamedExpression named = resolveAgainstList(u,
-                        plan.children().stream()
-                            .flatMap(c -> c.output().stream())
-                            .collect(toList()),
-                        false);
+                List<Attribute> childrenOutput = new ArrayList<>();
+                for (LogicalPlan child : plan.children()) {
+                    childrenOutput.addAll(child.output());
+                }
+                NamedExpression named = resolveAgainstList(u, childrenOutput, false);
                 // if resolved, return it; otherwise keep it in place to be resolved later
                 if (named != null) {
-                    // it's a compound type so convert it
-                    if (named instanceof TypedAttribute && ((TypedAttribute) named).dataType() instanceof CompoundDataType) {
-                        named = new UnresolvedStar(e.location(),
-                                new UnresolvedAttribute(e.location(), u.name(), u.qualifier()));
+                    // if it's an object/compound type, keep it unresolved with a nice error message
+                    if (named instanceof FieldAttribute && !((FieldAttribute) named).dataType().isPrimitive()) {
+                        FieldAttribute fa = (FieldAttribute) named;
+                        named = u.withUnresolvedMessage(
+                                "Cannot use field [" + fa.name() + "] (type " + fa.dataType().esName() + ") only its subfields");
                     }

                     if (log.isTraceEnabled()) {

@@ -381,42 +367,71 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
         }

     private List<NamedExpression> expandProjections(List<? extends NamedExpression> projections, LogicalPlan child) {
-        return projections.stream().flatMap(e -> {
-            // check if there's a qualifier
-            // no - means only top-level
-            // it is - return only that level
-            if (e instanceof UnresolvedStar) {
-                List<Attribute> output = child.output();
-                UnresolvedStar us = (UnresolvedStar) e;
-
-                Stream<Attribute> stream = output.stream();
-                if (us.qualifier() == null) {
-                    stream = stream.filter(a -> !(a instanceof NestedFieldAttribute));
-                }
-
-                // if there's a qualifier, inspect that level
-                if (us.qualifier() != null) {
-                    // qualifier is selected, need to resolve that first.
-                    Attribute qualifier = resolveAgainstList(us.qualifier(), output, false);
-                    stream = stream.filter(a -> (a instanceof NestedFieldAttribute)
-                            && Objects.equals(a.qualifier(), qualifier.qualifier())
-                            && Objects.equals(((NestedFieldAttribute) a).parentPath(), qualifier.name()));
-                }
-
-                return stream.filter(a -> !(a.dataType() instanceof CompoundDataType));
-            }
-            else if (e instanceof UnresolvedAlias) {
-                UnresolvedAlias ua = (UnresolvedAlias) e;
-                if (ua.child() instanceof UnresolvedStar) {
-                    return child.output().stream();
-                }
-            }
-            return Stream.of(e);
-        })
-        .map(NamedExpression.class::cast)
-        .collect(toList());
+        List<NamedExpression> result = new ArrayList<>();
+
+        List<Attribute> output = child.output();
+        for (NamedExpression ne : projections) {
+            if (ne instanceof UnresolvedStar) {
+                UnresolvedStar us = (UnresolvedStar) ne;
+
+                // a qualifier is specified - since this is a star, it should be a CompoundDataType
+                if (us.qualifier() != null) {
+                    // resolve the so-called qualifier first
+                    // since this is an unresolved star we don't know whether it's a path or an actual qualifier
+                    Attribute q = resolveAgainstList(us.qualifier(), output, false);
+
+                    // now use the resolved 'qualifier' to match
+                    for (Attribute attr : output) {
+                        // filter the attributes that match based on their path
+                        if (attr instanceof FieldAttribute) {
+                            FieldAttribute fa = (FieldAttribute) attr;
+                            if (q.qualifier() != null) {
+                                if (Objects.equals(q.qualifiedName(), fa.qualifiedName())) {
+                                    result.add(fa.withLocation(attr.location()));
+                                }
+                            } else {
+                                // use the path only to match non-compound types
+                                if (Objects.equals(q.name(), fa.path())) {
+                                    result.add(fa.withLocation(attr.location()));
+                                }
+                            }
+                        }
+                    }
+                } else {
+                    // add only primitives
+                    // but filter out multi fields
+                    Set<Attribute> seenMultiFields = new LinkedHashSet<>();
+
+                    for (Attribute a : output) {
+                        if (a.dataType().isPrimitive()) {
+                            if (a instanceof FieldAttribute) {
+                                FieldAttribute fa = (FieldAttribute) a;
+                                if (!seenMultiFields.contains(fa.parent())) {
+                                    result.add(a);
+                                    seenMultiFields.add(a);
+                                }
+                            } else {
+                                result.add(a);
+                            }
+                        }
+                    }
+                }
+            } else if (ne instanceof UnresolvedAlias) {
+                UnresolvedAlias ua = (UnresolvedAlias) ne;
+                if (ua.child() instanceof UnresolvedStar) {
+                    // add only primitives
+                    for (Attribute a : output) {
+                        if (a.dataType().isPrimitive()) {
+                            result.add(a);
+                        }
+                    }
+                }
+            } else {
+                result.add(ne);
+            }
+        }
+
+        return result;
     }

     // generate a new (right) logical plan with different IDs for all conflicting attributes
@@ -6,12 +6,10 @@
 package org.elasticsearch.xpack.sql.analysis.analyzer;

 import org.elasticsearch.xpack.sql.capabilities.Unresolvable;
 import org.elasticsearch.xpack.sql.expression.Alias;
 import org.elasticsearch.xpack.sql.expression.Attribute;
 import org.elasticsearch.xpack.sql.expression.Exists;
 import org.elasticsearch.xpack.sql.expression.Expression;
 import org.elasticsearch.xpack.sql.expression.Expressions;
 import org.elasticsearch.xpack.sql.expression.Order;
 import org.elasticsearch.xpack.sql.expression.UnresolvedAttribute;
 import org.elasticsearch.xpack.sql.expression.function.Function;
 import org.elasticsearch.xpack.sql.expression.function.FunctionAttribute;

@@ -129,15 +127,22 @@ abstract class Verifier {
             // handle Attributes differently to provide more context
             if (ae instanceof UnresolvedAttribute) {
                 UnresolvedAttribute ua = (UnresolvedAttribute) ae;
-                boolean useQualifier = ua.qualifier() != null;
-                List<String> potentialMatches = new ArrayList<>();
-                for (Attribute a : p.intputSet()) {
-                    potentialMatches.add(useQualifier ? a.qualifiedName() : a.name());
-                }
+                // only work out the synonyms for raw unresolved attributes
+                if (!ua.customMessage()) {
+                    boolean useQualifier = ua.qualifier() != null;
+                    List<String> potentialMatches = new ArrayList<>();
+                    for (Attribute a : p.intputSet()) {
+                        String nameCandidate = useQualifier ? a.qualifiedName() : a.name();
+                        // add only primitives (object types would only result in another error)
+                        if (a.dataType().isPrimitive()) {
+                            potentialMatches.add(nameCandidate);
+                        }
+                    }

-                List<String> matches = StringUtils.findSimilar(ua.qualifiedName(), potentialMatches);
-                if (!matches.isEmpty()) {
-                    ae = new UnresolvedAttribute(ua.location(), ua.name(), ua.qualifier(), UnresolvedAttribute.errorMessage(ua.qualifiedName(), matches));
+                    List<String> matches = StringUtils.findSimilar(ua.qualifiedName(), potentialMatches);
+                    if (!matches.isEmpty()) {
+                        ae = ua.withUnresolvedMessage(UnresolvedAttribute.errorMessage(ua.qualifiedName(), matches));
+                    }
                 }
             }

@@ -261,7 +266,7 @@ abstract class Verifier {
                             Expressions.names(a.groupings())));
                     groupingFailures.add(a);
                     return false;
                 }
             }
         }
         return true;

@@ -300,7 +305,7 @@ abstract class Verifier {
         a.aggregates().forEach(ne ->
             ne.collectFirstChildren(c -> checkGroupMatch(c, ne, a.groupings(), missing, functions)));

-        if (!missing.isEmpty()) {
+        if (!missing.isEmpty()) {
             String plural = missing.size() > 1 ? "s" : StringUtils.EMPTY;
             localFailures.add(fail(missing.values().iterator().next(), "Cannot use non-grouped column" + plural + " %s, expected %s",
                 Expressions.names(missing.keySet()),

@@ -378,4 +383,4 @@ abstract class Verifier {
                 .forEach(exp -> localFailures.add(fail(exp, "[SCORE()] cannot be an argument to a function"))),
             Function.class));
     }
-}
+}
@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.sql.analysis.index;

+import com.carrotsearch.hppc.cursors.ObjectObjectCursor;

 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
 import org.elasticsearch.action.admin.indices.get.GetIndexRequest.Feature;

@@ -96,11 +97,7 @@ public class IndexResolver {
     }

     private static GetIndexResult buildGetIndexResult(String concreteIndex, String indexOrAlias,
-            ImmutableOpenMap<String, MappingMetaData> mappings) {
-        if (concreteIndex.startsWith(".")) {
-            //Indices that start with "." are considered internal and should not be available to SQL
-            return GetIndexResult.notFound(indexOrAlias);
-        }
+            ImmutableOpenMap<String, MappingMetaData> mappings) {

         // Make sure that the index contains only a single type
         MappingMetaData singleType = null;

@@ -128,8 +125,12 @@ public class IndexResolver {
             return GetIndexResult.invalid(
                     "[" + indexOrAlias + "] contains more than one type " + typeNames + " so it is incompatible with sql");
         } else {
-            Map<String, DataType> mapping = Types.fromEs(singleType.sourceAsMap());
-            return GetIndexResult.valid(new EsIndex(indexOrAlias, mapping));
+            try {
+                Map<String, DataType> mapping = Types.fromEs(singleType.sourceAsMap());
+                return GetIndexResult.valid(new EsIndex(indexOrAlias, mapping));
+            } catch (MappingException ex) {
+                return GetIndexResult.invalid(ex.getMessage());
+            }
         }
     }
 }
@@ -44,7 +44,7 @@ abstract class AbstractSearchHitRowSet extends AbstractRowSet {

         String innerHit = null;
         for (HitExtractor ex : exts) {
-            innerHit = ex.innerHitName();
+            innerHit = ex.hitName();
             if (innerHit != null) {
                 innerHits.add(innerHit);
             }

@@ -96,7 +96,7 @@ abstract class AbstractSearchHitRowSet extends AbstractRowSet {
     @Override
     protected Object getColumn(int column) {
         HitExtractor e = extractors.get(column);
-        int extractorLevel = e.innerHitName() == null ? 0 : 1;
+        int extractorLevel = e.hitName() == null ? 0 : 1;

         SearchHit hit = null;
         SearchHit[] sh = hits;
@@ -26,10 +26,8 @@ import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
 import org.elasticsearch.xpack.sql.execution.ExecutionException;
 import org.elasticsearch.xpack.sql.execution.search.extractor.ComputingHitExtractor;
-import org.elasticsearch.xpack.sql.execution.search.extractor.DocValueExtractor;
+import org.elasticsearch.xpack.sql.execution.search.extractor.FieldHitExtractor;
 import org.elasticsearch.xpack.sql.execution.search.extractor.HitExtractor;
-import org.elasticsearch.xpack.sql.execution.search.extractor.InnerHitExtractor;
-import org.elasticsearch.xpack.sql.execution.search.extractor.SourceExtractor;
 import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.AggPathInput;
 import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.AggValueInput;
 import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.HitExtractorInput;

@@ -40,7 +38,6 @@ import org.elasticsearch.xpack.sql.querydsl.agg.AggPath;
 import org.elasticsearch.xpack.sql.querydsl.container.AggRef;
 import org.elasticsearch.xpack.sql.querydsl.container.ColumnReference;
 import org.elasticsearch.xpack.sql.querydsl.container.ComputedRef;
-import org.elasticsearch.xpack.sql.querydsl.container.NestedFieldRef;
 import org.elasticsearch.xpack.sql.querydsl.container.QueryContainer;
 import org.elasticsearch.xpack.sql.querydsl.container.ScriptFieldRef;
 import org.elasticsearch.xpack.sql.querydsl.container.SearchHitFieldRef;

@@ -239,14 +236,14 @@ public class Scroller {

         // if there's an id, try to setup next scroll
         if (scrollId != null &&
-            // is all the content already retrieved?
-            (Boolean.TRUE.equals(response.isTerminatedEarly()) || response.getHits().getTotalHits() == hits.length
-            // or maybe the limit has been reached
-            || (hits.length >= query.limit() && query.limit() > -1))) {
-            // if so, clear the scroll
-            clearScroll(response.getScrollId(), ActionListener.wrap(
-                succeeded -> listener.onResponse(new InitialSearchHitRowSet(schema, exts, hits, query.limit(), null)),
-                listener::onFailure));
+                // is all the content already retrieved?
+                (Boolean.TRUE.equals(response.isTerminatedEarly()) || response.getHits().getTotalHits() == hits.length
+                // or maybe the limit has been reached
+                || (hits.length >= query.limit() && query.limit() > -1))) {
+            // if so, clear the scroll
+            clearScroll(response.getScrollId(), ActionListener.wrap(
+                    succeeded -> listener.onResponse(new InitialSearchHitRowSet(schema, exts, hits, query.limit(), null)),
+                    listener::onFailure));
         } else {
             listener.onResponse(new InitialSearchHitRowSet(schema, exts, hits, query.limit(), scrollId));
         }

@@ -273,17 +270,12 @@ public class Scroller {
     private HitExtractor createExtractor(ColumnReference ref) {
         if (ref instanceof SearchHitFieldRef) {
             SearchHitFieldRef f = (SearchHitFieldRef) ref;
-            return f.useDocValue() ? new DocValueExtractor(f.name()) : new SourceExtractor(f.name());
-        }
-
-        if (ref instanceof NestedFieldRef) {
-            NestedFieldRef f = (NestedFieldRef) ref;
-            return new InnerHitExtractor(f.parent(), f.name(), f.useDocValue());
+            return new FieldHitExtractor(f.name(), f.useDocValue(), f.hitName());
         }

         if (ref instanceof ScriptFieldRef) {
             ScriptFieldRef f = (ScriptFieldRef) ref;
-            return new DocValueExtractor(f.name());
+            return new FieldHitExtractor(f.name(), true);
         }

         if (ref instanceof ComputedRef) {

@@ -318,16 +310,28 @@ public class Scroller {
         try {
             ShardSearchFailure[] failure = response.getShardFailures();
             if (!CollectionUtils.isEmpty(failure)) {
-                onFailure(new ExecutionException(failure[0].reason(), failure[0].getCause()));
+                cleanupScroll(response, new ExecutionException(failure[0].reason(), failure[0].getCause()));
             }
             handleResponse(response, listener);
         } catch (Exception ex) {
-            onFailure(ex);
+            cleanupScroll(response, ex);
         }
     }

     protected abstract void handleResponse(SearchResponse response, ActionListener<SchemaRowSet> listener);

+    // clean-up the scroll in case of exception
+    protected final void cleanupScroll(SearchResponse response, Exception ex) {
+        if (response != null && response.getScrollId() != null) {
+            client.prepareClearScroll().addScrollId(response.getScrollId())
+                // in case of failure, report the initial exception instead of the one resulting from cleaning the scroll
+                .execute(ActionListener.wrap(r -> listener.onFailure(ex), e -> {
+                    ex.addSuppressed(e);
+                    listener.onFailure(ex);
+                }));
+        }
+    }
+
     protected final void clearScroll(String scrollId, ActionListener<Boolean> listener) {
         if (scrollId != null) {
             client.prepareClearScroll().addScrollId(scrollId).execute(

@@ -344,4 +348,4 @@ public class Scroller {
             listener.onFailure(ex);
         }
     }
-}
+}
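The new `cleanupScroll` above releases the server-side scroll context before reporting the original failure, attaching any secondary clear-scroll error via `addSuppressed` so the first exception is never masked. A generic, synchronous sketch of the same pattern (the real code is asynchronous via `ActionListener`; the `Cleaner` interface here is a stand-in):

```java
import java.util.function.Consumer;

public class CleanupOnFailure {
    interface Cleaner {
        void clean() throws Exception;
    }

    // Report the original failure; if the cleanup itself fails, attach that
    // error as suppressed instead of letting it mask the first exception.
    static void failAfterCleanup(Exception original, Cleaner cleaner, Consumer<Exception> onFailure) {
        try {
            cleaner.clean();
        } catch (Exception cleanupFailure) {
            original.addSuppressed(cleanupFailure);
        }
        onFailure.accept(original);
    }
}
```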
@@ -22,8 +22,6 @@ import org.elasticsearch.search.sort.SortOrder;
 import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
 import org.elasticsearch.xpack.sql.expression.Attribute;
 import org.elasticsearch.xpack.sql.expression.FieldAttribute;
-import org.elasticsearch.xpack.sql.expression.NestedFieldAttribute;
-import org.elasticsearch.xpack.sql.expression.RootFieldAttribute;
 import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;
 import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ReferenceInput;
 import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ScoreProcessorDefinition;

@@ -142,52 +140,49 @@ public abstract class SourceGenerator {
             // sorting only works on not-analyzed fields - look for a multi-field replacement
             if (attr instanceof FieldAttribute) {
                 FieldAttribute fa = (FieldAttribute) attr;
-                attr = fa.isAnalyzed() ? fa.notAnalyzedAttribute() : attr;
-            }
+                fa = fa.isInexact() ? fa.exactAttribute() : fa;

-            // top-level doc value
-            if (attr instanceof RootFieldAttribute) {
-                sortBuilder = fieldSort(((RootFieldAttribute) attr).name());
-            }
-            if (attr instanceof NestedFieldAttribute) {
-                NestedFieldAttribute nfa = (NestedFieldAttribute) attr;
-                FieldSortBuilder fieldSort = fieldSort(nfa.name());
-
-                String nestedPath = nfa.parentPath();
-                NestedSortBuilder newSort = new NestedSortBuilder(nestedPath);
-                NestedSortBuilder nestedSort = fieldSort.getNestedSort();
-
-                if (nestedSort == null) {
-                    fieldSort.setNestedSort(newSort);
+                if (!fa.isNested()) {
+                    sortBuilder = fieldSort(fa.name());
                 } else {
-                    for (; nestedSort.getNestedSort() != null; nestedSort = nestedSort.getNestedSort()) {
-                    }
-                    nestedSort.setNestedSort(newSort);
-                }
+                    FieldSortBuilder fieldSort = fieldSort(fa.name());
+                    String nestedPath = fa.nestedParent().path();
+                    NestedSortBuilder newSort = new NestedSortBuilder(nestedPath);
+                    NestedSortBuilder nestedSort = fieldSort.getNestedSort();

-                nestedSort = newSort;
-
-                List<QueryBuilder> nestedQuery = new ArrayList<>(1);
-
-                // copy also the nested queries (if any)
-                if (container.query() != null) {
-                    container.query().forEachDown(nq -> {
-                        // found a match
-                        if (nestedPath.equals(nq.path())) {
-                            // get the child query - the nested wrapping and inner hits are not needed
-                            nestedQuery.add(nq.child().asBuilder());
+                    if (nestedSort == null) {
+                        fieldSort.setNestedSort(newSort);
+                    } else {
+                        for (; nestedSort.getNestedSort() != null; nestedSort = nestedSort.getNestedSort()) {
                         }
-                    }, NestedQuery.class);
-                }
+                        nestedSort.setNestedSort(newSort);
+                    }

-                if (nestedQuery.size() > 0) {
-                    if (nestedQuery.size() > 1) {
-                        throw new SqlIllegalArgumentException("nested query should have been grouped in one place");
+                    nestedSort = newSort;
+
+                    List<QueryBuilder> nestedQuery = new ArrayList<>(1);
+
+                    // copy also the nested queries (if any)
+                    if (container.query() != null) {
+                        container.query().forEachDown(nq -> {
+                            // found a match
+                            if (nestedPath.equals(nq.path())) {
+                                // get the child query - the nested wrapping and inner hits are not needed
+                                nestedQuery.add(nq.child().asBuilder());
+                            }
+                        }, NestedQuery.class);
+                    }
+
+                    if (nestedQuery.size() > 0) {
+                        if (nestedQuery.size() > 1) {
+                            throw new SqlIllegalArgumentException("nested query should have been grouped in one place");
+                        }
+                        nestedSort.setFilter(nestedQuery.get(0));
                     }
-                    nestedSort.setFilter(nestedQuery.get(0));
-                }

-                sortBuilder = fieldSort;
+                    sortBuilder = fieldSort;
+                }
             }
         } else if (sortable instanceof ScriptSort) {
             ScriptSort ss = (ScriptSort) sortable;

@@ -212,4 +207,4 @@ public abstract class SourceGenerator {
             source.storedFields(NO_STORED_FIELD);
         }
     }
-}
+}
@@ -61,7 +61,7 @@ public class ComputingHitExtractor implements HitExtractor {
     }

     @Override
-    public String innerHitName() {
+    public String hitName() {
         return null;
     }
@@ -47,7 +47,7 @@ public class ConstantExtractor implements HitExtractor {
     }

     @Override
-    public String innerHitName() {
+    public String hitName() {
        return null;
     }
@@ -1,88 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.sql.execution.search.extractor;
-
-import org.elasticsearch.common.document.DocumentField;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.search.SearchHit;
-import org.joda.time.ReadableInstant;
-
-import java.io.IOException;
-
-/**
- * Extracts field values from {@link SearchHit#field(String)}.
- */
-public class DocValueExtractor implements HitExtractor {
-    /**
-     * Stands for {@code doc_value}. We try to use short names for {@link HitExtractor}s
-     * to save a few bytes when when we send them back to the user.
-     */
-    static final String NAME = "d";
-    private final String fieldName;
-
-    public DocValueExtractor(String name) {
-        this.fieldName = name;
-    }
-
-    DocValueExtractor(StreamInput in) throws IOException {
-        fieldName = in.readString();
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeString(fieldName);
-    }
-
-    @Override
-    public String getWriteableName() {
-        return NAME;
-    }
-
-    @Override
-    public Object get(SearchHit hit) {
-        // TODO we should think about what to do with multi-valued fields.
-        // Tracked by https://github.com/elastic/x-pack-elasticsearch/issues/2874
-        DocumentField field = hit.field(fieldName);
-        if (field != null) {
-            Object value = field.getValue();
-            if (value != null && value instanceof ReadableInstant) {
-                return ((ReadableInstant) value).getMillis();
-            } else {
-                return value;
-            }
-        } else {
-            return null;
-        }
-    }
-
-    @Override
-    public String innerHitName() {
-        return null;
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (obj == null || obj.getClass() != getClass()) {
-            return false;
-        }
-        DocValueExtractor other = (DocValueExtractor) obj;
-        return fieldName.equals(other.fieldName);
-    }
-
-    @Override
-    public int hashCode() {
-        return fieldName.hashCode();
-    }
-
-    @Override
-    public String toString() {
-        /* % kind of looks like two 0s with a column separator between
-         * them so it makes me think of columnar storage which doc
-         * values are. */
-        return "%" + fieldName;
-    }
-}
@@ -0,0 +1,144 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.sql.execution.search.extractor;
+
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.document.DocumentField;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.xpack.sql.execution.ExecutionException;
+import org.joda.time.ReadableInstant;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * Extractor for ES fields. Works for both 'normal' fields and nested ones (which require hitName to be set).
+ * The latter is used as metadata in assembling the results in the tabular response.
+ */
+public class FieldHitExtractor implements HitExtractor {
+
+    private static final boolean ARRAYS_LENIENCY = false;
+
+    /**
+     * Stands for {@code field}. We try to use short names for {@link HitExtractor}s
+     * to save a few bytes when we send them back to the user.
+     */
+    static final String NAME = "f";
+
+    private final String fieldName, hitName;
+    private final boolean useDocValue;
+    private final String[] path;
+
+    public FieldHitExtractor(String name, boolean useDocValue) {
+        this(name, useDocValue, null);
+    }
+
+    public FieldHitExtractor(String name, boolean useDocValue, String hitName) {
+        this.fieldName = name;
+        this.useDocValue = useDocValue;
+        this.hitName = hitName;
+        this.path = useDocValue ? Strings.EMPTY_ARRAY : Strings.tokenizeToStringArray(fieldName, ".");
+    }
+
+    FieldHitExtractor(StreamInput in) throws IOException {
+        fieldName = in.readString();
+        useDocValue = in.readBoolean();
+        hitName = in.readOptionalString();
+        path = useDocValue ? Strings.EMPTY_ARRAY : Strings.tokenizeToStringArray(fieldName, ".");
+    }
+
+    @Override
+    public String getWriteableName() {
+        return NAME;
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeString(fieldName);
+        out.writeBoolean(useDocValue);
+        out.writeOptionalString(hitName);
+    }
+
+    @Override
+    public Object get(SearchHit hit) {
+        Object value = null;
+        if (useDocValue) {
+            DocumentField field = hit.field(fieldName);
+            if (field != null) {
+                checkMultiValue(field.getValues());
+                value = field.getValue();
+                if (value instanceof ReadableInstant) {
+                    value = ((ReadableInstant) value).getMillis();
+                }
+            }
+        } else {
+            Map<String, Object> source = hit.getSourceAsMap();
+            if (source != null) {
+                value = extractFromSource(source);
+            }
+        }
+        return value;
+    }
+
+    private void checkMultiValue(Object values) {
+        if (!ARRAYS_LENIENCY && values != null && values instanceof List && ((List<?>) values).size() > 1) {
+            throw new ExecutionException("Arrays (returned by [%s]) are not supported", fieldName);
+        }
+    }
+
+    @SuppressWarnings("unchecked")
+    Object extractFromSource(Map<String, Object> map) {
+        Object value = null;
+        // each node is a key inside the map
+        for (String node : path) {
+            // if it's not the first step, start unpacking
+            if (value != null) {
+                if (value instanceof Map) {
+                    map = (Map<String, Object>) value;
+                } else {
+                    throw new ExecutionException("Cannot extract value [%s] from source", fieldName);
+                }
+            }
+            value = map.get(node);
+        }
+        checkMultiValue(value);
+        return value;
+    }
+
+    @Override
+    public String hitName() {
+        return hitName;
+    }
+
+    public String fieldName() {
+        return fieldName;
+    }
+
+    @Override
+    public String toString() {
+        return fieldName + "@" + hitName;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (obj == null || obj.getClass() != getClass()) {
+            return false;
+        }
+        FieldHitExtractor other = (FieldHitExtractor) obj;
+        return fieldName.equals(other.fieldName)
+                && hitName.equals(other.hitName)
+                && useDocValue == other.useDocValue;
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(fieldName, useDocValue, hitName);
+    }
+}
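`extractFromSource` above walks the `_source` map one dotted-path segment at a time and rejects multi-valued results (`ARRAYS_LENIENCY` is false). A self-contained re-implementation of that walk over a plain `Map`, with the exception type simplified to `IllegalStateException`, followed by a tiny usage example in `main`:

```java
import java.util.List;
import java.util.Map;

public class SourcePathWalk {
    @SuppressWarnings("unchecked")
    static Object extract(Map<String, Object> source, String fieldName) {
        Object value = null;
        // each dotted segment is a key one level deeper in the source map
        for (String node : fieldName.split("\\.")) {
            if (value != null) {
                if (value instanceof Map) {
                    source = (Map<String, Object>) value; // descend into the nested object
                } else {
                    throw new IllegalStateException("Cannot extract value [" + fieldName + "] from source");
                }
            }
            value = source.get(node);
        }
        // mirror checkMultiValue: multi-valued results are rejected by default
        if (value instanceof List && ((List<?>) value).size() > 1) {
            throw new IllegalStateException("Arrays (returned by [" + fieldName + "]) are not supported");
        }
        return value;
    }

    public static void main(String[] args) {
        Map<String, Object> source = Map.of("address", Map.of("city", "Amsterdam"));
        System.out.println(extract(source, "address.city")); // prints Amsterdam
    }
}
```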
@@ -22,5 +22,5 @@ public interface HitExtractor extends NamedWriteable {
      * Name of the inner hit needed by this extractor if it needs one, {@code null} otherwise.
      */
     @Nullable
-    String innerHitName();
+    String hitName();
 }
@@ -20,9 +20,7 @@ public abstract class HitExtractors {
     public static List<NamedWriteableRegistry.Entry> getNamedWriteables() {
         List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
         entries.add(new Entry(HitExtractor.class, ConstantExtractor.NAME, ConstantExtractor::new));
-        entries.add(new Entry(HitExtractor.class, DocValueExtractor.NAME, DocValueExtractor::new));
-        entries.add(new Entry(HitExtractor.class, InnerHitExtractor.NAME, InnerHitExtractor::new));
-        entries.add(new Entry(HitExtractor.class, SourceExtractor.NAME, SourceExtractor::new));
+        entries.add(new Entry(HitExtractor.class, FieldHitExtractor.NAME, FieldHitExtractor::new));
         entries.add(new Entry(HitExtractor.class, ComputingHitExtractor.NAME, ComputingHitExtractor::new));
         entries.add(new Entry(HitExtractor.class, ScoreExtractor.NAME, in -> ScoreExtractor.INSTANCE));
         entries.addAll(Processors.getNamedWriteables());
@@ -1,116 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.sql.execution.search.extractor;
-
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.document.DocumentField;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.search.SearchHit;
-import org.elasticsearch.xpack.sql.execution.ExecutionException;
-
-import java.io.IOException;
-import java.util.Map;
-import java.util.Objects;
-
-public class InnerHitExtractor implements HitExtractor {
-    /**
-     * Stands for {@code inner}. We try to use short names for {@link HitExtractor}s
-     * to save a few bytes when when we send them back to the user.
-     */
-    static final String NAME = "i";
-    private final String hitName, fieldName;
-    private final boolean useDocValue;
-    private final String[] tree;
-
-    public InnerHitExtractor(String hitName, String name, boolean useDocValue) {
-        this.hitName = hitName;
-        this.fieldName = name;
-        this.useDocValue = useDocValue;
-        this.tree = useDocValue ? Strings.EMPTY_ARRAY : Strings.tokenizeToStringArray(name, ".");
-    }
-
-    InnerHitExtractor(StreamInput in) throws IOException {
-        hitName = in.readString();
-        fieldName = in.readString();
-        useDocValue = in.readBoolean();
-        tree = useDocValue ? Strings.EMPTY_ARRAY : Strings.tokenizeToStringArray(fieldName, ".");
-    }
-
-    @Override
-    public String getWriteableName() {
-        return NAME;
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeString(hitName);
-        out.writeString(fieldName);
-        out.writeBoolean(useDocValue);
-    }
-
-    @SuppressWarnings("unchecked")
-    @Override
-    public Object get(SearchHit hit) {
-        if (useDocValue) {
-            DocumentField field = hit.field(fieldName);
-            return field != null ? field.getValue() : null;
-        }
-        else {
-            Map<String, Object> source = hit.getSourceAsMap();
-            if (source == null) {
-                return null;
-            }
-            Object value = null;
-            for (String node : tree) {
-                if (value != null) {
-                    if (value instanceof Map) {
-                        source = (Map<String, Object>) value;
-                    }
-                    else {
-                        throw new ExecutionException("Cannot extract value %s from source", fieldName);
-                    }
-                }
-                value = source.get(node);
-            }
-            return value;
-        }
-    }
-
-    @Override
-    public String innerHitName() {
-        return hitName;
-    }
-
-    String fieldName() {
-        return fieldName;
-    }
-
-    public String hitName() {
-        return hitName;
-    }
-
-    @Override
-    public String toString() {
-        return fieldName + "@" + hitName;
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (obj == null || obj.getClass() != getClass()) {
-            return false;
-        }
-        InnerHitExtractor other = (InnerHitExtractor) obj;
-        return fieldName.equals(other.fieldName)
-                && hitName.equals(other.hitName)
-                && useDocValue == other.useDocValue;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(hitName, fieldName, useDocValue);
-    }
-}
@@ -39,7 +39,7 @@ public class ScoreExtractor implements HitExtractor {
     }

     @Override
-    public String innerHitName() {
+    public String hitName() {
         return null;
     }
@@ -1,75 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.sql.execution.search.extractor;
-
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.search.SearchHit;
-
-import java.io.IOException;
-import java.util.Map;
-
-public class SourceExtractor implements HitExtractor {
-    /**
-     * Stands for {@code _source}. We try to use short names for {@link HitExtractor}s
-     * to save a few bytes when when we send them back to the user.
-     */
-    public static final String NAME = "s";
-    private final String fieldName;
-
-    public SourceExtractor(String name) {
-        this.fieldName = name;
-    }
-
-    SourceExtractor(StreamInput in) throws IOException {
-        fieldName = in.readString();
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeString(fieldName);
-    }
-
-    @Override
-    public String getWriteableName() {
-        return NAME;
-    }
-
-    @Override
-    public Object get(SearchHit hit) {
-        Map<String, Object> source = hit.getSourceAsMap();
-        // TODO I think this will not work with dotted field names (objects or actual dots in the names)
-        // confusingly, I think this is actually handled by InnerHitExtractor. This needs investigating or renaming
-        // Tracked by https://github.com/elastic/x-pack-elasticsearch/issues/2874
-        return source != null ? source.get(fieldName) : null;
-    }
-
-    @Override
-    public String innerHitName() {
-        return null;
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (obj == null || obj.getClass() != getClass()) {
-            return false;
-        }
-        SourceExtractor other = (SourceExtractor) obj;
-        return fieldName.equals(other.fieldName);
-    }
-
-    @Override
-    public int hashCode() {
-        return fieldName.hashCode();
-    }
-
-    @Override
-    public String toString() {
-        /* # is sometimes known as the "hash" sign which reminds
-         * me of a hash table lookup. */
-        return "#" + fieldName;
-    }
-}
@@ -32,7 +32,7 @@ public class Alias extends NamedExpression {
    public Alias(Location location, String name, String qualifier, Expression child, ExpressionId id) {
        this(location, name, qualifier, child, id, false);
    }

    public Alias(Location location, String name, String qualifier, Expression child, ExpressionId id, boolean synthetic) {
        super(location, name, singletonList(child), id, synthetic);
        this.child = child;

@@ -68,13 +68,13 @@ public class Alias extends NamedExpression {
    private Attribute createAttribute() {
        if (resolved()) {
            Expression c = child();

            Attribute attr = Expressions.attribute(c);
            if (attr != null) {
                return attr.clone(location(), name(), child.dataType(), qualifier, child.nullable(), id(), synthetic());
            }
            else {
                return new RootFieldAttribute(location(), name(), child.dataType(), qualifier, child.nullable(), id(), synthetic());
                return new FieldAttribute(location(), null, name(), child.dataType(), qualifier, child.nullable(), id(), synthetic());
            }
        }

@@ -5,50 +5,127 @@
 */
package org.elasticsearch.xpack.sql.expression;

import java.util.Map;
import java.util.Map.Entry;

import org.elasticsearch.xpack.sql.analysis.index.MappingException;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.TextType;
import org.elasticsearch.xpack.sql.type.KeywordType;
import org.elasticsearch.xpack.sql.type.NestedType;
import org.elasticsearch.xpack.sql.type.StringType;
import org.elasticsearch.xpack.sql.util.StringUtils;

public abstract class FieldAttribute extends TypedAttribute {
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;

    FieldAttribute(Location location, String name, DataType dataType) {
        this(location, name, dataType, null, true, null, false);
/**
 * Attribute for an ES field.
 * To differentiate between the different type of fields this class offers:
 * - name - the fully qualified name (foo.bar.tar)
 * - path - the path pointing to the field name (foo.bar)
 * - parent - the immediate parent of the field; useful for figuring out the type of field (nested vs object)
 * - nestedParent - if nested, what's the parent (which might not be the immediate one)
 */
public class FieldAttribute extends TypedAttribute {

    private final FieldAttribute parent;
    private final FieldAttribute nestedParent;
    private final String path;

    public FieldAttribute(Location location, String name, DataType dataType) {
        this(location, null, name, dataType);
    }

    FieldAttribute(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) {
    public FieldAttribute(Location location, FieldAttribute parent, String name, DataType dataType) {
        this(location, parent, name, dataType, null, true, null, false);
    }

    public FieldAttribute(Location location, FieldAttribute parent, String name, DataType dataType, String qualifier,
            boolean nullable, ExpressionId id, boolean synthetic) {
        super(location, name, dataType, qualifier, nullable, id, synthetic);
    }
        this.path = parent != null ? parent.name() : StringUtils.EMPTY;
        this.parent = parent;

    public boolean isAnalyzed() {
        return dataType() instanceof TextType;
    }

    public FieldAttribute notAnalyzedAttribute() {
        if (isAnalyzed()) {
            Map<String, DataType> docValueFields = ((TextType) dataType()).docValueFields();
            if (docValueFields.size() == 1) {
                Entry<String, DataType> entry = docValueFields.entrySet().iterator().next();
                return with(entry.getKey(), entry.getValue());
        // figure out the last nested parent
        FieldAttribute nestedPar = null;
        if (parent != null) {
            nestedPar = parent.nestedParent;
            if (parent.dataType() instanceof NestedType) {
                nestedPar = parent;
            }
            if (docValueFields.isEmpty()) {
        }
        this.nestedParent = nestedPar;
    }

    public FieldAttribute parent() {
        return parent;
    }

    public String path() {
        return path;
    }

    public String qualifiedPath() {
        return qualifier() != null ? qualifier() + "." + path : path;
    }

    public boolean isNested() {
        return nestedParent != null;
    }

    public FieldAttribute nestedParent() {
        return nestedParent;
    }

    public boolean isInexact() {
        return (dataType() instanceof StringType && ((StringType) dataType()).isInexact());
    }

    public FieldAttribute exactAttribute() {
        if (isInexact()) {
            Map<String, KeywordType> exactFields = ((StringType) dataType()).exactKeywords();
            if (exactFields.size() == 1) {
                Entry<String, KeywordType> entry = exactFields.entrySet().iterator().next();
                return innerField(entry.getKey(), entry.getValue());
            }
            if (exactFields.isEmpty()) {
                throw new MappingException("No docValue multi-field defined for %s", name());
            }
            if (docValueFields.size() > 1) {
                DataType dataType = docValueFields.get("keyword");
                if (dataType != null && dataType.hasDocValues()) {
                    return with("keyword", dataType);
                }
                throw new MappingException("Default 'keyword' not available as multi-fields and multiple options available for %s", name());
            // pick the default - keyword
            if (exactFields.size() > 1) {
                throw new MappingException("Multiple exact keyword candidates %s available for %s; specify which one to use",
                        exactFields.keySet(), name());
            }
        }
        return this;
    }

    protected FieldAttribute with(String subFieldName, DataType type) {
        return (FieldAttribute) clone(location(), name() + "." + subFieldName, type, qualifier(), nullable(), id(), synthetic());
    private FieldAttribute innerField(String subFieldName, DataType type) {
        return new FieldAttribute(location(), this, name() + "." + subFieldName, type, qualifier(), nullable(), id(), synthetic());
    }
}

    @Override
    protected Expression canonicalize() {
        return new FieldAttribute(location(), null, "<none>", dataType(), null, true, id(), false);
    }

    @Override
    protected Attribute clone(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) {
        FieldAttribute qualifiedParent = parent != null ? (FieldAttribute) parent.withQualifier(qualifier) : null;
        return new FieldAttribute(location, qualifiedParent, name, dataType, qualifier, nullable, id, synthetic);
    }

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), path);
    }

    @Override
    public boolean equals(Object obj) {
        return super.equals(obj) && Objects.equals(path, ((FieldAttribute) obj).path);
    }

    @Override
    protected String label() {
        return "f";
    }
}

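The parent chain is what lets this single FieldAttribute replace the old root/nested variants: path falls out of parent.name(), and the nested ancestor is inherited unless the immediate parent is itself nested. A toy model of that constructor logic (a simplified sketch with hypothetical names, not the real class) for a mapping like foo (object) > bar (nested) > tar (leaf):

public class FieldChainDemo {

    enum Kind { OBJECT, NESTED, LEAF }

    static final class Field {
        final Field parent;
        final String name;    // fully qualified, e.g. "foo.bar.tar"
        final Kind kind;
        final Field nestedParent;

        Field(Field parent, String name, Kind kind) {
            this.parent = parent;
            this.name = name;
            this.kind = kind;
            // same walk as the constructor above: inherit the parent's nested
            // ancestor, unless the immediate parent is itself nested
            Field np = parent == null ? null : parent.nestedParent;
            if (parent != null && parent.kind == Kind.NESTED) {
                np = parent;
            }
            this.nestedParent = np;
        }

        String path() {
            return parent != null ? parent.name : "";
        }

        boolean isNested() {
            return nestedParent != null;
        }
    }

    public static void main(String[] args) {
        Field foo = new Field(null, "foo", Kind.OBJECT);
        Field bar = new Field(foo, "foo.bar", Kind.NESTED);
        Field tar = new Field(bar, "foo.bar.tar", Kind.LEAF);

        System.out.println(tar.path());            // foo.bar
        System.out.println(tar.isNested());        // true
        System.out.println(tar.nestedParent.name); // foo.bar
        System.out.println(foo.isNested());        // false
    }
}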
@@ -1,61 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.expression;

import java.util.List;

import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.util.StringUtils;

import static java.util.Collections.emptyList;

public class NestedFieldAttribute extends FieldAttribute {

    private final List<String> parents;
    private final String parentPath;

    public NestedFieldAttribute(Location location, String name, DataType dataType, List<String> parents) {
        this(location, name, dataType, null, true, null, false, parents);
    }

    public NestedFieldAttribute(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id, boolean synthetic, List<String> parents) {
        super(location, name, dataType, qualifier, nullable, id, synthetic);
        this.parents = parents == null || parents.isEmpty() ? emptyList() : parents;
        this.parentPath = StringUtils.concatWithDot(parents);
    }

    public List<String> parents() {
        return parents;
    }

    public String parentPath() {
        return parentPath;
    }

    @Override
    protected Expression canonicalize() {
        return new NestedFieldAttribute(location(), "<none>", dataType(), null, true, id(), false, emptyList());
    }

    @Override
    protected Attribute clone(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) {
        return new NestedFieldAttribute(location, name, dataType, qualifier, nullable, id, synthetic, parents);
    }

    @Override
    public String toString() {
        if (parents.size() > 0) {
            return name().replace('.', '>') + "#" + id();
        }
        return super.toString();
    }

    @Override
    protected String label() {
        return "n";
    }
}

@@ -1,35 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.expression;

import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;

public class RootFieldAttribute extends FieldAttribute {

    public RootFieldAttribute(Location location, String name, DataType dataType) {
        this(location, name, dataType, null, true, null, false);
    }

    public RootFieldAttribute(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) {
        super(location, name, dataType, qualifier, nullable, id, synthetic);
    }

    @Override
    protected Expression canonicalize() {
        return new RootFieldAttribute(location(), "<none>", dataType(), null, true, id(), false);
    }

    @Override
    protected Attribute clone(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) {
        return new RootFieldAttribute(location, name, dataType, qualifier, nullable, id, synthetic);
    }

    @Override
    protected String label() {
        return "r";
    }
}

@@ -35,11 +35,6 @@ public abstract class TypedAttribute extends Attribute {

    @Override
    public boolean equals(Object obj) {
        if (super.equals(obj)) {
            TypedAttribute other = (TypedAttribute) obj;
            return Objects.equals(dataType, other.dataType);
        }

        return false;
        return super.equals(obj) && Objects.equals(dataType, ((TypedAttribute) obj).dataType);
    }
}

@@ -17,9 +17,11 @@ import java.util.Objects;

import static java.lang.String.format;

// unfortunately we can't use UnresolvedNamedExpression
public class UnresolvedAttribute extends Attribute implements Unresolvable {

    private final String unresolvedMsg;
    private final boolean customMessage;
    private final Object resolutionMetadata;

    public UnresolvedAttribute(Location location, String name) {

@@ -36,6 +38,7 @@ public class UnresolvedAttribute extends Attribute implements Unresolvable {

    public UnresolvedAttribute(Location location, String name, String qualifier, ExpressionId id, String unresolvedMessage, Object resolutionMetadata) {
        super(location, name, qualifier, id);
        this.customMessage = unresolvedMessage != null;
        this.unresolvedMsg = unresolvedMessage == null ? errorMessage(qualifiedName(), null) : unresolvedMessage;
        this.resolutionMetadata = resolutionMetadata;
    }

@@ -45,6 +48,10 @@ public class UnresolvedAttribute extends Attribute implements Unresolvable {
        return resolutionMetadata;
    }

    public boolean customMessage() {
        return customMessage;
    }

    @Override
    public boolean resolved() {
        return false;

@@ -14,7 +14,7 @@ import static java.util.Collections.emptyList;

public class UnresolvedStar extends UnresolvedNamedExpression {

    // typically used for nested fields
    // typically used for nested fields or inner/dotted fields
    private final UnresolvedAttribute qualifier;

    public UnresolvedStar(Location location, UnresolvedAttribute qualifier) {

@@ -16,9 +16,9 @@ import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.ExpressionId;
import org.elasticsearch.xpack.sql.expression.ExpressionSet;
import org.elasticsearch.xpack.sql.expression.Expressions;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.expression.Literal;
import org.elasticsearch.xpack.sql.expression.NamedExpression;
import org.elasticsearch.xpack.sql.expression.NestedFieldAttribute;
import org.elasticsearch.xpack.sql.expression.Order;
import org.elasticsearch.xpack.sql.expression.function.Function;
import org.elasticsearch.xpack.sql.expression.function.FunctionAttribute;

@@ -95,19 +95,19 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {

    @Override
    protected Iterable<RuleExecutor<LogicalPlan>.Batch> batches() {
        Batch resolution = new Batch("Finish Analysis",
                new PruneSubqueryAliases(),
                CleanAliases.INSTANCE
                );

        Batch aggregate = new Batch("Aggregation",
                new PruneDuplicatesInGroupBy(),
                new ReplaceDuplicateAggsWithReferences(),
                new ReplaceAggsWithMatrixStats(),
                new ReplaceAggsWithExtendedStats(),
                new ReplaceAggsWithStats(),
                new PromoteStatsToExtendedStats(),
                new ReplaceAggsWithPercentiles(),
                new ReplceAggsWithPercentileRanks()
                );

@@ -134,10 +134,10 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
                new SkipQueryOnLimitZero(),
                new SkipQueryIfFoldingProjection()
                );
                //new BalanceBooleanTrees());
        Batch label = new Batch("Set as Optimized", Limiter.ONCE,
                new SetAsOptimized());

        return Arrays.asList(resolution, aggregate, operators, local, label);
    }

@@ -157,7 +157,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
    static class CleanAliases extends OptimizerRule<LogicalPlan> {

        private static final CleanAliases INSTANCE = new CleanAliases();

        CleanAliases() {
            super(TransformDirection.UP);
        }

@@ -308,7 +308,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
                return p;
            }

            // update old agg attributes
            return ReplaceAggsWithStats.updateAggAttributes(p, promotedFunctionIds);
        }

@@ -360,12 +360,12 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
            Map<Expression, Match> potentialPromotions = new LinkedHashMap<>();

            p.forEachExpressionsUp(e -> collect(e, potentialPromotions));

            // no promotions found - skip
            if (potentialPromotions.isEmpty()) {
                return p;
            }

            // start promotion

            // old functionId to new function attribute

@@ -431,13 +431,13 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {

            // 2a. collect ScalarFunctions that unwrapped refer to any of the updated aggregates
            // 2b. replace any of the old ScalarFunction attributes

            final Set<String> newAggIds = new LinkedHashSet<>(promotedFunctionIds.size());

            for (AggregateFunctionAttribute afa : promotedFunctionIds.values()) {
                newAggIds.add(afa.functionId());
            }

            final Map<String, ScalarFunctionAttribute> updatedScalarAttrs = new LinkedHashMap<>();
            final Map<ExpressionId, ScalarFunctionAttribute> updatedScalarAliases = new LinkedHashMap<>();

@@ -452,7 +452,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
                    sfa = updatedScalarAliases.getOrDefault(sfa.id(), sfa);
                    return sfa;
                }

                // unwrap aliases as they 'hide' functions under their own attributes
                if (e instanceof Alias) {
                    Attribute att = Expressions.attribute(e);

@@ -500,7 +500,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
            return e;
        }
    }

    static class PromoteStatsToExtendedStats extends Rule<LogicalPlan, LogicalPlan> {

        @Override

@@ -709,8 +709,11 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {

            for (Order order : ob.order()) {
                Attribute attr = ((NamedExpression) order.child()).toAttribute();
                if (attr instanceof NestedFieldAttribute) {
                    nestedOrders.put(((NestedFieldAttribute) attr).parentPath(), order);
                if (attr instanceof FieldAttribute) {
                    FieldAttribute fa = (FieldAttribute) attr;
                    if (fa.isNested()) {
                        nestedOrders.put(fa.nestedParent().name(), order);
                    }
                }
            }

@@ -723,8 +726,11 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
            List<String> nestedTopFields = new ArrayList<>();

            for (Attribute attr : project.output()) {
                if (attr instanceof NestedFieldAttribute) {
                    nestedTopFields.add(((NestedFieldAttribute) attr).parentPath());
                if (attr instanceof FieldAttribute) {
                    FieldAttribute fa = (FieldAttribute) attr;
                    if (fa.isNested()) {
                        nestedTopFields.add(fa.nestedParent().name());
                    }
                }
            }

@@ -933,7 +939,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
            return project;
        }

        // normally only the upper projections should survive but since the lower list might have aliases definitions
        // that might be reused by the upper one, these need to be replaced.
        // for example an alias defined in the lower list might be referred in the upper - without replacing it the alias becomes invalid
        private List<NamedExpression> combineProjections(List<? extends NamedExpression> upper, List<? extends NamedExpression> lower) {

@@ -948,7 +954,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {

            AttributeMap<Alias> aliases = new AttributeMap<>(map);
            List<NamedExpression> replaced = new ArrayList<>();

            // replace any matching attribute with a lower alias (if there's a match)
            // but clean-up non-top aliases at the end
            for (NamedExpression ne : upper) {

@@ -956,7 +962,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
                    Alias as = aliases.get(a);
                    return as != null ? as : a;
                }, Attribute.class);

                replaced.add((NamedExpression) CleanAliases.trimNonTopLevelAliases(replacedExp));
            }
            return replaced;

@@ -991,13 +997,13 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
                    }
                }
            }, Project.class);

            if (attrs.isEmpty()) {
                return plan;
            }

            AtomicBoolean stop = new AtomicBoolean(false);

            // propagate folding up to unary nodes
            // anything higher and the propagate stops
            plan = plan.transformUp(p -> {

@@ -1021,12 +1027,12 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {

                return p;
            });

            // finally clean-up aliases
            return CleanAliases.INSTANCE.apply(plan);

        }

        private boolean canPropagateFoldable(LogicalPlan p) {
            return p instanceof Project || p instanceof Filter || p instanceof SubQueryAlias || p instanceof Aggregate || p instanceof Limit || p instanceof OrderBy;
        }

@@ -1061,7 +1067,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
            }
            return e;
        }

        private Expression fold(Expression e) {
            // literals are always foldable, so avoid creating a duplicate
            if (e.foldable() && !(e instanceof Literal)) {

@@ -1070,7 +1076,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
                return e;
            }
        }

    static class BooleanSimplification extends OptimizerExpressionRule {

        BooleanSimplification() {

@@ -1168,7 +1174,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
                return combineAnd(combine(common, new Or(combineLeft.location(), combineLeft, combineRight)));
            }

            // TODO: eliminate conjunction/disjunction
            return bc;
        }

@@ -1271,7 +1277,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
                return new Range(and.location(), lb.left(), lb.right(), l instanceof GreaterThanOrEqual, rb.right(),
                        r instanceof LessThanOrEqual);
            }
            // </<= AND >/>=
            else if ((r instanceof GreaterThan || r instanceof GreaterThanOrEqual)
                    && (l instanceof LessThan || l instanceof LessThanOrEqual)) {
                return new Range(and.location(), rb.left(), rb.right(), r instanceof GreaterThanOrEqual, lb.right(),

@@ -1289,7 +1295,7 @@ public class Optimizer extends RuleExecutor<LogicalPlan> {
        @Override
        protected LogicalPlan rule(Limit limit) {
            if (limit.limit() instanceof Literal) {
                if (Integer.valueOf(0).equals((Number) (((Literal) limit.limit()).fold()))) {
                if (Integer.valueOf(0).equals((((Literal) limit.limit()).fold()))) {
                    return new LocalRelation(limit.location(), new EmptyExecutable(limit.output()));
                }
            }

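One detail worth calling out in the SkipQueryOnLimitZero change just above: Integer.equals is type-sensitive, so the zero check only matches when the folded literal actually boxes to an Integer; a Long zero would compare false, with or without the removed (Number) cast. A two-line demonstration of that boxing behavior (standalone sketch, not the optimizer rule):

public class BoxedZeroCheck {
    public static void main(String[] args) {
        Object foldedInt = 0;    // boxes to Integer
        Object foldedLong = 0L;  // boxes to Long

        System.out.println(Integer.valueOf(0).equals(foldedInt));  // true
        System.out.println(Integer.valueOf(0).equals(foldedLong)); // false: different boxed type
    }
}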
@@ -46,7 +46,6 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ArithmeticBinaryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ArithmeticUnaryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.BooleanLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.CastContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ColumnExpressionContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ColumnReferenceContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ComparisonContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DecimalLiteralContext;

@@ -71,7 +70,6 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StarContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StringLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StringQueryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.SubqueryExpressionContext;
import org.elasticsearch.xpack.sql.plan.TableIdentifier;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.DataTypes;

@@ -119,42 +117,18 @@ abstract class ExpressionBuilder extends IdentifierBuilder {

    @Override
    public Expression visitStar(StarContext ctx) {
        return new UnresolvedStar(source(ctx), ctx.qualifier != null ? visitColumnExpression(ctx.qualifier) : null);
    }

    @Override
    public Object visitDereference(DereferenceContext ctx) {
        String fieldName = visitIdentifier(ctx.fieldName);
        String qualifier = null;
        Expression base = expression(ctx.base);
        if (base != null) {
            if (base instanceof UnresolvedAttribute) {
                UnresolvedAttribute b = (UnresolvedAttribute) base;
                return new UnresolvedAttribute(source(ctx), b.name() + "." + fieldName, b.qualifier());
            }
            else {
                throw new UnsupportedOperationException(format(Locale.ROOT, "Unknown dereferencing using %s ", base.getClass()));
            }
        }
        return new UnresolvedAttribute(source(ctx), fieldName, qualifier);
    }

    @Override
    public UnresolvedAttribute visitColumnExpression(ColumnExpressionContext ctx) {
        String qualifier = null;
        if (ctx.alias != null) {
            qualifier = visitIdentifier(ctx.alias);
        }
        else if (ctx.table != null) {
            TableIdentifier table = visitTableIdentifier(ctx.table);
            qualifier = table.index();
        }
        return new UnresolvedAttribute(source(ctx), visitIdentifier(ctx.name), qualifier);
        return new UnresolvedStar(source(ctx), ctx.qualifiedName() != null ?
                new UnresolvedAttribute(source(ctx.qualifiedName()), visitQualifiedName(ctx.qualifiedName())) : null);
    }

    @Override
    public Object visitColumnReference(ColumnReferenceContext ctx) {
        return visitColumnExpression(ctx.columnExpression());
        return new UnresolvedAttribute(source(ctx), visitIdentifier(ctx.identifier()));
    }

    @Override
    public Object visitDereference(DereferenceContext ctx) {
        return new UnresolvedAttribute(source(ctx), visitQualifiedName(ctx.qualifiedName()));
    }

    @Override

@@ -217,7 +191,7 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
        case SqlBaseParser.RLIKE:
            e = new RLike(loc, exp, expression(pCtx.pattern));
            break;
        case SqlBaseParser.NULL:;
        case SqlBaseParser.NULL:
            // shortcut to avoid double negation later on (since there's no IsNull (missing in ES is a negated exists))
            e = new IsNotNull(loc, exp);
            return pCtx.NOT() != null ? e : new Not(loc, e);

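The net effect of the parser change above is that a dotted reference such as foo.bar.tar is no longer folded one dereference at a time; the whole qualified name is handed to resolution as a single attribute name and only split against the mapping later. A stand-in sketch of that joining step (hypothetical names, not the ANTLR visitor):

import java.util.List;

public class QualifiedNameDemo {
    // Joins the identifier parts of a qualifiedName rule into one dotted name,
    // the shape visitQualifiedName produces for the UnresolvedAttribute above.
    static String visitQualifiedName(List<String> identifierParts) {
        return String.join(".", identifierParts);
    }

    public static void main(String[] args) {
        System.out.println(visitQualifiedName(List.of("foo", "bar", "tar"))); // foo.bar.tar
    }
}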
@@ -21,7 +21,7 @@ abstract class IdentifierBuilder extends AbstractBuilder {
    @Override
    public TableIdentifier visitTableIdentifier(TableIdentifierContext ctx) {
        String index = text(ctx.index);

        Location source = source(ctx);
        validateIndex(index, source);

@@ -29,14 +29,14 @@ abstract class IdentifierBuilder extends AbstractBuilder {
    }

    // see https://github.com/elastic/elasticsearch/issues/6736
    private static void validateIndex(String index, Location source) {
    static void validateIndex(String index, Location source) {
        for (int i = 0; i < index.length(); i++) {
            char c = index.charAt(i);
            if (Character.isUpperCase(c)) {
                throw new ParsingException(source, format(Locale.ROOT, "Invalid index name (needs to be lowercase) %s", index));
            }
            if (c == '.' || c == '\\' || c == '/' || c == '*' || c == '?' || c == '<' || c == '>' || c == '|' || c == ',') {
                throw new ParsingException(source, format(Locale.ROOT, "Illegal character %c in index name %s", c, index));
            if (c == '\\' || c == '/' || c == '<' || c == '>' || c == '|' || c == ',' || c == ' ') {
                throw new ParsingException(source, format(Locale.ROOT, "Invalid index name (illegal character %c) %s", c, index));
            }
        }
    }

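The relaxed validateIndex above is what lets internal indices (leading dot) and wildcard patterns through the parser, in line with the commit's "improve index validation to allow use of internal indices" note. Restating the check as a small self-contained predicate (a sketch assuming only the listed characters are rejected):

public class IndexNameCheck {
    // Mirrors the new loop above: reject uppercase plus the remaining illegal
    // characters; dots, '*' and '?' now pass.
    static boolean isValid(String index) {
        for (int i = 0; i < index.length(); i++) {
            char c = index.charAt(i);
            if (Character.isUpperCase(c)) {
                return false;
            }
            if (c == '\\' || c == '/' || c == '<' || c == '>' || c == '|' || c == ',' || c == ' ') {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        System.out.println(isValid(".security")); // true: leading dot now accepted
        System.out.println(isValid("logs-*"));    // true: wildcards pass the parser check
        System.out.println(isValid("a/b"));       // false: slash is still illegal
    }
}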
@@ -640,18 +640,6 @@ class SqlBaseBaseListener implements SqlBaseListener {
     * <p>The default implementation does nothing.</p>
     */
    @Override public void exitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx) { }
    /**
     * {@inheritDoc}
     *
     * <p>The default implementation does nothing.</p>
     */
    @Override public void enterColumnExpression(SqlBaseParser.ColumnExpressionContext ctx) { }
    /**
     * {@inheritDoc}
     *
     * <p>The default implementation does nothing.</p>
     */
    @Override public void exitColumnExpression(SqlBaseParser.ColumnExpressionContext ctx) { }
    /**
     * {@inheritDoc}
     *

@@ -380,13 +380,6 @@ class SqlBaseBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements SqlBaseVisitor<T> {
     * {@link #visitChildren} on {@code ctx}.</p>
     */
    @Override public T visitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx) { return visitChildren(ctx); }
    /**
     * {@inheritDoc}
     *
     * <p>The default implementation returns the result of calling
     * {@link #visitChildren} on {@code ctx}.</p>
     */
    @Override public T visitColumnExpression(SqlBaseParser.ColumnExpressionContext ctx) { return visitChildren(ctx); }
    /**
     * {@inheritDoc}
     *

@@ -30,11 +30,11 @@ class SqlBaseLexer extends Lexer {
    MAPPED=39, MATCH=40, NATURAL=41, NOT=42, NULL=43, ON=44, OPTIMIZED=45,
    OR=46, ORDER=47, OUTER=48, PARSED=49, PHYSICAL=50, PLAN=51, QUERY=52,
    RIGHT=53, RLIKE=54, SCHEMAS=55, SELECT=56, SHOW=57, TABLES=58, TEXT=59,
    TRUE=60, USING=61, VERIFY=62, WHEN=63, WHERE=64, WITH=65, EQ=66, NEQ=67,
    LT=68, LTE=69, GT=70, GTE=71, PLUS=72, MINUS=73, ASTERISK=74, SLASH=75,
    PERCENT=76, CONCAT=77, STRING=78, INTEGER_VALUE=79, DECIMAL_VALUE=80,
    IDENTIFIER=81, DIGIT_IDENTIFIER=82, QUOTED_IDENTIFIER=83, BACKQUOTED_IDENTIFIER=84,
    SIMPLE_COMMENT=85, BRACKETED_COMMENT=86, WS=87, UNRECOGNIZED=88;
    TRUE=60, USING=61, VERIFY=62, WHERE=63, WITH=64, EQ=65, NEQ=66, LT=67,
    LTE=68, GT=69, GTE=70, PLUS=71, MINUS=72, ASTERISK=73, SLASH=74, PERCENT=75,
    CONCAT=76, STRING=77, INTEGER_VALUE=78, DECIMAL_VALUE=79, IDENTIFIER=80,
    DIGIT_IDENTIFIER=81, QUOTED_IDENTIFIER=82, BACKQUOTED_IDENTIFIER=83, SIMPLE_COMMENT=84,
    BRACKETED_COMMENT=85, WS=86, UNRECOGNIZED=87;
    public static String[] modeNames = {
        "DEFAULT_MODE"
    };

@@ -47,9 +47,9 @@ class SqlBaseLexer extends Lexer {
    "IS", "JOIN", "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "NATURAL", "NOT",
    "NULL", "ON", "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", "PHYSICAL",
    "PLAN", "QUERY", "RIGHT", "RLIKE", "SCHEMAS", "SELECT", "SHOW", "TABLES",
    "TEXT", "TRUE", "USING", "VERIFY", "WHEN", "WHERE", "WITH", "EQ", "NEQ",
    "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT",
    "CONCAT", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER",
    "TEXT", "TRUE", "USING", "VERIFY", "WHERE", "WITH", "EQ", "NEQ", "LT",
    "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CONCAT",
    "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER",
    "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "EXPONENT", "DIGIT", "LETTER",
    "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", "UNRECOGNIZED"
    };

@@ -64,8 +64,8 @@ class SqlBaseLexer extends Lexer {
    "'NULL'", "'ON'", "'OPTIMIZED'", "'OR'", "'ORDER'", "'OUTER'", "'PARSED'",
    "'PHYSICAL'", "'PLAN'", "'QUERY'", "'RIGHT'", "'RLIKE'", "'SCHEMAS'",
    "'SELECT'", "'SHOW'", "'TABLES'", "'TEXT'", "'TRUE'", "'USING'", "'VERIFY'",
    "'WHEN'", "'WHERE'", "'WITH'", "'='", null, "'<'", "'<='", "'>'", "'>='",
    "'+'", "'-'", "'*'", "'/'", "'%'", "'||'"
    "'WHERE'", "'WITH'", "'='", null, "'<'", "'<='", "'>'", "'>='", "'+'",
    "'-'", "'*'", "'/'", "'%'", "'||'"
    };
    private static final String[] _SYMBOLIC_NAMES = {
    null, null, null, null, null, "ALL", "ANALYZE", "ANALYZED", "AND", "ANY",

@@ -75,9 +75,9 @@ class SqlBaseLexer extends Lexer {
    "IS", "JOIN", "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "NATURAL", "NOT",
    "NULL", "ON", "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", "PHYSICAL",
    "PLAN", "QUERY", "RIGHT", "RLIKE", "SCHEMAS", "SELECT", "SHOW", "TABLES",
    "TEXT", "TRUE", "USING", "VERIFY", "WHEN", "WHERE", "WITH", "EQ", "NEQ",
    "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT",
    "CONCAT", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER",
    "TEXT", "TRUE", "USING", "VERIFY", "WHERE", "WITH", "EQ", "NEQ", "LT",
    "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CONCAT",
    "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER",
    "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", "BRACKETED_COMMENT",
    "WS", "UNRECOGNIZED"
    };

@@ -136,7 +136,7 @@ class SqlBaseLexer extends Lexer {
    public ATN getATN() { return _ATN; }

    public static final String _serializedATN =
        "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2Z\u02f5\b\1\4\2\t"+
        "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2Y\u02ee\b\1\4\2\t"+
        "\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+
        "\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+
        "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+
@@ -146,252 +146,250 @@ class SqlBaseLexer extends Lexer {
[remainder of the regenerated _serializedATN payload: roughly 250 lines of opaque escaped-string automaton data each for the old and new lexers; the extracted source is truncated partway through the new payload]
|
||||
"\2\2\2\23\u00d8\3\2\2\2\25\u00dc\3\2\2\2\27\u00df\3\2\2\2\31\u00e3\3\2"+
|
||||
"\2\2\33\u00eb\3\2\2\2\35\u00ee\3\2\2\2\37\u00f3\3\2\2\2!\u00fb\3\2\2\2"+
|
||||
"#\u0101\3\2\2\2%\u0106\3\2\2\2\'\u010f\3\2\2\2)\u0118\3\2\2\2+\u0123\3"+
|
||||
"\2\2\2-\u012a\3\2\2\2/\u0132\3\2\2\2\61\u013a\3\2\2\2\63\u0140\3\2\2\2"+
|
||||
"\65\u0147\3\2\2\2\67\u014c\3\2\2\29\u0151\3\2\2\2;\u015b\3\2\2\2=\u0164"+
|
||||
"\3\2\2\2?\u016a\3\2\2\2A\u0171\3\2\2\2C\u0174\3\2\2\2E\u017a\3\2\2\2G"+
|
||||
"\u017d\3\2\2\2I\u0182\3\2\2\2K\u0187\3\2\2\2M\u018c\3\2\2\2O\u0192\3\2"+
|
||||
"\2\2Q\u0199\3\2\2\2S\u019f\3\2\2\2U\u01a7\3\2\2\2W\u01ab\3\2\2\2Y\u01b0"+
|
||||
"\3\2\2\2[\u01b3\3\2\2\2]\u01bd\3\2\2\2_\u01c0\3\2\2\2a\u01c6\3\2\2\2c"+
|
||||
"\u01cc\3\2\2\2e\u01d3\3\2\2\2g\u01dc\3\2\2\2i\u01e1\3\2\2\2k\u01e7\3\2"+
|
||||
"\2\2m\u01ed\3\2\2\2o\u01f3\3\2\2\2q\u01fb\3\2\2\2s\u0202\3\2\2\2u\u0207"+
|
||||
"\3\2\2\2w\u020e\3\2\2\2y\u0213\3\2\2\2{\u0218\3\2\2\2}\u021e\3\2\2\2\177"+
|
||||
"\u0225\3\2\2\2\u0081\u022b\3\2\2\2\u0083\u0230\3\2\2\2\u0085\u0239\3\2"+
|
||||
"\2\2\u0087\u023b\3\2\2\2\u0089\u023d\3\2\2\2\u008b\u0240\3\2\2\2\u008d"+
|
||||
"\u0242\3\2\2\2\u008f\u0245\3\2\2\2\u0091\u0247\3\2\2\2\u0093\u0249\3\2"+
|
||||
"\2\2\u0095\u024b\3\2\2\2\u0097\u024d\3\2\2\2\u0099\u024f\3\2\2\2\u009b"+
|
||||
"\u0252\3\2\2\2\u009d\u025e\3\2\2\2\u009f\u028c\3\2\2\2\u00a1\u0290\3\2"+
|
||||
"\2\2\u00a3\u029a\3\2\2\2\u00a5\u02a2\3\2\2\2\u00a7\u02ad\3\2\2\2\u00a9"+
|
||||
"\u02b8\3\2\2\2\u00ab\u02c1\3\2\2\2\u00ad\u02c3\3\2\2\2\u00af\u02c5\3\2"+
|
||||
"\2\2\u00b1\u02d6\3\2\2\2\u00b3\u02e6\3\2\2\2\u00b5\u02ec\3\2\2\2\u00b7"+
|
||||
"\u00b8\7*\2\2\u00b8\4\3\2\2\2\u00b9\u00ba\7+\2\2\u00ba\6\3\2\2\2\u00bb"+
|
||||
"\u00bc\7.\2\2\u00bc\b\3\2\2\2\u00bd\u00be\7\60\2\2\u00be\n\3\2\2\2\u00bf"+
|
||||
"\u00c0\7C\2\2\u00c0\u00c1\7N\2\2\u00c1\u00c2\7N\2\2\u00c2\f\3\2\2\2\u00c3"+
|
||||
"\u00c4\7C\2\2\u00c4\u00c5\7P\2\2\u00c5\u00c6\7C\2\2\u00c6\u00c7\7N\2\2"+
|
||||
"\u00c7\u00c8\7[\2\2\u00c8\u00c9\7\\\2\2\u00c9\u00ca\7G\2\2\u00ca\16\3"+
|
||||
"\2\2\2\u00cb\u00cc\7C\2\2\u00cc\u00cd\7P\2\2\u00cd\u00ce\7C\2\2\u00ce"+
|
||||
"\u00cf\7N\2\2\u00cf\u00d0\7[\2\2\u00d0\u00d1\7\\\2\2\u00d1\u00d2\7G\2"+
|
||||
"\2\u00d2\u00d3\7F\2\2\u00d3\20\3\2\2\2\u00d4\u00d5\7C\2\2\u00d5\u00d6"+
|
||||
"\7P\2\2\u00d6\u00d7\7F\2\2\u00d7\22\3\2\2\2\u00d8\u00d9\7C\2\2\u00d9\u00da"+
|
||||
"\7P\2\2\u00da\u00db\7[\2\2\u00db\24\3\2\2\2\u00dc\u00dd\7C\2\2\u00dd\u00de"+
|
||||
"\7U\2\2\u00de\26\3\2\2\2\u00df\u00e0\7C\2\2\u00e0\u00e1\7U\2\2\u00e1\u00e2"+
|
||||
"\7E\2\2\u00e2\30\3\2\2\2\u00e3\u00e4\7D\2\2\u00e4\u00e5\7G\2\2\u00e5\u00e6"+
|
||||
"\7V\2\2\u00e6\u00e7\7Y\2\2\u00e7\u00e8\7G\2\2\u00e8\u00e9\7G\2\2\u00e9"+
|
||||
"\u00ea\7P\2\2\u00ea\32\3\2\2\2\u00eb\u00ec\7D\2\2\u00ec\u00ed\7[\2\2\u00ed"+
|
||||
"\34\3\2\2\2\u00ee\u00ef\7E\2\2\u00ef\u00f0\7C\2\2\u00f0\u00f1\7U\2\2\u00f1"+
|
||||
"\u00f2\7V\2\2\u00f2\36\3\2\2\2\u00f3\u00f4\7E\2\2\u00f4\u00f5\7Q\2\2\u00f5"+
|
||||
"\u00f6\7N\2\2\u00f6\u00f7\7W\2\2\u00f7\u00f8\7O\2\2\u00f8\u00f9\7P\2\2"+
|
||||
"\u00f9\u00fa\7U\2\2\u00fa \3\2\2\2\u00fb\u00fc\7F\2\2\u00fc\u00fd\7G\2"+
|
||||
"\2\u00fd\u00fe\7D\2\2\u00fe\u00ff\7W\2\2\u00ff\u0100\7I\2\2\u0100\"\3"+
|
||||
"\2\2\2\u0101\u0102\7F\2\2\u0102\u0103\7G\2\2\u0103\u0104\7U\2\2\u0104"+
|
||||
"\u0105\7E\2\2\u0105$\3\2\2\2\u0106\u0107\7F\2\2\u0107\u0108\7G\2\2\u0108"+
|
||||
"\u0109\7U\2\2\u0109\u010a\7E\2\2\u010a\u010b\7T\2\2\u010b\u010c\7K\2\2"+
|
||||
"\u010c\u010d\7D\2\2\u010d\u010e\7G\2\2\u010e&\3\2\2\2\u010f\u0110\7F\2"+
|
||||
"\2\u0110\u0111\7K\2\2\u0111\u0112\7U\2\2\u0112\u0113\7V\2\2\u0113\u0114"+
|
||||
"\7K\2\2\u0114\u0115\7P\2\2\u0115\u0116\7E\2\2\u0116\u0117\7V\2\2\u0117"+
|
||||
"(\3\2\2\2\u0118\u0119\7G\2\2\u0119\u011a\7Z\2\2\u011a\u011b\7G\2\2\u011b"+
|
||||
"\u011c\7E\2\2\u011c\u011d\7W\2\2\u011d\u011e\7V\2\2\u011e\u011f\7C\2\2"+
|
||||
"\u011f\u0120\7D\2\2\u0120\u0121\7N\2\2\u0121\u0122\7G\2\2\u0122*\3\2\2"+
|
||||
"\2\u0123\u0124\7G\2\2\u0124\u0125\7Z\2\2\u0125\u0126\7K\2\2\u0126\u0127"+
|
||||
"\7U\2\2\u0127\u0128\7V\2\2\u0128\u0129\7U\2\2\u0129,\3\2\2\2\u012a\u012b"+
|
||||
"\7G\2\2\u012b\u012c\7Z\2\2\u012c\u012d\7R\2\2\u012d\u012e\7N\2\2\u012e"+
|
||||
"\u012f\7C\2\2\u012f\u0130\7K\2\2\u0130\u0131\7P\2\2\u0131.\3\2\2\2\u0132"+
|
||||
"\u0133\7G\2\2\u0133\u0134\7Z\2\2\u0134\u0135\7V\2\2\u0135\u0136\7T\2\2"+
|
||||
"\u0136\u0137\7C\2\2\u0137\u0138\7E\2\2\u0138\u0139\7V\2\2\u0139\60\3\2"+
|
||||
"\2\2\u013a\u013b\7H\2\2\u013b\u013c\7C\2\2\u013c\u013d\7N\2\2\u013d\u013e"+
|
||||
"\7U\2\2\u013e\u013f\7G\2\2\u013f\62\3\2\2\2\u0140\u0141\7H\2\2\u0141\u0142"+
|
||||
"\7Q\2\2\u0142\u0143\7T\2\2\u0143\u0144\7O\2\2\u0144\u0145\7C\2\2\u0145"+
|
||||
"\u0146\7V\2\2\u0146\64\3\2\2\2\u0147\u0148\7H\2\2\u0148\u0149\7T\2\2\u0149"+
|
||||
"\u014a\7Q\2\2\u014a\u014b\7O\2\2\u014b\66\3\2\2\2\u014c\u014d\7H\2\2\u014d"+
|
||||
"\u014e\7W\2\2\u014e\u014f\7N\2\2\u014f\u0150\7N\2\2\u01508\3\2\2\2\u0151"+
|
||||
"\u0152\7H\2\2\u0152\u0153\7W\2\2\u0153\u0154\7P\2\2\u0154\u0155\7E\2\2"+
|
||||
"\u0155\u0156\7V\2\2\u0156\u0157\7K\2\2\u0157\u0158\7Q\2\2\u0158\u0159"+
|
||||
"\7P\2\2\u0159\u015a\7U\2\2\u015a:\3\2\2\2\u015b\u015c\7I\2\2\u015c\u015d"+
|
||||
"\7T\2\2\u015d\u015e\7C\2\2\u015e\u015f\7R\2\2\u015f\u0160\7J\2\2\u0160"+
|
||||
"\u0161\7X\2\2\u0161\u0162\7K\2\2\u0162\u0163\7\\\2\2\u0163<\3\2\2\2\u0164"+
|
||||
"\u0165\7I\2\2\u0165\u0166\7T\2\2\u0166\u0167\7Q\2\2\u0167\u0168\7W\2\2"+
|
||||
"\u0168\u0169\7R\2\2\u0169>\3\2\2\2\u016a\u016b\7J\2\2\u016b\u016c\7C\2"+
|
||||
"\2\u016c\u016d\7X\2\2\u016d\u016e\7K\2\2\u016e\u016f\7P\2\2\u016f\u0170"+
|
||||
"\7I\2\2\u0170@\3\2\2\2\u0171\u0172\7K\2\2\u0172\u0173\7P\2\2\u0173B\3"+
|
||||
"\2\2\2\u0174\u0175\7K\2\2\u0175\u0176\7P\2\2\u0176\u0177\7P\2\2\u0177"+
|
||||
"\u0178\7G\2\2\u0178\u0179\7T\2\2\u0179D\3\2\2\2\u017a\u017b\7K\2\2\u017b"+
|
||||
"\u017c\7U\2\2\u017cF\3\2\2\2\u017d\u017e\7L\2\2\u017e\u017f\7Q\2\2\u017f"+
|
||||
"\u0180\7K\2\2\u0180\u0181\7P\2\2\u0181H\3\2\2\2\u0182\u0183\7N\2\2\u0183"+
|
||||
"\u0184\7G\2\2\u0184\u0185\7H\2\2\u0185\u0186\7V\2\2\u0186J\3\2\2\2\u0187"+
|
||||
"\u0188\7N\2\2\u0188\u0189\7K\2\2\u0189\u018a\7M\2\2\u018a\u018b\7G\2\2"+
|
||||
"\u018bL\3\2\2\2\u018c\u018d\7N\2\2\u018d\u018e\7K\2\2\u018e\u018f\7O\2"+
|
||||
"\2\u018f\u0190\7K\2\2\u0190\u0191\7V\2\2\u0191N\3\2\2\2\u0192\u0193\7"+
|
||||
"O\2\2\u0193\u0194\7C\2\2\u0194\u0195\7R\2\2\u0195\u0196\7R\2\2\u0196\u0197"+
|
||||
"\7G\2\2\u0197\u0198\7F\2\2\u0198P\3\2\2\2\u0199\u019a\7O\2\2\u019a\u019b"+
|
||||
"\7C\2\2\u019b\u019c\7V\2\2\u019c\u019d\7E\2\2\u019d\u019e\7J\2\2\u019e"+
|
||||
"R\3\2\2\2\u019f\u01a0\7P\2\2\u01a0\u01a1\7C\2\2\u01a1\u01a2\7V\2\2\u01a2"+
|
||||
"\u01a3\7W\2\2\u01a3\u01a4\7T\2\2\u01a4\u01a5\7C\2\2\u01a5\u01a6\7N\2\2"+
|
||||
"\u01a6T\3\2\2\2\u01a7\u01a8\7P\2\2\u01a8\u01a9\7Q\2\2\u01a9\u01aa\7V\2"+
|
||||
"\2\u01aaV\3\2\2\2\u01ab\u01ac\7P\2\2\u01ac\u01ad\7W\2\2\u01ad\u01ae\7"+
|
||||
"N\2\2\u01ae\u01af\7N\2\2\u01afX\3\2\2\2\u01b0\u01b1\7Q\2\2\u01b1\u01b2"+
|
||||
"\7P\2\2\u01b2Z\3\2\2\2\u01b3\u01b4\7Q\2\2\u01b4\u01b5\7R\2\2\u01b5\u01b6"+
|
||||
"\7V\2\2\u01b6\u01b7\7K\2\2\u01b7\u01b8\7O\2\2\u01b8\u01b9\7K\2\2\u01b9"+
|
||||
"\u01ba\7\\\2\2\u01ba\u01bb\7G\2\2\u01bb\u01bc\7F\2\2\u01bc\\\3\2\2\2\u01bd"+
|
||||
"\u01be\7Q\2\2\u01be\u01bf\7T\2\2\u01bf^\3\2\2\2\u01c0\u01c1\7Q\2\2\u01c1"+
|
||||
"\u01c2\7T\2\2\u01c2\u01c3\7F\2\2\u01c3\u01c4\7G\2\2\u01c4\u01c5\7T\2\2"+
|
||||
"\u01c5`\3\2\2\2\u01c6\u01c7\7Q\2\2\u01c7\u01c8\7W\2\2\u01c8\u01c9\7V\2"+
|
||||
"\2\u01c9\u01ca\7G\2\2\u01ca\u01cb\7T\2\2\u01cbb\3\2\2\2\u01cc\u01cd\7"+
|
||||
"R\2\2\u01cd\u01ce\7C\2\2\u01ce\u01cf\7T\2\2\u01cf\u01d0\7U\2\2\u01d0\u01d1"+
|
||||
"\7G\2\2\u01d1\u01d2\7F\2\2\u01d2d\3\2\2\2\u01d3\u01d4\7R\2\2\u01d4\u01d5"+
|
||||
"\7J\2\2\u01d5\u01d6\7[\2\2\u01d6\u01d7\7U\2\2\u01d7\u01d8\7K\2\2\u01d8"+
|
||||
"\u01d9\7E\2\2\u01d9\u01da\7C\2\2\u01da\u01db\7N\2\2\u01dbf\3\2\2\2\u01dc"+
|
||||
"\u01dd\7R\2\2\u01dd\u01de\7N\2\2\u01de\u01df\7C\2\2\u01df\u01e0\7P\2\2"+
|
||||
"\u01e0h\3\2\2\2\u01e1\u01e2\7S\2\2\u01e2\u01e3\7W\2\2\u01e3\u01e4\7G\2"+
|
||||
"\2\u01e4\u01e5\7T\2\2\u01e5\u01e6\7[\2\2\u01e6j\3\2\2\2\u01e7\u01e8\7"+
|
||||
"T\2\2\u01e8\u01e9\7K\2\2\u01e9\u01ea\7I\2\2\u01ea\u01eb\7J\2\2\u01eb\u01ec"+
|
||||
"\7V\2\2\u01ecl\3\2\2\2\u01ed\u01ee\7T\2\2\u01ee\u01ef\7N\2\2\u01ef\u01f0"+
|
||||
"\7K\2\2\u01f0\u01f1\7M\2\2\u01f1\u01f2\7G\2\2\u01f2n\3\2\2\2\u01f3\u01f4"+
|
||||
"\7U\2\2\u01f4\u01f5\7E\2\2\u01f5\u01f6\7J\2\2\u01f6\u01f7\7G\2\2\u01f7"+
|
||||
"\u01f8\7O\2\2\u01f8\u01f9\7C\2\2\u01f9\u01fa\7U\2\2\u01fap\3\2\2\2\u01fb"+
|
||||
"\u01fc\7U\2\2\u01fc\u01fd\7G\2\2\u01fd\u01fe\7N\2\2\u01fe\u01ff\7G\2\2"+
|
||||
"\u01ff\u0200\7E\2\2\u0200\u0201\7V\2\2\u0201r\3\2\2\2\u0202\u0203\7U\2"+
|
||||
"\2\u0203\u0204\7J\2\2\u0204\u0205\7Q\2\2\u0205\u0206\7Y\2\2\u0206t\3\2"+
|
||||
"\2\2\u0207\u0208\7V\2\2\u0208\u0209\7C\2\2\u0209\u020a\7D\2\2\u020a\u020b"+
|
||||
"\7N\2\2\u020b\u020c\7G\2\2\u020c\u020d\7U\2\2\u020dv\3\2\2\2\u020e\u020f"+
|
||||
"\7V\2\2\u020f\u0210\7G\2\2\u0210\u0211\7Z\2\2\u0211\u0212\7V\2\2\u0212"+
|
||||
"x\3\2\2\2\u0213\u0214\7V\2\2\u0214\u0215\7T\2\2\u0215\u0216\7W\2\2\u0216"+
|
||||
"\u0217\7G\2\2\u0217z\3\2\2\2\u0218\u0219\7W\2\2\u0219\u021a\7U\2\2\u021a"+
|
||||
"\u021b\7K\2\2\u021b\u021c\7P\2\2\u021c\u021d\7I\2\2\u021d|\3\2\2\2\u021e"+
|
||||
"\u021f\7X\2\2\u021f\u0220\7G\2\2\u0220\u0221\7T\2\2\u0221\u0222\7K\2\2"+
|
||||
"\u0222\u0223\7H\2\2\u0223\u0224\7[\2\2\u0224~\3\2\2\2\u0225\u0226\7Y\2"+
|
||||
"\2\u0226\u0227\7J\2\2\u0227\u0228\7G\2\2\u0228\u0229\7T\2\2\u0229\u022a"+
|
||||
"\7G\2\2\u022a\u0080\3\2\2\2\u022b\u022c\7Y\2\2\u022c\u022d\7K\2\2\u022d"+
|
||||
"\u022e\7V\2\2\u022e\u022f\7J\2\2\u022f\u0082\3\2\2\2\u0230\u0231\7?\2"+
|
||||
"\2\u0231\u0084\3\2\2\2\u0232\u0233\7>\2\2\u0233\u023a\7@\2\2\u0234\u0235"+
|
||||
"\7#\2\2\u0235\u023a\7?\2\2\u0236\u0237\7>\2\2\u0237\u0238\7?\2\2\u0238"+
|
||||
"\u023a\7@\2\2\u0239\u0232\3\2\2\2\u0239\u0234\3\2\2\2\u0239\u0236\3\2"+
|
||||
"\2\2\u023a\u0086\3\2\2\2\u023b\u023c\7>\2\2\u023c\u0088\3\2\2\2\u023d"+
|
||||
"\u023e\7>\2\2\u023e\u023f\7?\2\2\u023f\u008a\3\2\2\2\u0240\u0241\7@\2"+
|
||||
"\2\u0241\u008c\3\2\2\2\u0242\u0243\7@\2\2\u0243\u0244\7?\2\2\u0244\u008e"+
|
||||
"\3\2\2\2\u0245\u0246\7-\2\2\u0246\u0090\3\2\2\2\u0247\u0248\7/\2\2\u0248"+
|
||||
"\u0092\3\2\2\2\u0249\u024a\7,\2\2\u024a\u0094\3\2\2\2\u024b\u024c\7\61"+
|
||||
"\2\2\u024c\u0096\3\2\2\2\u024d\u024e\7\'\2\2\u024e\u0098\3\2\2\2\u024f"+
|
||||
"\u0250\7~\2\2\u0250\u0251\7~\2\2\u0251\u009a\3\2\2\2\u0252\u0258\7)\2"+
|
||||
"\2\u0253\u0257\n\2\2\2\u0254\u0255\7)\2\2\u0255\u0257\7)\2\2\u0256\u0253"+
|
||||
"\3\2\2\2\u0256\u0254\3\2\2\2\u0257\u025a\3\2\2\2\u0258\u0256\3\2\2\2\u0258"+
|
||||
"\u0259\3\2\2\2\u0259\u025b\3\2\2\2\u025a\u0258\3\2\2\2\u025b\u025c\7)"+
|
||||
"\2\2\u025c\u009c\3\2\2\2\u025d\u025f\5\u00abV\2\u025e\u025d\3\2\2\2\u025f"+
|
||||
"\u0260\3\2\2\2\u0260\u025e\3\2\2\2\u0260\u0261\3\2\2\2\u0261\u009e\3\2"+
|
||||
"\2\2\u0262\u0264\5\u00abV\2\u0263\u0262\3\2\2\2\u0264\u0265\3\2\2\2\u0265"+
|
||||
"\u0263\3\2\2\2\u0265\u0266\3\2\2\2\u0266\u0267\3\2\2\2\u0267\u026b\7\60"+
|
||||
"\2\2\u0268\u026a\5\u00abV\2\u0269\u0268\3\2\2\2\u026a\u026d\3\2\2\2\u026b"+
|
||||
"\u0269\3\2\2\2\u026b\u026c\3\2\2\2\u026c\u028d\3\2\2\2\u026d\u026b\3\2"+
|
||||
"\2\2\u026e\u0270\7\60\2\2\u026f\u0271\5\u00abV\2\u0270\u026f\3\2\2\2\u0271"+
|
||||
"\u0272\3\2\2\2\u0272\u0270\3\2\2\2\u0272\u0273\3\2\2\2\u0273\u028d\3\2"+
|
||||
"\2\2\u0274\u0276\5\u00abV\2\u0275\u0274\3\2\2\2\u0276\u0277\3\2\2\2\u0277"+
|
||||
"\u0275\3\2\2\2\u0277\u0278\3\2\2\2\u0278\u0280\3\2\2\2\u0279\u027d\7\60"+
|
||||
"\2\2\u027a\u027c\5\u00abV\2\u027b\u027a\3\2\2\2\u027c\u027f\3\2\2\2\u027d"+
|
||||
"\u027b\3\2\2\2\u027d\u027e\3\2\2\2\u027e\u0281\3\2\2\2\u027f\u027d\3\2"+
|
||||
"\2\2\u0280\u0279\3\2\2\2\u0280\u0281\3\2\2\2\u0281\u0282\3\2\2\2\u0282"+
|
||||
"\u0283\5\u00a9U\2\u0283\u028d\3\2\2\2\u0284\u0286\7\60\2\2\u0285\u0287"+
|
||||
"\5\u00abV\2\u0286\u0285\3\2\2\2\u0287\u0288\3\2\2\2\u0288\u0286\3\2\2"+
|
||||
"\2\u0288\u0289\3\2\2\2\u0289\u028a\3\2\2\2\u028a\u028b\5\u00a9U\2\u028b"+
|
||||
"\u028d\3\2\2\2\u028c\u0263\3\2\2\2\u028c\u026e\3\2\2\2\u028c\u0275\3\2"+
|
||||
"\2\2\u028c\u0284\3\2\2\2\u028d\u00a0\3\2\2\2\u028e\u0291\5\u00adW\2\u028f"+
|
||||
"\u0291\7a\2\2\u0290\u028e\3\2\2\2\u0290\u028f\3\2\2\2\u0291\u0297\3\2"+
|
||||
"\2\2\u0292\u0296\5\u00adW\2\u0293\u0296\5\u00abV\2\u0294\u0296\t\3\2\2"+
|
||||
"\u0295\u0292\3\2\2\2\u0295\u0293\3\2\2\2\u0295\u0294\3\2\2\2\u0296\u0299"+
|
||||
"\3\2\2\2\u0297\u0295\3\2\2\2\u0297\u0298\3\2\2\2\u0298\u00a2\3\2\2\2\u0299"+
|
||||
"\u0297\3\2\2\2\u029a\u029e\5\u00abV\2\u029b\u029f\5\u00adW\2\u029c\u029f"+
|
||||
"\5\u00abV\2\u029d\u029f\t\3\2\2\u029e\u029b\3\2\2\2\u029e\u029c\3\2\2"+
|
||||
"\2\u029e\u029d\3\2\2\2\u029f\u02a0\3\2\2\2\u02a0\u029e\3\2\2\2\u02a0\u02a1"+
|
||||
"\3\2\2\2\u02a1\u00a4\3\2\2\2\u02a2\u02a8\7$\2\2\u02a3\u02a7\n\4\2\2\u02a4"+
|
||||
"\u02a5\7$\2\2\u02a5\u02a7\7$\2\2\u02a6\u02a3\3\2\2\2\u02a6\u02a4\3\2\2"+
|
||||
"\2\u02a7\u02aa\3\2\2\2\u02a8\u02a6\3\2\2\2\u02a8\u02a9\3\2\2\2\u02a9\u02ab"+
|
||||
"\3\2\2\2\u02aa\u02a8\3\2\2\2\u02ab\u02ac\7$\2\2\u02ac\u00a6\3\2\2\2\u02ad"+
|
||||
"\u02b3\7b\2\2\u02ae\u02b2\n\5\2\2\u02af\u02b0\7b\2\2\u02b0\u02b2\7b\2"+
|
||||
"\2\u02b1\u02ae\3\2\2\2\u02b1\u02af\3\2\2\2\u02b2\u02b5\3\2\2\2\u02b3\u02b1"+
|
||||
"\3\2\2\2\u02b3\u02b4\3\2\2\2\u02b4\u02b6\3\2\2\2\u02b5\u02b3\3\2\2\2\u02b6"+
|
||||
"\u02b7\7b\2\2\u02b7\u00a8\3\2\2\2\u02b8\u02ba\7G\2\2\u02b9\u02bb\t\6\2"+
|
||||
"\2\u02ba\u02b9\3\2\2\2\u02ba\u02bb\3\2\2\2\u02bb\u02bd\3\2\2\2\u02bc\u02be"+
|
||||
"\5\u00abV\2\u02bd\u02bc\3\2\2\2\u02be\u02bf\3\2\2\2\u02bf\u02bd\3\2\2"+
|
||||
"\2\u02bf\u02c0\3\2\2\2\u02c0\u00aa\3\2\2\2\u02c1\u02c2\t\7\2\2\u02c2\u00ac"+
|
||||
"\3\2\2\2\u02c3\u02c4\t\b\2\2\u02c4\u00ae\3\2\2\2\u02c5\u02c6\7/\2\2\u02c6"+
|
||||
"\u02c7\7/\2\2\u02c7\u02cb\3\2\2\2\u02c8\u02ca\n\t\2\2\u02c9\u02c8\3\2"+
|
||||
"\2\2\u02ca\u02cd\3\2\2\2\u02cb\u02c9\3\2\2\2\u02cb\u02cc\3\2\2\2\u02cc"+
|
||||
"\u02cf\3\2\2\2\u02cd\u02cb\3\2\2\2\u02ce\u02d0\7\17\2\2\u02cf\u02ce\3"+
|
||||
"\2\2\2\u02cf\u02d0\3\2\2\2\u02d0\u02d2\3\2\2\2\u02d1\u02d3\7\f\2\2\u02d2"+
|
||||
"\u02d1\3\2\2\2\u02d2\u02d3\3\2\2\2\u02d3\u02d4\3\2\2\2\u02d4\u02d5\bX"+
|
||||
"\2\2\u02d5\u00b0\3\2\2\2\u02d6\u02d7\7\61\2\2\u02d7\u02d8\7,\2\2\u02d8"+
|
||||
"\u02dd\3\2\2\2\u02d9\u02dc\5\u00b1Y\2\u02da\u02dc\13\2\2\2\u02db\u02d9"+
|
||||
"\3\2\2\2\u02db\u02da\3\2\2\2\u02dc\u02df\3\2\2\2\u02dd\u02de\3\2\2\2\u02dd"+
|
||||
"\u02db\3\2\2\2\u02de\u02e0\3\2\2\2\u02df\u02dd\3\2\2\2\u02e0\u02e1\7,"+
|
||||
"\2\2\u02e1\u02e2\7\61\2\2\u02e2\u02e3\3\2\2\2\u02e3\u02e4\bY\2\2\u02e4"+
|
||||
"\u00b2\3\2\2\2\u02e5\u02e7\t\n\2\2\u02e6\u02e5\3\2\2\2\u02e7\u02e8\3\2"+
|
||||
"\2\2\u02e8\u02e6\3\2\2\2\u02e8\u02e9\3\2\2\2\u02e9\u02ea\3\2\2\2\u02ea"+
|
||||
"\u02eb\bZ\2\2\u02eb\u00b4\3\2\2\2\u02ec\u02ed\13\2\2\2\u02ed\u00b6\3\2"+
|
||||
"\2\2 \2\u0239\u0256\u0258\u0260\u0265\u026b\u0272\u0277\u027d\u0280\u0288"+
|
||||
"\u028c\u0290\u0295\u0297\u029e\u02a0\u02a6\u02a8\u02b1\u02b3\u02ba\u02bf"+
|
||||
"\u02cb\u02cf\u02d2\u02db\u02dd\u02e8\3\2\3\2";
|
||||
public static final ATN _ATN =
new ATNDeserializer().deserialize(_serializedATN.toCharArray());
static {
@@ -600,16 +600,6 @@ interface SqlBaseListener extends ParseTreeListener {
* @param ctx the parse tree
*/
void exitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx);
/**
* Enter a parse tree produced by {@link SqlBaseParser#columnExpression}.
* @param ctx the parse tree
*/
void enterColumnExpression(SqlBaseParser.ColumnExpressionContext ctx);
/**
* Exit a parse tree produced by {@link SqlBaseParser#columnExpression}.
* @param ctx the parse tree
*/
void exitColumnExpression(SqlBaseParser.ColumnExpressionContext ctx);
/**
* Enter a parse tree produced by the {@code nullLiteral}
* labeled alternative in {@link SqlBaseParser#constant}.
File diff suppressed because it is too large
@@ -361,12 +361,6 @@ interface SqlBaseVisitor<T> extends ParseTreeVisitor<T> {
* @return the visitor result
*/
T visitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx);
/**
* Visit a parse tree produced by {@link SqlBaseParser#columnExpression}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitColumnExpression(SqlBaseParser.ColumnExpressionContext ctx);
/**
* Visit a parse tree produced by the {@code nullLiteral}
* labeled alternative in {@link SqlBaseParser#constant}.
@@ -6,24 +6,18 @@
package org.elasticsearch.xpack.sql.plan.logical;

import org.elasticsearch.xpack.sql.analysis.index.EsIndex;
import org.elasticsearch.xpack.sql.analysis.index.MappingException;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.NestedFieldAttribute;
import org.elasticsearch.xpack.sql.expression.RootFieldAttribute;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.CompoundDataType;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.NestedType;
import org.elasticsearch.xpack.sql.type.StringType;
import org.elasticsearch.xpack.sql.util.StringUtils;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.stream.Stream;

import static java.util.Collections.emptyList;
import static java.util.stream.Collectors.toList;
import static org.elasticsearch.xpack.sql.util.CollectionUtils.combine;

public class EsRelation extends LeafPlan {

@@ -33,32 +27,30 @@ public class EsRelation extends LeafPlan {
public EsRelation(Location location, EsIndex index) {
super(location);
this.index = index;
attrs = flatten(location, index.mapping()).collect(toList());
attrs = flatten(location, index.mapping());
}

private static Stream<Attribute> flatten(Location location, Map<String, DataType> mapping) {
return flatten(location, mapping, null, emptyList());
private static List<Attribute> flatten(Location location, Map<String, DataType> mapping) {
return flatten(location, mapping, null);
}

private static Stream<Attribute> flatten(Location location, Map<String, DataType> mapping, String parent, List<String> nestedParents) {
return mapping.entrySet().stream()
.filter(e -> e.getValue() != null)
.flatMap(e -> {
String name = parent != null ? parent + "." + e.getKey() : e.getKey();
DataType t = e.getValue();
if (t.isComplex() && !(t instanceof StringType)) {
if (t instanceof NestedType) {
return Stream.concat(Stream.of(new NestedFieldAttribute(location, name, t, nestedParents)), flatten(location, ((NestedType) t).properties(), name, combine(nestedParents, name)));
}
// if (t instanceof ObjectType) {
// return flatten(location, ((ObjectType) t).properties(), name, combine(nestedParents, name));
// }

throw new MappingException("Does not know how to handle complex type %s", t);
}
Attribute att = nestedParents.isEmpty() ? new RootFieldAttribute(location, name, t) : new NestedFieldAttribute(location, name, t, nestedParents);
return Stream.of(att);
});
private static List<Attribute> flatten(Location location, Map<String, DataType> mapping, FieldAttribute parent) {
List<Attribute> list = new ArrayList<>();

for (Entry<String, DataType> entry : mapping.entrySet()) {
String name = entry.getKey();
DataType t = entry.getValue();

if (t != null) {
FieldAttribute f = new FieldAttribute(location, parent, parent != null ? parent.name() + "." + name : name, t);
list.add(f);
// object or nested
if (t instanceof CompoundDataType) {
list.addAll(flatten(location, ((CompoundDataType) t).properties(), f));
}
}
}
return list;
}

public EsIndex index() {
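The new flatten above replaces the stream pipeline with a plain recursive walk: every mapping entry becomes a FieldAttribute whose name is the dot-joined path, and object/nested types (CompoundDataType) are descended into while keeping a pointer to the parent attribute. A minimal standalone sketch of the same traversal over plain maps (names and types here are illustrative stand-ins, not the module's API):

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class FlattenSketch {
    // Mirrors EsRelation.flatten: emit the dotted name of every entry and
    // recurse into sub-mappings (the analogue of CompoundDataType.properties()).
    static List<String> flatten(Map<String, Object> mapping, String parent) {
        List<String> names = new ArrayList<>();
        for (Map.Entry<String, Object> entry : mapping.entrySet()) {
            String name = parent != null ? parent + "." + entry.getKey() : entry.getKey();
            names.add(name);
            if (entry.getValue() instanceof Map) {
                @SuppressWarnings("unchecked")
                Map<String, Object> sub = (Map<String, Object>) entry.getValue();
                names.addAll(flatten(sub, name));
            }
        }
        return names;
    }

    public static void main(String[] args) {
        Map<String, Object> address = new LinkedHashMap<>();
        address.put("city", "keyword");
        address.put("zip", "integer");
        Map<String, Object> mapping = new LinkedHashMap<>();
        mapping.put("name", "text");
        mapping.put("address", address);
        System.out.println(flatten(mapping, null)); // [name, address, address.city, address.zip]
    }
}

Note that, as in the diff, the compound field itself is emitted as an attribute before its children, which is what makes dotted/object fields selectable.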
@@ -5,13 +5,14 @@
*/
package org.elasticsearch.xpack.sql.plan.logical;

import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;

import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.tree.Location;

import java.util.List;
import java.util.Objects;

import static java.util.stream.Collectors.toList;

public class SubQueryAlias extends UnaryPlan {

private final String alias;

@@ -30,7 +31,7 @@ public class SubQueryAlias extends UnaryPlan {
return (alias == null ? child().output() :
child().output().stream()
.map(e -> e.withQualifier(alias))
.collect(Collectors.toList())
.collect(toList())
);
}

@@ -7,7 +7,7 @@ package org.elasticsearch.xpack.sql.plan.logical.command;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.RootFieldAttribute;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
import org.elasticsearch.xpack.sql.rule.RuleExecutor.Batch;
import org.elasticsearch.xpack.sql.rule.RuleExecutor.ExecutionInfo;

@@ -65,7 +65,7 @@ public class Debug extends Command {

@Override
public List<Attribute> output() {
return singletonList(new RootFieldAttribute(location(), "plan", DataTypes.KEYWORD));
return singletonList(new FieldAttribute(location(), "plan", DataTypes.KEYWORD));
}

@Override
@@ -9,7 +9,7 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.Strings;
import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.RootFieldAttribute;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.plan.QueryPlan;
import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
import org.elasticsearch.xpack.sql.plan.physical.PhysicalPlan;

@@ -76,7 +76,7 @@ public class Explain extends Command {

@Override
public List<Attribute> output() {
return singletonList(new RootFieldAttribute(location(), "plan", DataTypes.KEYWORD));
return singletonList(new FieldAttribute(location(), "plan", DataTypes.KEYWORD));
}

@Override

@@ -157,7 +157,7 @@ public class Explain extends Command {

listener.onResponse(Rows.singleton(output(), printPlans(format, plan, analyzedPlan, optimizedPlan, mappedPlan, null)));
}, listener::onFailure));
// cannot continue
// cannot continue
} else {
if (type != Type.ALL) {
listener.onResponse(Rows.singleton(output(), formatPlan(format, analyzedPlan)));
@@ -7,7 +7,7 @@ package org.elasticsearch.xpack.sql.plan.logical.command;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.RootFieldAttribute;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.session.Rows;
import org.elasticsearch.xpack.sql.session.SchemaRowSet;
import org.elasticsearch.xpack.sql.session.SqlSession;

@@ -40,8 +40,8 @@ public class ShowColumns extends Command {

@Override
public List<Attribute> output() {
return asList(new RootFieldAttribute(location(), "column", DataTypes.KEYWORD),
new RootFieldAttribute(location(), "type", DataTypes.KEYWORD));
return asList(new FieldAttribute(location(), "column", DataTypes.KEYWORD),
new FieldAttribute(location(), "type", DataTypes.KEYWORD));
}

@Override

@@ -56,7 +56,7 @@ public class ShowColumns extends Command {
listener.onResponse(Rows.of(output(), rows));
},
listener::onFailure
));
));
}

private void fillInRows(Map<String, DataType> mapping, String prefix, List<List<?>> rows) {
@@ -7,7 +7,7 @@ package org.elasticsearch.xpack.sql.plan.logical.command;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.RootFieldAttribute;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.expression.function.FunctionDefinition;
import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
import org.elasticsearch.xpack.sql.session.Rows;

@@ -38,8 +38,8 @@ public class ShowFunctions extends Command {

@Override
public List<Attribute> output() {
return asList(new RootFieldAttribute(location(), "name", DataTypes.KEYWORD),
new RootFieldAttribute(location(), "type", DataTypes.KEYWORD));
return asList(new FieldAttribute(location(), "name", DataTypes.KEYWORD),
new FieldAttribute(location(), "type", DataTypes.KEYWORD));
}

@Override
@@ -7,8 +7,7 @@ package org.elasticsearch.xpack.sql.plan.logical.command;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.RootFieldAttribute;
import org.elasticsearch.xpack.sql.session.RowSet;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.session.Rows;
import org.elasticsearch.xpack.sql.session.SchemaRowSet;
import org.elasticsearch.xpack.sql.session.SqlSession;

@@ -27,7 +26,7 @@ public class ShowSchemas extends Command {

@Override
public List<Attribute> output() {
return singletonList(new RootFieldAttribute(location(), "schema", DataTypes.KEYWORD));
return singletonList(new FieldAttribute(location(), "schema", DataTypes.KEYWORD));
}

@Override
@@ -9,7 +9,7 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.RootFieldAttribute;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.session.Rows;
import org.elasticsearch.xpack.sql.session.SchemaRowSet;
import org.elasticsearch.xpack.sql.session.SqlSession;

@@ -40,7 +40,7 @@ public class ShowTables extends Command {

@Override
public List<Attribute> output() {
return Collections.singletonList(new RootFieldAttribute(location(), "table", DataTypes.KEYWORD));
return Collections.singletonList(new FieldAttribute(location(), "table", DataTypes.KEYWORD));
}

@Override
@@ -13,8 +13,6 @@ import org.elasticsearch.xpack.sql.expression.ExpressionId;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.expression.Literal;
import org.elasticsearch.xpack.sql.expression.NamedExpression;
import org.elasticsearch.xpack.sql.expression.NestedFieldAttribute;
import org.elasticsearch.xpack.sql.expression.RootFieldAttribute;
import org.elasticsearch.xpack.sql.expression.UnaryExpression;
import org.elasticsearch.xpack.sql.expression.function.Function;
import org.elasticsearch.xpack.sql.expression.function.Functions;

@@ -261,8 +259,8 @@ abstract class QueryTranslator {
// change analyzed to non non-analyzed attributes
if (exp instanceof FieldAttribute) {
FieldAttribute fa = (FieldAttribute) exp;
if (fa.isAnalyzed()) {
ne = fa.notAnalyzedAttribute();
if (fa.isInexact()) {
ne = fa.exactAttribute();
}
}
aggId = ne.id().toString();

@@ -415,8 +413,8 @@ abstract class QueryTranslator {

static String field(AggregateFunction af) {
Expression arg = af.field();
if (arg instanceof RootFieldAttribute) {
return ((RootFieldAttribute) arg).name();
if (arg instanceof FieldAttribute) {
return ((FieldAttribute) arg).name();
}
if (arg instanceof Literal) {
return String.valueOf(((Literal) arg).value());

@@ -431,18 +429,18 @@ abstract class QueryTranslator {
@Override
protected QueryTranslation asQuery(BinaryExpression e, boolean onAggs) {
Query q = null;
boolean analyzed = true;
boolean inexact = true;
String target = null;

if (e.left() instanceof FieldAttribute) {
FieldAttribute fa = (FieldAttribute) e.left();
analyzed = fa.isAnalyzed();
target = nameOf(analyzed ? fa : fa.notAnalyzedAttribute());
inexact = fa.isInexact();
target = nameOf(inexact ? fa : fa.exactAttribute());
}

String pattern = sqlToEsPatternMatching(stringValueOf(e.right()));
if (e instanceof Like) {
if (analyzed) {
if (inexact) {
q = new QueryStringQuery(e.location(), pattern, target);
}
else {

@@ -451,7 +449,7 @@ abstract class QueryTranslator {
}

if (e instanceof RLike) {
if (analyzed) {
if (inexact) {
q = new QueryStringQuery(e.location(), "/" + pattern + "/", target);
}
else {

@@ -622,7 +620,7 @@ abstract class QueryTranslator {
if (bc instanceof Equals) {
if (bc.left() instanceof FieldAttribute) {
FieldAttribute fa = (FieldAttribute) bc.left();
if (fa.isAnalyzed()) {
if (fa.isInexact()) {
return new MatchQuery(loc, name, value);
}
}

@@ -870,9 +868,11 @@ abstract class QueryTranslator {
protected abstract QueryTranslation asQuery(E e, boolean onAggs);

protected static Query wrapIfNested(Query query, Expression exp) {
if (exp instanceof NestedFieldAttribute) {
NestedFieldAttribute nfa = (NestedFieldAttribute) exp;
return new NestedQuery(nfa.location(), nfa.parentPath(), query);
if (exp instanceof FieldAttribute) {
FieldAttribute fa = (FieldAttribute) exp;
if (fa.isNested()) {
return new NestedQuery(fa.location(), fa.nestedParent().name(), query);
}
}
return query;
}
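Throughout the translator, isAnalyzed/notAnalyzedAttribute becomes isInexact/exactAttribute: term-level operations (grouping keys, term queries) need a field whose indexed values match the stored ones exactly, so an analyzed text field or a normalized keyword has to fall back to an exact keyword variant when one exists. A hedged sketch of that decision; the ".keyword" sub-field name below is only the common mapping convention, not something this diff guarantees:

public class ExactFieldSketch {
    // Pick the field a term-level query should target: exact fields are used
    // as-is, inexact ones fall back to an exact sub-field when one exists;
    // otherwise the translator keeps the inexact field and emits an
    // analysis-aware query (match/query_string) instead of a term query.
    static String termQueryField(String field, boolean inexact, String exactVariant) {
        if (!inexact) {
            return field;                                 // e.g. a plain keyword
        }
        return exactVariant != null ? exactVariant : field;
    }

    public static void main(String[] args) {
        System.out.println(termQueryField("name", true, "name.keyword")); // name.keyword
        System.out.println(termQueryField("emp_no", false, null));        // emp_no
    }
}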
@@ -1,43 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.querydsl.container;

import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder;


public class NestedFieldRef implements FieldReference {
private final String parent, name;
private final boolean docValue;

public NestedFieldRef(String parent, String name, boolean useDocValueInsteadOfSource) {
this.parent = parent;
this.name = name;
this.docValue = useDocValueInsteadOfSource;
}

public String parent() {
return parent;
}

@Override
public String name() {
return name;
}

public boolean useDocValue() {
return docValue;
}

@Override
public void collectFields(SqlSourceBuilder sourceBuilder) {
throw new IllegalStateException("unhandled nested field while collecting source fields [" + getClass() + "]");
}

@Override
public String toString() {
return name;
}
}
@@ -12,9 +12,8 @@ import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.execution.search.SourceGenerator;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.expression.LiteralAttribute;
import org.elasticsearch.xpack.sql.expression.NestedFieldAttribute;
import org.elasticsearch.xpack.sql.expression.RootFieldAttribute;
import org.elasticsearch.xpack.sql.expression.function.ScoreAttribute;
import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunctionAttribute;
import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.AttributeInput;

@@ -197,15 +196,15 @@ public class QueryContainer {
//
// reference methods
//
private ColumnReference fieldRef(RootFieldAttribute fieldAttr) {
private ColumnReference searchHitFieldRef(FieldAttribute fieldAttr) {
return new SearchHitFieldRef(aliasName(fieldAttr), fieldAttr.dataType().hasDocValues());
}

private Tuple<QueryContainer, ColumnReference> nestedFieldRef(NestedFieldAttribute attr) {
private Tuple<QueryContainer, ColumnReference> nestedFieldRef(FieldAttribute attr) {
// attach the field to the relevant nested query
List<ColumnReference> nestedRefs = new ArrayList<>();

String parent = attr.parentPath();
String parent = attr.nestedParent().name();
String name = aliasName(attr);

Query q = query;

@@ -234,7 +233,7 @@ public class QueryContainer {
}
}

NestedFieldRef nestedFieldRef = new NestedFieldRef(attr.parentPath(), attr.name(), attr.dataType().hasDocValues());
SearchHitFieldRef nestedFieldRef = new SearchHitFieldRef(attr.name(), attr.dataType().hasDocValues(), parent);
nestedRefs.add(nestedFieldRef);

return new Tuple<>(new QueryContainer(q, aggs, columns, aliases, pseudoFunctions, scalarFunctions, sort, limit), nestedFieldRef);

@@ -277,11 +276,13 @@ public class QueryContainer {
}

private Tuple<QueryContainer, ColumnReference> toReference(Attribute attr) {
if (attr instanceof RootFieldAttribute) {
return new Tuple<>(this, fieldRef((RootFieldAttribute) attr));
}
if (attr instanceof NestedFieldAttribute) {
return nestedFieldRef((NestedFieldAttribute) attr);
if (attr instanceof FieldAttribute) {
FieldAttribute fa = (FieldAttribute) attr;
if (fa.isNested()) {
return nestedFieldRef(fa);
} else {
return new Tuple<>(this, searchHitFieldRef(fa));
}
}
if (attr instanceof ScalarFunctionAttribute) {
return computingRef((ScalarFunctionAttribute) attr);
@@ -10,10 +10,20 @@ import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder;
public class SearchHitFieldRef implements FieldReference {
private final String name;
private final boolean docValue;
private final String hitName;

public SearchHitFieldRef(String name, boolean useDocValueInsteadOfSource) {
this(name, useDocValueInsteadOfSource, null);
}

public SearchHitFieldRef(String name, boolean useDocValueInsteadOfSource, String hitName) {
this.name = name;
this.docValue = useDocValueInsteadOfSource;
this.hitName = hitName;
}

public String hitName() {
return hitName;
}

@Override
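With hitName added, the single SearchHitFieldRef class covers what the deleted NestedFieldRef used to: a null hit name means the value is read off the top-level search hit, while a non-null one names the inner hit (the nested parent) the extractor must look into. Usage as in QueryContainer above; the field names here are illustrative:

// top-level field, read from the hit's doc values
SearchHitFieldRef topLevel = new SearchHitFieldRef("emp_no", true);

// nested field: the extra argument names the nested parent ("dep"),
// i.e. the inner hit that "dep.name" must be extracted from
SearchHitFieldRef nested = new SearchHitFieldRef("dep.name", true, "dep");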
@@ -12,7 +12,7 @@ import org.elasticsearch.xpack.sql.protocol.shared.AbstractQueryInitRequest;
import org.elasticsearch.xpack.sql.protocol.shared.Nullable;
import org.joda.time.DateTimeZone;

// Typed object holding properties for a given
// Typed object holding properties for a given action
public class Configuration {
public static final Configuration DEFAULT = new Configuration(DateTimeZone.UTC,
AbstractQueryInitRequest.DEFAULT_FETCH_SIZE,

@@ -24,6 +24,7 @@ public class Configuration {
private int pageSize;
private TimeValue requestTimeout;
private TimeValue pageTimeout;

@Nullable
private QueryBuilder filter;

@@ -7,39 +7,48 @@ package org.elasticsearch.xpack.sql.type;

import java.sql.JDBCType;
import java.util.Map;
import java.util.Objects;

public interface CompoundDataType extends DataType {
public abstract class CompoundDataType extends AbstractDataType {

@Override
default JDBCType sqlType() {
return JDBCType.STRUCT;
private final Map<String, DataType> properties;

CompoundDataType(JDBCType sqlType, boolean hasDocValues, Map<String, DataType> properties) {
super(sqlType, hasDocValues);
this.properties = properties;
}

public Map<String, DataType> properties() {
return properties;
}

@Override
default int precision() {
public int precision() {
return 0;
}

@Override
default boolean isInteger() {
public boolean isInteger() {
return false;
}

@Override
default boolean isRational() {
public boolean isRational() {
return false;
}

@Override
default boolean isPrimitive() {
public boolean isPrimitive() {
return false;
}

@Override
default boolean hasDocValues() {
return false;
public int hashCode() {
return Objects.hash(super.hashCode(), Objects.hash(properties));
}

Map<String, DataType> properties();

@Override
public boolean equals(Object obj) {
return super.equals(obj) && Objects.equals(properties, ((CompoundDataType) obj).properties);
}
}
@@ -55,10 +55,6 @@ public interface DataType {
return isInteger() || isRational();
}

default boolean isComplex() {
return !isPrimitive();
}

boolean isPrimitive();

default boolean same(DataType other) {
@@ -71,17 +71,17 @@ public abstract class DataTypeConversion {

public static boolean canConvert(DataType from, DataType to) { // TODO it'd be cleaner and more right to fetch the conversion
// only primitives are supported so far
if (from.isComplex() || to.isComplex()) {
if (!from.isPrimitive() || !to.isPrimitive()) {
return false;
}


if (from.getClass() == to.getClass()) {
return true;
}
if (from instanceof NullType) {
return true;
}


// anything can be converted to String
if (to instanceof StringType) {
return true;
@@ -13,13 +13,25 @@ import static java.util.Collections.emptyMap;
public class KeywordType extends StringType {

static final int DEFAULT_LENGTH = 256;
static final KeywordType DEFAULT = new KeywordType(true, DEFAULT_LENGTH, emptyMap());
static final boolean DEFAULT_NORMALIZED = false;
static final KeywordType DEFAULT = new KeywordType(true, DEFAULT_LENGTH, DEFAULT_NORMALIZED, emptyMap());

private final int length;

KeywordType(boolean docValues, int length, Map<String, DataType> fields) {
private final boolean normalized;

KeywordType(boolean docValues, int length, boolean normalized, Map<String, DataType> fields) {
super(docValues, fields);
this.length = length;
this.normalized = normalized;
}

@Override
public boolean isInexact() {
return normalized;
}

public boolean isNormalized() {
return normalized;
}

@Override

@@ -32,28 +44,19 @@ public class KeywordType extends StringType {
return length;
}

static DataType from(boolean docValues, int length, Map<String, DataType> fields) {
return docValues && length == DEFAULT_LENGTH && fields.isEmpty() ? DEFAULT : new KeywordType(docValues, length, fields);
}

@Override
public int hashCode() {
return Objects.hash(length, hasDocValues(), fields());
}


@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}

if (obj == null || getClass() != obj.getClass()) {
return false;
}

KeywordType other = (KeywordType) obj;
return Objects.equals(hasDocValues(), other.hasDocValues())
&& Objects.equals(length, other.length)
&& Objects.equals(fields(), other.fields());
return super.equals(obj) && length == ((KeywordType) obj).length;
}

static DataType from(boolean docValues, int length, boolean normalized, Map<String, DataType> fields) {
return docValues && length == DEFAULT_LENGTH && fields.isEmpty() && normalized == DEFAULT_NORMALIZED
? DEFAULT
: new KeywordType(docValues, length, normalized, fields);
}
}
@@ -5,19 +5,13 @@
*/
package org.elasticsearch.xpack.sql.type;

import java.sql.JDBCType;
import java.util.Map;
import java.util.Objects;

public class NestedType implements CompoundDataType {

private final Map<String, DataType> properties;
public class NestedType extends CompoundDataType {

public NestedType(Map<String, DataType> properties) {
this.properties = properties;
}

public Map<String, DataType> properties() {
return properties;
super(JDBCType.STRUCT, false, properties);
}

@Override

@@ -25,27 +19,8 @@ public class NestedType implements CompoundDataType {
return "nested";
}

@Override
public int hashCode() {
return Objects.hash(properties);
}

@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}

if (obj == null || getClass() != obj.getClass()) {
return false;
}

NestedType other = (NestedType) obj;
return Objects.equals(properties, other.properties);
}

@Override
public String toString() {
return getClass().getSimpleName() + "[" + esName() + "|" + sqlName() + "]=" + properties;
return "N" + properties();
}
}
@@ -5,23 +5,17 @@
*/
package org.elasticsearch.xpack.sql.type;

import java.sql.JDBCType;
import java.util.Map;
import java.util.Objects;

import static java.util.Collections.emptyMap;

public class ObjectType implements CompoundDataType {
public class ObjectType extends CompoundDataType {

public static final ObjectType EMPTY = new ObjectType(emptyMap());

private final Map<String, DataType> properties;

public ObjectType(Map<String, DataType> properties) {
this.properties = properties;
}

public Map<String, DataType> properties() {
return properties;
ObjectType(Map<String, DataType> properties) {
super(JDBCType.STRUCT, false, properties);
}

@Override

@@ -30,21 +24,7 @@ public class ObjectType implements CompoundDataType {
}

@Override
public int hashCode() {
return Objects.hash(properties);
}

@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}

if (obj == null || getClass() != obj.getClass()) {
return false;
}

ObjectType other = (ObjectType) obj;
return Objects.equals(properties, other.properties);
public String toString() {
return "O" + properties();
}
}
@@ -8,56 +8,75 @@ package org.elasticsearch.xpack.sql.type;
import java.sql.JDBCType;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;

import static java.util.Collections.emptyMap;
import static java.util.stream.Collectors.toMap;

public abstract class StringType implements DataType {
// String type is a special type of CompoundDataType
public abstract class StringType extends CompoundDataType {

private final boolean docValue;
private final Map<String, DataType> fields;
private final Map<String, DataType> docValueFields;
private final Map<String, KeywordType> exactKeywords;


StringType(boolean docValue, Map<String, DataType> fields) {
super(JDBCType.VARCHAR, docValue, fields);

this.docValue = docValue;
this.fields = fields;

if (docValue || fields.isEmpty()) {
docValueFields = emptyMap();
exactKeywords = emptyMap();
} else {
docValueFields = fields.entrySet().stream()
.filter(e -> e.getValue().hasDocValues())
.collect(toMap(
Map.Entry::getKey,
Map.Entry::getValue,
(k1, k2) -> {
throw new IllegalStateException("Duplicate key " + k1);
},
LinkedHashMap::new));
exactKeywords = new LinkedHashMap<>();
for (Entry<String, DataType> entry : fields.entrySet()) {
DataType t = entry.getValue();
// consider only non-normalized keywords
if (t instanceof KeywordType) {
KeywordType kt = (KeywordType) t;
if (!kt.isNormalized()) {
exactKeywords.put(entry.getKey(), kt);
}
}
}
}
}

@Override
public JDBCType sqlType() {
return JDBCType.VARCHAR;
}

@Override
public boolean hasDocValues() {
return docValue;
}
public abstract boolean isInexact();

public Map<String, DataType> fields() {
return fields;
return properties();
}

public Map<String, DataType> docValueFields() {
return docValueFields;
public Map<String, KeywordType> exactKeywords() {
return exactKeywords;
}

@Override
public boolean isPrimitive() {
return fields.isEmpty();
return true;
}

@Override
public int precision() {
return Integer.MAX_VALUE;
}

@Override
public int hashCode() {
return Objects.hash(docValue, fields);
}

@Override
public boolean equals(Object obj) {
if (super.equals(obj)) {
StringType other = (StringType) obj;
return Objects.equals(docValue, other.docValue)
&& Objects.equals(fields(), other.fields());
}
return false;
}

@Override
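docValueFields is replaced by exactKeywords because having doc values is no longer the criterion for an exact variant: only a keyword sub-field without a normalizer preserves the original value verbatim. A small standalone sketch of that filter, with a record standing in for the module's type classes (an assumption for illustration, not the real API):

import java.util.LinkedHashMap;
import java.util.Map;

public class ExactKeywordSketch {
    // Stand-in for a sub-field: its ES type and whether a normalizer is set.
    record SubField(String type, boolean normalized) {}

    // Keep only un-normalized keyword sub-fields, in declaration order,
    // mirroring the exactKeywords loop in StringType's constructor above.
    static Map<String, SubField> exactKeywords(Map<String, SubField> fields) {
        Map<String, SubField> exact = new LinkedHashMap<>();
        for (Map.Entry<String, SubField> e : fields.entrySet()) {
            if ("keyword".equals(e.getValue().type()) && !e.getValue().normalized()) {
                exact.put(e.getKey(), e.getValue());
            }
        }
        return exact;
    }

    public static void main(String[] args) {
        Map<String, SubField> fields = new LinkedHashMap<>();
        fields.put("raw", new SubField("keyword", false));   // exact
        fields.put("lower", new SubField("keyword", true));  // normalized -> not exact
        fields.put("english", new SubField("text", false));  // analyzed -> not exact
        System.out.println(exactKeywords(fields).keySet());  // [raw]
    }
}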
@@ -6,6 +6,7 @@
package org.elasticsearch.xpack.sql.type;

import java.util.Map;
import java.util.Objects;

import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;

@@ -29,11 +30,26 @@ public class TextType extends StringType {
return fieldData;
}

@Override
public boolean isInexact() {
return true;
}

@Override
public String esName() {
return "text";
}

@Override
public int hashCode() {
return Objects.hash(super.hashCode(), fieldData);
}

@Override
public boolean equals(Object obj) {
return super.equals(obj) && fieldData == ((TextType) obj).fieldData;
}

static DataType from(boolean fieldData, Map<String, DataType> fields) {
return DEFAULT.fieldData == fieldData && DEFAULT.fields().equals(fields) ? DEFAULT : new TextType(fieldData, fields);
}
@ -9,38 +9,71 @@ import org.elasticsearch.common.Booleans;
|
|||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.xpack.sql.analysis.index.MappingException;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Set;
|
||||
|
||||
import static java.lang.Math.floor;
|
||||
import static java.lang.Math.log10;
|
||||
import static java.lang.Math.round;
|
||||
import static java.util.Collections.emptyMap;
|
||||
import static java.util.Collections.unmodifiableSet;
|
||||
|
||||
public abstract class Types {
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
    public static Map<String, DataType> fromEs(Map<String, Object> asMap) {
        Map<String, Object> props = (Map<String, Object>) asMap.get("properties");
        return props == null || props.isEmpty() ? emptyMap() : startWalking(props);
    private static final Set<String> KNOWN_TYPES;

    static {
        Set<String> types = new HashSet<>();
        types.add("text");
        types.add("keyword");
        types.add("long");
        types.add("integer");
        types.add("short");
        types.add("byte");
        types.add("double");
        types.add("float");
        types.add("half_float");
        types.add("scaled_float");
        types.add("date");
        types.add("boolean");
        types.add("binary");
        types.add("object");
        types.add("nested");

        KNOWN_TYPES = unmodifiableSet(types);
    }

    private static Map<String, DataType> startWalking(Map<String, Object> mapping) {
        Map<String, DataType> translated = new LinkedHashMap<>();
    public static Map<String, DataType> fromEs(Map<String, Object> asMap) {
        return fromEs(asMap, false);
    }

    @SuppressWarnings("unchecked")
    public static Map<String, DataType> fromEs(Map<String, Object> asMap, boolean ignoreUnsupported) {
        Map<String, Object> props = null;
        if (asMap != null && !asMap.isEmpty()) {
            props = (Map<String, Object>) asMap.get("properties");
        }
        return props == null || props.isEmpty() ? emptyMap() : startWalking(props, ignoreUnsupported);
    }

    private static Map<String, DataType> startWalking(Map<String, Object> mapping, boolean ignoreUnsupported) {
        Map<String, DataType> types = new LinkedHashMap<>();

        if (mapping == null) {
            return emptyMap();
        }
        for (Entry<String, Object> entry : mapping.entrySet()) {
            walkMapping(entry.getKey(), entry.getValue(), translated);
            walkMapping(entry.getKey(), entry.getValue(), types, ignoreUnsupported);
        }

        return translated;
        return types;
    }

    @SuppressWarnings("unchecked")
    private static void walkMapping(String name, Object value, Map<String, DataType> mapping) {
    private static void walkMapping(String name, Object value, Map<String, DataType> mapping, boolean ignoreUnsupported) {
        // object type - only root or nested docs supported
        if (value instanceof Map) {
            Map<String, Object> content = (Map<String, Object>) value;

@ -50,40 +83,44 @@ public abstract class Types {
            if (type instanceof String) {
                String st = type.toString();

                if (isNested(st)) {
                    mapping.put(name, new NestedType(fromEs(content)));
                    return;
                }

                if (isPrimitive(st)) {
                    // check dates first to account for the format
                    mapping.put(name, createPrimitiveType(st, content));
                    return;
                }

                else {
                    throw new MappingException("Don't know how to parse entry %s in map %s", type, content);
                if (knownType(st)) {
                    if (isNested(st)) {
                        mapping.put(name, new NestedType(fromEs(content)));
                    } else {
                        // check dates first to account for the format
                        mapping.put(name, createPrimitiveType(st, content, ignoreUnsupported));
                    }
                } else {
                    if (!ignoreUnsupported) {
                        throw new MappingException("Unsupported mapping type %s", type);
                    }
                }
            }
            // object type ignored
            }
            else {
                throw new MappingException("Don't know how to parse mapping %s", value);
            // object type ?
            else if (type == null && content.containsKey("properties")) {
                mapping.put(name, new ObjectType(fromEs(content)));
            }
            // bail out
            else {
                throw new MappingException("Unsupported mapping %s", type);
            }
        } else {
            throw new MappingException("Unrecognized mapping %s", value);
        }
    }

    @SuppressWarnings("unchecked")
    private static DataType createPrimitiveType(String typeString, Map<String, Object> content) {
    private static DataType createPrimitiveType(String typeString, Map<String, Object> content, boolean ignoreUnsupported) {
        // since this setting is available in most types, search for it regardless

        DataType type = null;

        boolean docValues = boolSetting(content.get("doc_values"), true);
        switch (typeString) {
            case "date":
                Object fmt = content.get("format");
                if (fmt != null) {
                    type = new DateType(docValues, Strings.split(fmt.toString(), "||"));
                    type = new DateType(docValues, Strings.delimitedListToStringArray(fmt.toString(), "||"));
                }
                else {
                    type = docValues ? DateType.DEFAULT : new DateType(false);

@ -94,18 +131,19 @@ public abstract class Types {
                Object value = content.get("fields");
                Map<String, DataType> fields = emptyMap();
                if (value instanceof Map) {
                    fields = startWalking((Map<String, Object>) value);
                    fields = startWalking((Map<String, Object>) value, ignoreUnsupported);
                }
                type = TextType.from(fieldData, fields);
                break;
            case "keyword":
                int length = intSetting(content.get("ignore_above"), KeywordType.DEFAULT_LENGTH);
                boolean normalized = Strings.hasText(textSetting(content.get("normalizer"), null));
                fields = emptyMap();
                value = content.get("fields");
                if (value instanceof Map) {
                    fields = startWalking((Map<String, Object>) value);
                    fields = startWalking((Map<String, Object>) value, ignoreUnsupported);
                }
                type = KeywordType.from(docValues, length, fields);
                type = KeywordType.from(docValues, length, normalized, fields);
                break;
            default:
                type = DataTypes.fromEsName(typeString, docValues);

@ -114,6 +152,10 @@ public abstract class Types {
        return type;
    }

    private static String textSetting(Object value, String defaultValue) {
        return value == null ? defaultValue : value.toString();
    }

    private static boolean boolSetting(Object value, boolean defaultValue) {
        return value == null ? defaultValue : Booleans.parseBoolean(value.toString(), defaultValue);
    }

@ -122,8 +164,8 @@ public abstract class Types {
        return value == null ? defaultValue : Integer.parseInt(value.toString());
    }

    private static boolean isPrimitive(String string) {
        return !isNested(string);
    private static boolean knownType(String st) {
        return KNOWN_TYPES.contains(st);
    }

    private static boolean isNested(String type) {

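Taken together, the two fromEs overloads above turn unsupported mapping types from a hard failure into an opt-in skip. A minimal usage sketch, assuming only the Types API introduced in this diff (mappingJson is a hypothetical stand-in for a mapping fetched from the cluster):

    // Sketch only: mappingJson is a hypothetical JSON string of an index mapping.
    Map<String, Object> asMap = XContentHelper.convertToMap(JsonXContent.jsonXContent, mappingJson, false);

    // Strict (default) mode: an unknown type such as geo_point throws MappingException.
    Map<String, DataType> strict = Types.fromEs(asMap);

    // Lenient mode: unknown types are silently left out of the returned map.
    Map<String, DataType> lenient = Types.fromEs(asMap, true);
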
@ -18,14 +18,8 @@ public class UnknownDataType extends AbstractDataType {
        return "unknown";
    }

    @Override
    public boolean isComplex() {
        return false;
    }

    @Override
    public boolean isPrimitive() {
        return false;
    }

}

@ -22,7 +22,6 @@ import java.util.List;
import java.util.Locale;
import java.util.regex.Pattern;

import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;

public abstract class StringUtils {

@ -62,13 +61,6 @@ public abstract class StringUtils {
        }
    }

    public static String concatWithDot(List<String> strings) {
        if (strings == null || strings.isEmpty()) {
            return EMPTY;
        }
        return strings.stream().collect(joining("."));
    }

    //CamelCase to camel_case
    public static String camelCaseToUnderscore(String string) {
        if (!Strings.hasText(string)) {

@ -195,7 +187,7 @@ public abstract class StringUtils {
        for (String potentialMatch : potentialMatches) {
            float distance = ld.getDistance(match, potentialMatch);
            if (distance >= 0.5f) {
                scoredMatches.add(new Tuple<>(distance, potentialMatch));
            }
        }
        CollectionUtil.timSort(scoredMatches, (a,b) -> b.v1().compareTo(a.v1()));

@ -0,0 +1,130 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.analysis.analyzer;

import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.sql.analysis.index.EsIndex;
import org.elasticsearch.xpack.sql.analysis.index.GetIndexResult;
import org.elasticsearch.xpack.sql.analysis.index.MappingException;
import org.elasticsearch.xpack.sql.expression.Attribute;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.expression.NamedExpression;
import org.elasticsearch.xpack.sql.expression.function.DefaultFunctionRegistry;
import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry;
import org.elasticsearch.xpack.sql.parser.SqlParser;
import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
import org.elasticsearch.xpack.sql.plan.logical.Project;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.KeywordType;
import org.elasticsearch.xpack.sql.type.TextType;
import org.elasticsearch.xpack.sql.type.TypesTests;
import org.joda.time.DateTimeZone;

import java.util.List;
import java.util.Map;

import static org.elasticsearch.xpack.sql.type.DataTypes.BOOLEAN;
import static org.elasticsearch.xpack.sql.type.DataTypes.KEYWORD;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;

public class FieldAttributeTests extends ESTestCase {

    private SqlParser parser;
    private GetIndexResult getIndexResult;
    private FunctionRegistry functionRegistry;
    private Analyzer analyzer;

    public FieldAttributeTests() {
        parser = new SqlParser(DateTimeZone.UTC);
        functionRegistry = new DefaultFunctionRegistry();

        Map<String, DataType> mapping = TypesTests.loadMapping("mapping-multi-field-variation.json");

        EsIndex test = new EsIndex("test", mapping);
        getIndexResult = GetIndexResult.valid(test);
        analyzer = new Analyzer(functionRegistry, getIndexResult, DateTimeZone.UTC);
    }

    private LogicalPlan plan(String sql) {
        return analyzer.analyze(parser.createStatement(sql), true);
    }

    private FieldAttribute attribute(String fieldName) {
        LogicalPlan plan = plan("SELECT " + fieldName + " FROM test");
        assertThat(plan, instanceOf(Project.class));
        Project p = (Project) plan;
        List<? extends NamedExpression> projections = p.projections();
        assertThat(projections, hasSize(1));
        Attribute attribute = projections.get(0).toAttribute();
        assertThat(attribute, instanceOf(FieldAttribute.class));
        return (FieldAttribute) attribute;
    }

    private String error(String fieldName) {
        VerificationException ve = expectThrows(VerificationException.class, () -> plan("SELECT " + fieldName + " FROM test"));
        return ve.getMessage();
    }

    public void testRootField() {
        FieldAttribute attr = attribute("bool");
        assertThat(attr.name(), is("bool"));
        assertThat(attr.dataType(), is(BOOLEAN));
    }

    public void testDottedField() {
        FieldAttribute attr = attribute("some.dotted.field");
        assertThat(attr.path(), is("some.dotted"));
        assertThat(attr.name(), is("some.dotted.field"));
        assertThat(attr.dataType(), is(KEYWORD));
    }

    public void testExactKeyword() {
        FieldAttribute attr = attribute("some.string");
        assertThat(attr.path(), is("some"));
        assertThat(attr.name(), is("some.string"));
        assertThat(attr.dataType(), instanceOf(TextType.class));
        assertThat(attr.isInexact(), is(true));
        FieldAttribute exact = attr.exactAttribute();
        assertThat(exact.isInexact(), is(false));
        assertThat(exact.name(), is("some.string.typical"));
        assertThat(exact.dataType(), instanceOf(KeywordType.class));
    }

    public void testAmbiguousExactKeyword() {
        FieldAttribute attr = attribute("some.ambiguous");
        assertThat(attr.path(), is("some"));
        assertThat(attr.name(), is("some.ambiguous"));
        assertThat(attr.dataType(), instanceOf(TextType.class));
        assertThat(attr.isInexact(), is(true));
        MappingException me = expectThrows(MappingException.class, () -> attr.exactAttribute());
        assertThat(me.getMessage(),
                is("Multiple exact keyword candidates [one, two] available for some.ambiguous; specify which one to use"));
    }

    public void testNormalizedKeyword() {
        FieldAttribute attr = attribute("some.string.normalized");
        assertThat(attr.path(), is("some.string"));
        assertThat(attr.name(), is("some.string.normalized"));
        assertThat(attr.dataType(), instanceOf(KeywordType.class));
        assertThat(attr.isInexact(), is(true));
    }

    public void testDottedFieldPath() {
        assertThat(error("some"), is("Found 1 problem(s)\nline 1:8: Cannot use field [some] (type object) only its subfields"));
    }

    public void testDottedFieldPathDeeper() {
        assertThat(error("some.dotted"),
                is("Found 1 problem(s)\nline 1:8: Cannot use field [some.dotted] (type object) only its subfields"));
    }

    public void testDottedFieldPathTypo() {
        assertThat(error("some.dotted.fild"),
                is("Found 1 problem(s)\nline 1:8: Unknown column [some.dotted.fild], did you mean [some.dotted.field]?"));
    }
}

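testExactKeyword, testAmbiguousExactKeyword and testNormalizedKeyword above pin down how a text field falls back to an exact keyword sub-field. A standalone sketch of that selection rule, for illustration only (this is not the FieldAttribute implementation; the Map<String, Boolean> model of sub-fields is an assumption):

    // Illustration only: keywordSubFields maps each keyword sub-field name to
    // whether it carries a normalizer (normalized keywords are not exact matches).
    static String exactSubField(String field, Map<String, Boolean> keywordSubFields) {
        List<String> candidates = new ArrayList<>();
        for (Map.Entry<String, Boolean> e : keywordSubFields.entrySet()) {
            if (e.getValue() == Boolean.FALSE) {
                candidates.add(e.getKey());
            }
        }
        if (candidates.size() == 1) {
            return field + "." + candidates.get(0);    // e.g. "some.string" -> "some.string.typical"
        }
        // mirrors the message asserted in testAmbiguousExactKeyword
        throw new IllegalStateException("Multiple exact keyword candidates " + candidates
                + " available for " + field + "; specify which one to use");
    }

So exactSubField("some.string", {typical=false, normalized=true}) would yield "some.string.typical", while "some.ambiguous" with {one=false, two=false, normalized=true} would fail with the [one, two] message the test expects.
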
@ -6,29 +6,22 @@
package org.elasticsearch.xpack.sql.analysis.analyzer;

import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.xpack.sql.analysis.AnalysisException;
import org.elasticsearch.xpack.sql.analysis.index.EsIndex;
import org.elasticsearch.xpack.sql.analysis.index.GetIndexResult;
import org.elasticsearch.xpack.sql.expression.function.DefaultFunctionRegistry;
import org.elasticsearch.xpack.sql.parser.SqlParser;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.DataTypes;
import org.elasticsearch.xpack.sql.type.TypesTests;
import org.joda.time.DateTimeZone;

import java.util.LinkedHashMap;
import java.util.Map;

@TestLogging("org.elasticsearch.xpack.sql:TRACE")
public class VerifierErrorMessagesTests extends ESTestCase {
    private SqlParser parser = new SqlParser(DateTimeZone.UTC);

    private String verify(String sql) {
        Map<String, DataType> mapping = new LinkedHashMap<>();
        mapping.put("bool", DataTypes.BOOLEAN);
        mapping.put("int", DataTypes.INTEGER);
        mapping.put("text", DataTypes.TEXT);
        mapping.put("keyword", DataTypes.KEYWORD);
        Map<String, DataType> mapping = TypesTests.loadMapping("mapping-multi-field-variation.json");
        EsIndex test = new EsIndex("test", mapping);
        return verify(GetIndexResult.valid(test), sql);
    }

@ -10,12 +10,10 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
import org.elasticsearch.xpack.sql.execution.search.extractor.ConstantExtractorTests;
import org.elasticsearch.xpack.sql.execution.search.extractor.DocValueExtractorTests;
import org.elasticsearch.xpack.sql.execution.search.extractor.FieldHitExtractorTests;
import org.elasticsearch.xpack.sql.execution.search.extractor.HitExtractor;
import org.elasticsearch.xpack.sql.execution.search.extractor.HitExtractors;
import org.elasticsearch.xpack.sql.execution.search.extractor.InnerHitExtractorTests;
import org.elasticsearch.xpack.sql.execution.search.extractor.ProcessingHitExtractorTests;
import org.elasticsearch.xpack.sql.execution.search.extractor.SourceExtractorTests;
import org.elasticsearch.xpack.sql.session.Cursor;

import java.io.IOException;

@ -39,9 +37,7 @@ public class ScrollCursorTests extends AbstractWireSerializingTestCase<ScrollCur
            options.add(() -> ProcessingHitExtractorTests.randomProcessingHitExtractor(depth));
        }
        options.add(ConstantExtractorTests::randomConstantExtractor);
        options.add(DocValueExtractorTests::randomDocValueExtractor);
        options.add(InnerHitExtractorTests::randomInnerHitExtractor);
        options.add(SourceExtractorTests::randomSourceExtractor);
        options.add(FieldHitExtractorTests::randomFieldHitExtractor);
        return randomFrom(options).get();
    }

@ -15,7 +15,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilde
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.sql.expression.RootFieldAttribute;
import org.elasticsearch.xpack.sql.expression.FieldAttribute;
import org.elasticsearch.xpack.sql.expression.function.Score;
import org.elasticsearch.xpack.sql.querydsl.agg.Aggs;
import org.elasticsearch.xpack.sql.querydsl.agg.AvgAgg;

@ -28,12 +28,11 @@ import org.elasticsearch.xpack.sql.querydsl.query.MatchQuery;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataTypes;

import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static org.elasticsearch.search.sort.SortBuilders.fieldSort;
import static org.elasticsearch.search.sort.SortBuilders.scoreSort;

import static java.util.Collections.emptyList;

public class SourceGeneratorTests extends ESTestCase {

    public void testNoQueryNoFilter() {

@ -81,26 +80,26 @@ public class SourceGeneratorTests extends ESTestCase {

    public void testSelectScoreForcesTrackingScore() {
        QueryContainer container = new QueryContainer()
                .addColumn(new Score(new Location(1, 1)).toAttribute());
        SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10));
        assertTrue(sourceBuilder.trackScores());
    }

    public void testSortScoreSpecified() {
        QueryContainer container = new QueryContainer()
                .sort(new ScoreSort(Direction.DESC));
        SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10));
        assertEquals(singletonList(scoreSort()), sourceBuilder.sorts());
    }

    public void testSortFieldSpecified() {
        QueryContainer container = new QueryContainer()
                .sort(new AttributeSort(new RootFieldAttribute(new Location(1, 1), "test", DataTypes.KEYWORD), Direction.ASC));
                .sort(new AttributeSort(new FieldAttribute(new Location(1, 1), "test", DataTypes.KEYWORD), Direction.ASC));
        SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10));
        assertEquals(singletonList(fieldSort("test").order(SortOrder.ASC)), sourceBuilder.sorts());

        container = new QueryContainer()
                .sort(new AttributeSort(new RootFieldAttribute(new Location(1, 1), "test", DataTypes.KEYWORD), Direction.DESC));
                .sort(new AttributeSort(new FieldAttribute(new Location(1, 1), "test", DataTypes.KEYWORD), Direction.DESC));
        sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10));
        assertEquals(singletonList(fieldSort("test").order(SortOrder.DESC)), sourceBuilder.sorts());
    }

@ -112,9 +111,9 @@ public class SourceGeneratorTests extends ESTestCase {

    public void testNoSortIfAgg() {
        QueryContainer container = new QueryContainer()
                .addGroups(singletonList(new GroupByColumnAgg("group_id", "", "group_column")))
                .addAgg("group_id", new AvgAgg("agg_id", "", "avg_column"));
        SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10));
        assertNull(sourceBuilder.sorts());
    }
}

@ -1,60 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.execution.search.extractor;

import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.AbstractWireSerializingTestCase;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import static java.util.Collections.singletonMap;

public class DocValueExtractorTests extends AbstractWireSerializingTestCase<DocValueExtractor> {
    public static DocValueExtractor randomDocValueExtractor() {
        return new DocValueExtractor(randomAlphaOfLength(5));
    }

    @Override
    protected DocValueExtractor createTestInstance() {
        return randomDocValueExtractor();
    }

    @Override
    protected Reader<DocValueExtractor> instanceReader() {
        return DocValueExtractor::new;
    }

    @Override
    protected DocValueExtractor mutateInstance(DocValueExtractor instance) throws IOException {
        return new DocValueExtractor(instance.toString().substring(1) + "mutated");
    }

    public void testGet() {
        String fieldName = randomAlphaOfLength(5);
        DocValueExtractor extractor = new DocValueExtractor(fieldName);

        int times = between(1, 1000);
        for (int i = 0; i < times; i++) {
            List<Object> documentFieldValues = new ArrayList<>();
            documentFieldValues.add(new Object());
            if (randomBoolean()) {
                documentFieldValues.add(new Object());
            }
            SearchHit hit = new SearchHit(1);
            DocumentField field = new DocumentField(fieldName, documentFieldValues);
            hit.fields(singletonMap(fieldName, field));
            assertEquals(documentFieldValues.get(0), extractor.get(hit));
        }
    }

    public void testToString() {
        assertEquals("%incoming_links", new DocValueExtractor("incoming_links").toString());
    }
}

@ -0,0 +1,207 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.execution.search.extractor;

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
import org.elasticsearch.xpack.sql.execution.ExecutionException;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;

import static java.util.Arrays.asList;
import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.is;

public class FieldHitExtractorTests extends AbstractWireSerializingTestCase<FieldHitExtractor> {
    public static FieldHitExtractor randomFieldHitExtractor() {
        return new FieldHitExtractor(randomAlphaOfLength(5), randomBoolean(), randomAlphaOfLength(5));
    }

    @Override
    protected FieldHitExtractor createTestInstance() {
        return randomFieldHitExtractor();
    }

    @Override
    protected Reader<FieldHitExtractor> instanceReader() {
        return FieldHitExtractor::new;
    }

    @Override
    protected FieldHitExtractor mutateInstance(FieldHitExtractor instance) throws IOException {
        return new FieldHitExtractor(instance.fieldName() + "mutated", true, instance.hitName());
    }

    @AwaitsFix(bugUrl = "https://github.com/elastic/x-pack-elasticsearch/issues/3082")
    public void testGetNested() throws IOException {
        fail("implement after we're sure of the InnerHitExtractor's implementation");
    }

    public void testGetDottedValueWithDocValues() {
        String grandparent = randomAlphaOfLength(5);
        String parent = randomAlphaOfLength(5);
        String child = randomAlphaOfLength(5);
        String fieldName = grandparent + "." + parent + "." + child;

        FieldHitExtractor extractor = new FieldHitExtractor(fieldName, true);

        int times = between(1, 1000);
        for (int i = 0; i < times; i++) {

            List<Object> documentFieldValues = new ArrayList<>();
            if (randomBoolean()) {
                documentFieldValues.add(new Object());
            }

            SearchHit hit = new SearchHit(1);
            DocumentField field = new DocumentField(fieldName, documentFieldValues);
            hit.fields(singletonMap(fieldName, field));
            Object result = documentFieldValues.isEmpty() ? null : documentFieldValues.get(0);
            assertEquals(result, extractor.get(hit));
        }
    }

    public void testGetDottedValueWithSource() throws Exception {
        String grandparent = randomAlphaOfLength(5);
        String parent = randomAlphaOfLength(5);
        String child = randomAlphaOfLength(5);
        String fieldName = grandparent + "." + parent + "." + child;

        FieldHitExtractor extractor = new FieldHitExtractor(fieldName, false);

        int times = between(1, 1000);
        for (int i = 0; i < times; i++) {
            /* We use values that are parsed from json as "equal" to make the
             * test simpler. */
            @SuppressWarnings("unchecked")
            Supplier<Object> valueSupplier = randomFrom(
                    () -> randomAlphaOfLength(5),
                    () -> randomInt(),
                    () -> randomDouble());
            Object value = valueSupplier.get();
            SearchHit hit = new SearchHit(1);
            XContentBuilder source = JsonXContent.contentBuilder();
            boolean hasGrandparent = randomBoolean();
            boolean hasParent = randomBoolean();
            boolean hasChild = randomBoolean();
            boolean hasSource = hasGrandparent && hasParent && hasChild;

            source.startObject();
            if (hasGrandparent) {
                source.startObject(grandparent);
                if (hasParent) {
                    source.startObject(parent);
                    if (hasChild) {
                        source.field(child, value);
                        if (randomBoolean()) {
                            source.field(fieldName + randomAlphaOfLength(3), value + randomAlphaOfLength(3));
                        }
                    }
                    source.endObject();
                }
                source.endObject();
            }
            source.endObject();
            BytesReference sourceRef = source.bytes();
            hit.sourceRef(sourceRef);
            Object extract = extractor.get(hit);
            assertEquals(hasSource ? value : null, extract);
        }
    }

    public void testGetDocValue() {
        String fieldName = randomAlphaOfLength(5);
        FieldHitExtractor extractor = new FieldHitExtractor(fieldName, true);

        int times = between(1, 1000);
        for (int i = 0; i < times; i++) {
            List<Object> documentFieldValues = new ArrayList<>();
            if (randomBoolean()) {
                documentFieldValues.add(new Object());
            }
            SearchHit hit = new SearchHit(1);
            DocumentField field = new DocumentField(fieldName, documentFieldValues);
            hit.fields(singletonMap(fieldName, field));
            Object result = documentFieldValues.isEmpty() ? null : documentFieldValues.get(0);
            assertEquals(result, extractor.get(hit));
        }
    }

    public void testGetSource() throws IOException {
        String fieldName = randomAlphaOfLength(5);
        FieldHitExtractor extractor = new FieldHitExtractor(fieldName, false);

        int times = between(1, 1000);
        for (int i = 0; i < times; i++) {
            /* We use values that are parsed from json as "equal" to make the
             * test simpler. */
            @SuppressWarnings("unchecked")
            Supplier<Object> valueSupplier = randomFrom(
                    () -> randomAlphaOfLength(5),
                    () -> randomInt(),
                    () -> randomDouble());
            Object value = valueSupplier.get();
            SearchHit hit = new SearchHit(1);
            XContentBuilder source = JsonXContent.contentBuilder();
            source.startObject(); {
                source.field(fieldName, value);
                if (randomBoolean()) {
                    source.field(fieldName + "_random_junk", value + "_random_junk");
                }
            }
            source.endObject();
            BytesReference sourceRef = source.bytes();
            hit.sourceRef(sourceRef);
            assertEquals(value, extractor.get(hit));
        }
    }

    public void testToString() {
        assertEquals("field@hit", new FieldHitExtractor("field", true, "hit").toString());
    }

    public void testMultiValuedDocValue() {
        String fieldName = randomAlphaOfLength(5);
        FieldHitExtractor fe = new FieldHitExtractor(fieldName, true);
        SearchHit hit = new SearchHit(1);
        DocumentField field = new DocumentField(fieldName, asList("a", "b"));
        hit.fields(singletonMap(fieldName, field));
        ExecutionException ex = expectThrows(ExecutionException.class, () -> fe.get(hit));
        assertThat(ex.getMessage(), is("Arrays (returned by [" + fieldName + "]) are not supported"));
    }

    public void testExtractSourcePath() {
        FieldHitExtractor fe = new FieldHitExtractor("a.b.c", false);
        Object value = new Object();
        Map<String, Object> map = singletonMap("a", singletonMap("b", singletonMap("c", value)));
        assertThat(fe.extractFromSource(map), is(value));
    }

    public void testExtractSourceIncorrectPath() {
        FieldHitExtractor fe = new FieldHitExtractor("a.b.c.d", false);
        Object value = new Object();
        Map<String, Object> map = singletonMap("a", singletonMap("b", singletonMap("c", value)));
        ExecutionException ex = expectThrows(ExecutionException.class, () -> fe.extractFromSource(map));
        assertThat(ex.getMessage(), is("Cannot extract value [a.b.c.d] from source"));
    }

    public void testMultiValuedSource() {
        FieldHitExtractor fe = new FieldHitExtractor("a", false);
        Object value = new Object();
        Map<String, Object> map = singletonMap("a", asList(value, value));
        ExecutionException ex = expectThrows(ExecutionException.class, () -> fe.extractFromSource(map));
        assertThat(ex.getMessage(), is("Arrays (returned by [a]) are not supported"));
    }
}

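The extractFromSource tests above fix the contract of the consolidated extractor: dotted names are resolved one map level at a time, and multi-valued fields are rejected. A self-contained re-implementation of that walk, for illustration only (this is not the actual FieldHitExtractor code; a plain RuntimeException stands in for ExecutionException):

    // Illustration only; mirrors the behavior asserted in testExtractSourcePath,
    // testExtractSourceIncorrectPath and testMultiValuedSource.
    static Object extractFromSource(String fieldName, Map<String, Object> source) {
        Object value = source;
        for (String step : fieldName.split("\\.")) {
            if (value instanceof Map == false) {
                // ran out of nested objects before the path was consumed
                throw new RuntimeException("Cannot extract value [" + fieldName + "] from source");
            }
            value = ((Map<?, ?>) value).get(step);
        }
        if (value instanceof List) {
            // multi-valued fields throw by default in this commit
            throw new RuntimeException("Arrays (returned by [" + fieldName + "]) are not supported");
        }
        return value;
    }
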
@ -1,41 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.execution.search.extractor;

import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.test.AbstractWireSerializingTestCase;

import java.io.IOException;

public class InnerHitExtractorTests extends AbstractWireSerializingTestCase<InnerHitExtractor> {
    public static InnerHitExtractor randomInnerHitExtractor() {
        return new InnerHitExtractor(randomAlphaOfLength(5), randomAlphaOfLength(5), randomBoolean());
    }

    @Override
    protected InnerHitExtractor createTestInstance() {
        return randomInnerHitExtractor();
    }

    @Override
    protected Reader<InnerHitExtractor> instanceReader() {
        return InnerHitExtractor::new;
    }

    @Override
    protected InnerHitExtractor mutateInstance(InnerHitExtractor instance) throws IOException {
        return new InnerHitExtractor(instance.hitName() + "mustated", instance.fieldName(), true);
    }

    @AwaitsFix(bugUrl = "https://github.com/elastic/x-pack-elasticsearch/issues/3082")
    public void testGet() throws IOException {
        fail("implement after we're sure of the InnerHitExtractor's implementation");
    }

    public void testToString() {
        assertEquals("field@hit", new InnerHitExtractor("hit", "field", true).toString());
    }
}

@ -69,7 +69,7 @@ public class ProcessingHitExtractorTests extends AbstractWireSerializingTestCase

    public void testGet() {
        String fieldName = randomAlphaOfLength(5);
        ChainingProcessor extractor = new ChainingProcessor(new HitExtractorProcessor(new DocValueExtractor(fieldName)), new MathProcessor(MathOperation.LOG));
        ChainingProcessor extractor = new ChainingProcessor(new HitExtractorProcessor(new FieldHitExtractor(fieldName, true)), new MathProcessor(MathOperation.LOG));

        int times = between(1, 1000);
        for (int i = 0; i < times; i++) {

@ -1,70 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.execution.search.extractor;

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.AbstractWireSerializingTestCase;

import java.io.IOException;
import java.util.function.Supplier;

public class SourceExtractorTests extends AbstractWireSerializingTestCase<SourceExtractor> {
    public static SourceExtractor randomSourceExtractor() {
        return new SourceExtractor(randomAlphaOfLength(5));
    }

    @Override
    protected SourceExtractor createTestInstance() {
        return randomSourceExtractor();
    }

    @Override
    protected Reader<SourceExtractor> instanceReader() {
        return SourceExtractor::new;
    }

    @Override
    protected SourceExtractor mutateInstance(SourceExtractor instance) throws IOException {
        return new SourceExtractor(instance.toString().substring(1) + "mutated");
    }

    public void testGet() throws IOException {
        String fieldName = randomAlphaOfLength(5);
        SourceExtractor extractor = new SourceExtractor(fieldName);

        int times = between(1, 1000);
        for (int i = 0; i < times; i++) {
            /* We use values that are parsed from json as "equal" to make the
             * test simpler. */
            @SuppressWarnings("unchecked")
            Supplier<Object> valueSupplier = randomFrom(
                    () -> randomAlphaOfLength(5),
                    () -> randomInt(),
                    () -> randomDouble());
            Object value = valueSupplier.get();
            SearchHit hit = new SearchHit(1);
            XContentBuilder source = JsonXContent.contentBuilder();
            source.startObject(); {
                source.field(fieldName, value);
                if (randomBoolean()) {
                    source.field(fieldName + "_random_junk", value + "_random_junk");
                }
            }
            source.endObject();
            BytesReference sourceRef = source.bytes();
            hit.sourceRef(sourceRef);
            assertEquals(value, extractor.get(hit));
        }
    }

    public void testToString() {
        assertEquals("#name", new SourceExtractor("name").toString());
    }
}

@ -20,6 +20,7 @@ import java.util.Locale;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;

@ -80,9 +81,9 @@ public class QuotingTests extends ESTestCase {
        Expression exp = new SqlParser(DateTimeZone.UTC).createExpression(quote + qualifier + quote + "." + quote + name + quote);
        assertThat(exp, instanceOf(UnresolvedAttribute.class));
        UnresolvedAttribute ua = (UnresolvedAttribute) exp;
        assertThat(ua.name(), equalTo(name));
        assertThat(ua.name(), equalTo(qualifier + "." + name));
        assertThat(ua.qualifiedName(), equalTo(qualifier + "." + name));
        assertThat(ua.qualifier(), equalTo(qualifier));
        assertThat(ua.qualifier(), is(nullValue()));
    }

@ -0,0 +1,38 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.parser;

import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.sql.tree.Location;

import static org.hamcrest.Matchers.is;

public class IdentifierBuilderTests extends ESTestCase {

    private static Location L = new Location(1, 10);

    public void testTypicalIndex() throws Exception {
        IdentifierBuilder.validateIndex("some-index", L);
    }

    public void testInternalIndex() throws Exception {
        IdentifierBuilder.validateIndex(".some-internal-index-2020-02-02", L);
    }

    public void testIndexPattern() throws Exception {
        IdentifierBuilder.validateIndex(".some-*", L);
    }

    public void testInvalidIndex() throws Exception {
        ParsingException pe = expectThrows(ParsingException.class, () -> IdentifierBuilder.validateIndex("some,index", L));
        assertThat(pe.getMessage(), is("line 1:12: Invalid index name (illegal character ,) some,index"));
    }

    public void testUpperCasedIndex() throws Exception {
        ParsingException pe = expectThrows(ParsingException.class, () -> IdentifierBuilder.validateIndex("thisIsAnIndex", L));
        assertThat(pe.getMessage(), is("line 1:12: Invalid index name (needs to be lowercase) thisIsAnIndex"));
    }
}

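The two failure cases above imply validateIndex enforces at least a lowercase-only rule plus a blacklist of characters that clash with multi-index syntax. A plausible shape for that check, inferred from these tests rather than copied from IdentifierBuilder (the exact blacklist and the ParsingException constructor are assumptions):

    // Inferred sketch; the real IdentifierBuilder.validateIndex may differ.
    static void validateIndex(String index, Location location) {
        for (char c : index.toCharArray()) {
            if (c == ',' || c == ' ') {    // assumed blacklist; ',' is the case covered by testInvalidIndex
                throw new ParsingException(location, "Invalid index name (illegal character " + c + ") " + index);
            }
            if (Character.isUpperCase(c)) {
                throw new ParsingException(location, "Invalid index name (needs to be lowercase) " + index);
            }
        }
    }

Note that testIndexPattern passes ".some-*" through unchanged, so '*' must stay off any such blacklist to keep index patterns usable.
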
@ -43,7 +43,6 @@ public class VerifierErrorMessagesTests extends ESTestCase {
    }

    public void testMultiGroupBy() {
        // TODO: location needs to be updated after merging extend-having
        assertEquals("1:32: Currently, only a single expression can be used with GROUP BY; please select one of [bool, keyword]",
                verify("SELECT bool FROM test GROUP BY bool, keyword"));
    }

@ -0,0 +1,199 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.sql.type;

import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.sql.analysis.index.MappingException;

import java.io.InputStream;
import java.util.List;
import java.util.Map;

import static java.util.Collections.emptyMap;
import static org.elasticsearch.xpack.sql.type.DataTypes.DATE;
import static org.elasticsearch.xpack.sql.type.DataTypes.INTEGER;
import static org.elasticsearch.xpack.sql.type.DataTypes.KEYWORD;
import static org.elasticsearch.xpack.sql.type.DataTypes.TEXT;
import static org.hamcrest.CoreMatchers.startsWith;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;

public class TypesTests extends ESTestCase {

    public void testNullMap() throws Exception {
        Map<String, DataType> fromEs = Types.fromEs(null);
        assertThat(fromEs.isEmpty(), is(true));
    }

    public void testEmptyMap() throws Exception {
        Map<String, DataType> fromEs = Types.fromEs(emptyMap());
        assertThat(fromEs.isEmpty(), is(true));
    }

    public void testBasicMapping() throws Exception {
        Map<String, DataType> mapping = loadMapping("mapping-basic.json");
        assertThat(mapping.size(), is(6));
        assertThat(mapping.get("emp_no"), is(INTEGER));
        assertThat(mapping.get("first_name"), instanceOf(TextType.class));
        assertThat(mapping.get("last_name"), is(TEXT));
        assertThat(mapping.get("gender"), is(KEYWORD));
        assertThat(mapping.get("salary"), is(INTEGER));
    }

    public void testDefaultStringMapping() throws Exception {
        Map<String, DataType> mapping = loadMapping("mapping-default-string.json");

        assertThat(mapping.size(), is(1));
        assertThat(mapping.get("dep_no").same(TEXT), is(true));
    }

    public void testTextField() throws Exception {
        Map<String, DataType> mapping = loadMapping("mapping-text.json");

        assertThat(mapping.size(), is(1));
        DataType type = mapping.get("full_name");
        assertThat(type, instanceOf(TextType.class));
        assertThat(type.hasDocValues(), is(false));
        TextType ttype = (TextType) type;
        assertThat(type.precision(), is(Integer.MAX_VALUE));
        assertThat(ttype.hasFieldData(), is(false));
    }

    public void testKeywordField() throws Exception {
        Map<String, DataType> mapping = loadMapping("mapping-keyword.json");

        assertThat(mapping.size(), is(1));
        DataType type = mapping.get("full_name");
        assertThat(type, instanceOf(KeywordType.class));
        assertThat(type.hasDocValues(), is(true));
        assertThat(type.precision(), is(256));
    }

    public void testDateField() throws Exception {
        Map<String, DataType> mapping = loadMapping("mapping-date.json");

        assertThat(mapping.size(), is(1));
        DataType type = mapping.get("date");
        assertThat(type, is(DATE));
        assertThat(type.hasDocValues(), is(true));
        assertThat(type.precision(), is(19));

        DateType dtype = (DateType) type;
        List<String> formats = dtype.formats();
        assertThat(formats, hasSize(3));
    }

    public void testDateNoFormat() throws Exception {
        Map<String, DataType> mapping = loadMapping("mapping-date-no-format.json");

        assertThat(mapping.size(), is(1));
        DataType type = mapping.get("date");
        assertThat(type, is(DATE));
        assertThat(type.hasDocValues(), is(true));
        DateType dtype = (DateType) type;
        // default types
        assertThat(dtype.formats(), hasSize(2));
    }

    public void testDateMulti() throws Exception {
        Map<String, DataType> mapping = loadMapping("mapping-date-multi.json");

        assertThat(mapping.size(), is(1));
        DataType type = mapping.get("date");
        assertThat(type, is(DATE));
        assertThat(type.hasDocValues(), is(true));
        DateType dtype = (DateType) type;
        // default types
        assertThat(dtype.formats(), hasSize(1));
    }

    public void testDocValueField() throws Exception {
        Map<String, DataType> mapping = loadMapping("mapping-docvalues.json");

        assertThat(mapping.size(), is(1));
        DataType type = mapping.get("session_id");
        assertThat(type, instanceOf(KeywordType.class));
        assertThat(type.precision(), is(15));
        assertThat(type.hasDocValues(), is(false));
    }

    public void testDottedField() throws Exception {
        Map<String, DataType> mapping = loadMapping("mapping-object.json");

        assertThat(mapping.size(), is(2));
        DataType type = mapping.get("manager");
        assertThat(type.isPrimitive(), is(false));
        assertThat(type, instanceOf(ObjectType.class));
        ObjectType ot = (ObjectType) type;
        Map<String, DataType> children = ot.properties();
        assertThat(children.size(), is(2));
        DataType names = children.get("name");
        children = ((ObjectType) names).properties();
        assertThat(children.size(), is(2));
        assertThat(children.get("first"), is(TEXT));
    }

    public void testMultiField() throws Exception {
        Map<String, DataType> mapping = loadMapping("mapping-multi-field.json");

        assertThat(mapping.size(), is(1));
        DataType type = mapping.get("text");
        assertThat(type.isPrimitive(), is(true));
        assertThat(type, instanceOf(TextType.class));
        TextType tt = (TextType) type;
        Map<String, DataType> fields = tt.fields();
        assertThat(fields.size(), is(2));
        assertThat(fields.get("raw"), is(KEYWORD));
        assertThat(fields.get("english"), is(TEXT));
    }

    public void testMultiFieldTooManyOptions() throws Exception {
        Map<String, DataType> mapping = loadMapping("mapping-multi-field.json");

        assertThat(mapping.size(), is(1));
        DataType type = mapping.get("text");
        assertThat(type.isPrimitive(), is(true));
        assertThat(type, instanceOf(TextType.class));
        TextType tt = (TextType) type;
        Map<String, DataType> fields = tt.fields();
        assertThat(fields.size(), is(2));
        assertThat(fields.get("raw"), is(KEYWORD));
        assertThat(fields.get("english"), is(TEXT));
    }

    public void testNestedDoc() throws Exception {
        Map<String, DataType> mapping = loadMapping("mapping-nested.json");

        assertThat(mapping.size(), is(1));
        DataType type = mapping.get("dep");
        assertThat(type.isPrimitive(), is(false));
        assertThat(type, instanceOf(NestedType.class));
        NestedType ot = (NestedType) type;
        Map<String, DataType> children = ot.properties();
        assertThat(children.size(), is(4));
        assertThat(children.get("dep_name"), is(TEXT));
        assertThat(children.get("start_date"), is(DATE));
    }

    public void testGeoField() throws Exception {
        MappingException ex = expectThrows(MappingException.class, () -> loadMapping("mapping-geo.json"));
        assertThat(ex.getMessage(), is("Unsupported mapping type geo_point"));
    }

    public void testUnsupportedTypes() throws Exception {
        MappingException ex = expectThrows(MappingException.class, () -> loadMapping("mapping-unsupported.json"));
        assertThat(ex.getMessage(), startsWith("Unsupported mapping type"));
    }

    public static Map<String, DataType> loadMapping(String name) {
        InputStream stream = TypesTests.class.getResourceAsStream("/" + name);
        assertNotNull("Could not find mapping resource:" + name, stream);
        return Types.fromEs(XContentHelper.convertToMap(JsonXContent.jsonXContent, stream, randomBoolean()));
    }
}

@ -0,0 +1,22 @@
{
    "properties" : {
        "emp_no" : {
            "type" : "integer"
        },
        "first_name" : {
            "type" : "text"
        },
        "gender" : {
            "type" : "keyword"
        },
        "languages" : {
            "type" : "byte"
        },
        "last_name" : {
            "type" : "text"
        },
        "salary" : {
            "type" : "integer"
        }
    }
}

@ -0,0 +1,9 @@
{
    "properties": {
        "date": {
            "type": "date",
            "format": "yyyy-MM-dd"
        }
    }
}

@ -0,0 +1,8 @@
{
    "properties": {
        "date": {
            "type": "date"
        }
    }
}

@ -0,0 +1,9 @@
{
    "properties": {
        "date": {
            "type": "date",
            "format": "yyyy-MM-dd || basic_time || year"
        }
    }
}

@ -0,0 +1,13 @@
{
    "properties" : {
        "dep_no" : {
            "type" : "text",
            "fields" : {
                "keyword" : {
                    "type" : "keyword",
                    "ignore_above" : 256
                }
            }
        }
    }
}

@ -0,0 +1,9 @@
{
    "properties" : {
        "session_id" : {
            "type" : "keyword",
            "ignore_above" : 15,
            "doc_values" : false
        }
    }
}

@ -0,0 +1,7 @@
{
    "properties" : {
        "location" : {
            "type" : "geo_point"
        }
    }
}

@ -0,0 +1,8 @@
{
    "properties" : {
        "full_name" : {
            "type" : "keyword",
            "ignore_above" : 256
        }
    }
}

@ -0,0 +1,15 @@
{
    "properties" : {
        "text" : {
            "type" : "text",
            "fields" : {
                "raw" : {
                    "type" : "keyword"
                },
                "key" : {
                    "type" : "keyword"
                }
            }
        }
    }
}

@ -0,0 +1,46 @@
{
    "properties" : {
        "bool" : { "type" : "boolean" },
        "int" : { "type" : "integer" },
        "text" : { "type" : "text" },
        "keyword" : { "type" : "keyword" },
        "some" : {
            "properties" : {
                "dotted" : {
                    "properties" : {
                        "field" : {
                            "type" : "keyword"
                        }
                    }
                },
                "string" : {
                    "type" : "text",
                    "fields" : {
                        "normalized" : {
                            "type" : "keyword",
                            "normalizer" : "some_normalizer"
                        },
                        "typical" : {
                            "type" : "keyword"
                        }
                    }
                },
                "ambiguous" : {
                    "type" : "text",
                    "fields" : {
                        "one" : {
                            "type" : "keyword"
                        },
                        "two" : {
                            "type" : "keyword"
                        },
                        "normalized" : {
                            "type" : "keyword",
                            "normalizer" : "some_normalizer"
                        }
                    }
                }
            }
        }
    }
}

@ -0,0 +1,16 @@
{
    "properties" : {
        "text" : {
            "type" : "text",
            "fields" : {
                "raw" : {
                    "type" : "keyword"
                },
                "english" : {
                    "type" : "text",
                    "analyzer" : "english"
                }
            }
        }
    }
}

@ -0,0 +1,27 @@
{
    "properties" : {
        "dep" : {
            "type" : "nested",
            "properties" : {
                "dep_name" : {
                    "type" : "text"
                },
                "dep_no" : {
                    "type" : "text",
                    "fields" : {
                        "keyword" : {
                            "type" : "keyword",
                            "ignore_above" : 256
                        }
                    }
                },
                "end_date" : {
                    "type" : "date"
                },
                "start_date" : {
                    "type" : "date"
                }
            }
        }
    }
}

@ -0,0 +1,24 @@
{
    "properties" : {
        "region" : {
            "type" : "keyword"
        },
        "manager" : {
            "properties" : {
                "age" : {
                    "type" : "integer"
                },
                "name" : {
                    "properties" : {
                        "first" : {
                            "type" : "text"
                        },
                        "last" : {
                            "type" : "text"
                        }
                    }
                }
            }
        }
    }
}

@ -0,0 +1,10 @@
{
    "properties" : {
        "parent_child" : {
            "type" : "join",
            "relations" : {
                "question" : "answer"
            }
        }
    }
}

@ -0,0 +1,8 @@
{
    "properties" : {
        "full_name" : {
            "type" : "text",
            "fielddata" : false
        }
    }
}

@ -0,0 +1,11 @@
{
    "properties" : {
        "range" : {
            "type" : "integer_range"
        },
        "time_frame" : {
            "type" : "date_range",
            "format" : "yyyy-MM-dd"
        }
    }
}