diff --git a/qa/sql/no-security/src/test/java/org/elasticsearch/xpack/qa/sql/nosecurity/CliExplainIT.java b/qa/sql/no-security/src/test/java/org/elasticsearch/xpack/qa/sql/nosecurity/CliExplainIT.java index 6dbe559c0e0..f0c8309a189 100644 --- a/qa/sql/no-security/src/test/java/org/elasticsearch/xpack/qa/sql/nosecurity/CliExplainIT.java +++ b/qa/sql/no-security/src/test/java/org/elasticsearch/xpack/qa/sql/nosecurity/CliExplainIT.java @@ -25,15 +25,15 @@ public class CliExplainIT extends CliIntegrationTestCase { assertThat(command("EXPLAIN " + (randomBoolean() ? "" : "(PLAN ANALYZED) ") + "SELECT * FROM test"), containsString("plan")); assertThat(readLine(), startsWith("----------")); - assertThat(readLine(), startsWith("Project[[test_field{r}#")); + assertThat(readLine(), startsWith("Project[[test_field{f}#")); assertThat(readLine(), startsWith("\\_SubQueryAlias[test]")); - assertThat(readLine(), startsWith(" \\_EsRelation[test][test_field{r}#")); + assertThat(readLine(), startsWith(" \\_EsRelation[test][test_field{f}#")); assertEquals("", readLine()); assertThat(command("EXPLAIN (PLAN OPTIMIZED) SELECT * FROM test"), containsString("plan")); assertThat(readLine(), startsWith("----------")); - assertThat(readLine(), startsWith("Project[[test_field{r}#")); - assertThat(readLine(), startsWith("\\_EsRelation[test][test_field{r}#")); + assertThat(readLine(), startsWith("Project[[test_field{f}#")); + assertThat(readLine(), startsWith("\\_EsRelation[test][test_field{f}#")); assertEquals("", readLine()); // TODO in this case we should probably remove the source filtering entirely. Right? It costs but we don't need it. @@ -72,17 +72,17 @@ public class CliExplainIT extends CliIntegrationTestCase { assertThat(command("EXPLAIN " + (randomBoolean() ? 
"" : "(PLAN ANALYZED) ") + "SELECT * FROM test WHERE i = 2"), containsString("plan")); assertThat(readLine(), startsWith("----------")); - assertThat(readLine(), startsWith("Project[[i{r}#")); - assertThat(readLine(), startsWith("\\_Filter[i{r}#")); + assertThat(readLine(), startsWith("Project[[i{f}#")); + assertThat(readLine(), startsWith("\\_Filter[i{f}#")); assertThat(readLine(), startsWith(" \\_SubQueryAlias[test]")); - assertThat(readLine(), startsWith(" \\_EsRelation[test][i{r}#")); + assertThat(readLine(), startsWith(" \\_EsRelation[test][i{f}#")); assertEquals("", readLine()); assertThat(command("EXPLAIN (PLAN OPTIMIZED) SELECT * FROM test WHERE i = 2"), containsString("plan")); assertThat(readLine(), startsWith("----------")); - assertThat(readLine(), startsWith("Project[[i{r}#")); - assertThat(readLine(), startsWith("\\_Filter[i{r}#")); - assertThat(readLine(), startsWith(" \\_EsRelation[test][i{r}#")); + assertThat(readLine(), startsWith("Project[[i{f}#")); + assertThat(readLine(), startsWith("\\_Filter[i{f}#")); + assertThat(readLine(), startsWith(" \\_EsRelation[test][i{f}#")); assertEquals("", readLine()); assertThat(command("EXPLAIN (PLAN EXECUTABLE) SELECT * FROM test WHERE i = 2"), containsString("plan")); @@ -132,13 +132,13 @@ public class CliExplainIT extends CliIntegrationTestCase { assertThat(readLine(), startsWith("----------")); assertThat(readLine(), startsWith("Aggregate[[],[COUNT(1)#")); assertThat(readLine(), startsWith("\\_SubQueryAlias[test]")); - assertThat(readLine(), startsWith(" \\_EsRelation[test][i{r}#")); + assertThat(readLine(), startsWith(" \\_EsRelation[test][i{f}#")); assertEquals("", readLine()); assertThat(command("EXPLAIN (PLAN OPTIMIZED) SELECT COUNT(*) FROM test"), containsString("plan")); assertThat(readLine(), startsWith("----------")); assertThat(readLine(), startsWith("Aggregate[[],[COUNT(1)#")); - assertThat(readLine(), startsWith("\\_EsRelation[test][i{r}#")); + assertThat(readLine(), 
startsWith("\\_EsRelation[test][i{f}#")); assertEquals("", readLine()); assertThat(command("EXPLAIN (PLAN EXECUTABLE) SELECT COUNT(*) FROM test"), containsString("plan")); diff --git a/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/embed/CliProtoHandler.java b/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/embed/CliProtoHandler.java new file mode 100644 index 00000000000..3df76b919f7 --- /dev/null +++ b/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/embed/CliProtoHandler.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.qa.sql.embed; + +import org.elasticsearch.client.Client; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.xpack.sql.cli.net.protocol.Proto; +import org.elasticsearch.xpack.sql.plugin.RestSqlCliAction; + +import java.io.DataInput; +import java.io.IOException; + +import static org.mockito.Mockito.mock; + +class CliProtoHandler extends ProtoHandler { + private final RestSqlCliAction action; + + CliProtoHandler(Client client) { + super(client); + action = new RestSqlCliAction(Settings.EMPTY, mock(RestController.class)); + } + + @Override + protected void handle(RestChannel channel, DataInput in) throws IOException { + action.operation(Proto.INSTANCE.readRequest(in), client).accept(channel); + } +} \ No newline at end of file diff --git a/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/embed/JdbcProtoHandler.java b/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/embed/JdbcProtoHandler.java index 9e76b9f6343..9818c53c848 100644 --- a/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/embed/JdbcProtoHandler.java +++ 
b/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/embed/JdbcProtoHandler.java @@ -5,12 +5,10 @@ */ package org.elasticsearch.xpack.qa.sql.embed; -import com.sun.net.httpserver.HttpExchange; import org.elasticsearch.client.Client; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; -import org.elasticsearch.test.rest.FakeRestChannel; -import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.xpack.sql.analysis.index.IndexResolver; import org.elasticsearch.xpack.sql.jdbc.net.protocol.Proto; import org.elasticsearch.xpack.sql.plugin.RestSqlJdbcAction; @@ -31,14 +29,7 @@ class JdbcProtoHandler extends ProtoHandler { } @Override - protected void handle(HttpExchange http, DataInput in) throws IOException { - FakeRestChannel channel = new FakeRestChannel(new FakeRestRequest(), true, 1); - try { - action.operation(Proto.INSTANCE.readRequest(in), client).accept(channel); - while (false == channel.await()) {} - sendHttpResponse(http, channel.capturedResponse().content()); - } catch (Exception e) { - fail(http, e); - } + protected void handle(RestChannel channel, DataInput in) throws IOException { + action.operation(Proto.INSTANCE.readRequest(in), client).accept(channel); } } \ No newline at end of file diff --git a/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/embed/ProtoHandler.java b/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/embed/ProtoHandler.java index f75b1bbb5af..870da509387 100644 --- a/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/embed/ProtoHandler.java +++ b/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/embed/ProtoHandler.java @@ -5,6 +5,9 @@ */ package org.elasticsearch.xpack.qa.sql.embed; +import io.netty.handler.codec.http.HttpHeaderNames; + +import com.sun.net.httpserver.Headers; import com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpHandler; @@ -12,10 +15,15 @@ import org.apache.logging.log4j.Logger; 
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.client.Client; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.rest.FakeRestChannel; +import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.xpack.sql.analysis.index.IndexResolver; import org.elasticsearch.xpack.sql.execution.PlanExecutor; @@ -23,6 +31,9 @@ import java.io.DataInput; import java.io.DataInputStream; import java.io.IOException; +import static java.util.Collections.singletonList; +import static java.util.Collections.singletonMap; + public abstract class ProtoHandler implements HttpHandler, AutoCloseable { private static PlanExecutor planExecutor(EmbeddedModeFilterClient client) { @@ -38,8 +49,7 @@ public abstract class ProtoHandler implements HttpHandler, AutoCloseable { protected ProtoHandler(Client client) { NodesInfoResponse niResponse = client.admin().cluster().prepareNodesInfo("_local").clear().get(TV); - this.client = !(client instanceof EmbeddedModeFilterClient) ? new EmbeddedModeFilterClient( - client) : (EmbeddedModeFilterClient) client; + this.client = client instanceof EmbeddedModeFilterClient ? 
(EmbeddedModeFilterClient) client : new EmbeddedModeFilterClient(client); this.client.setPlanExecutor(planExecutor(this.client)); info = niResponse.getNodes().get(0); clusterName = niResponse.getClusterName().value(); @@ -55,32 +65,34 @@ public abstract class ProtoHandler implements HttpHandler, AutoCloseable { return; } + FakeRestChannel channel = new FakeRestChannel( + new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(singletonMap("error_trace", "")).build(), true, 1); try (DataInputStream in = new DataInputStream(http.getRequestBody())) { - handle(http, in); - } catch (Exception ex) { - fail(http, ex); - } - } - - protected abstract void handle(HttpExchange http, DataInput in) throws IOException; - - protected void sendHttpResponse(HttpExchange http, BytesReference response) throws IOException { - // first do the conversion in case an exception is triggered - if (http.getResponseHeaders().isEmpty()) { - http.sendResponseHeaders(RestStatus.OK.getStatus(), 0); - } - response.writeTo(http.getResponseBody()); - http.close(); - } - - protected void fail(HttpExchange http, Exception ex) { - log.error("Caught error while transmitting response", ex); - try { - // the error conversion has failed, halt - if (http.getResponseHeaders().isEmpty()) { - http.sendResponseHeaders(RestStatus.INTERNAL_SERVER_ERROR.getStatus(), -1); + handle(channel, in); + while (false == channel.await()) { } - } catch (IOException ioEx) { + sendHttpResponse(http, channel.capturedResponse()); + } catch (Exception e) { + sendHttpResponse(http, new BytesRestResponse(channel, e)); + } + } + + protected abstract void handle(RestChannel channel, DataInput in) throws IOException; + + protected void sendHttpResponse(HttpExchange http, RestResponse response) throws IOException { + try { + // first do the conversion in case an exception is triggered + if (http.getResponseHeaders().isEmpty()) { + http.sendResponseHeaders(response.status().getStatus(), response.content().length()); + + 
Headers headers = http.getResponseHeaders(); + headers.putIfAbsent(HttpHeaderNames.CONTENT_TYPE.toString(), singletonList(response.contentType())); + if (response.getHeaders() != null) { + headers.putAll(response.getHeaders()); + } + } + response.content().writeTo(http.getResponseBody()); + } catch (IOException ex) { log.error("Caught error while trying to catch error", ex); } finally { http.close(); diff --git a/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DataLoader.java b/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DataLoader.java index b3724fbcede..693bf70ffd8 100644 --- a/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DataLoader.java +++ b/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/DataLoader.java @@ -93,7 +93,7 @@ public class DataLoader { if (titlesString == null) { throw new IllegalArgumentException("[" + location + "] must contain at least a title row"); } - List titles = Arrays.asList(titlesString.split(",")); + List titles = Arrays.asList(titlesString.split(",")); String line; while ((line = reader.readLine()) != null) { @@ -106,4 +106,5 @@ public class DataLoader { public static InputStream readFromJarUrl(URL source) throws IOException { return source.openStream(); } -} + +} \ No newline at end of file diff --git a/qa/sql/src/main/resources/debug.csv-spec b/qa/sql/src/main/resources/debug.csv-spec index 30d0048aa6b..5d97b2326d7 100644 --- a/qa/sql/src/main/resources/debug.csv-spec +++ b/qa/sql/src/main/resources/debug.csv-spec @@ -3,7 +3,7 @@ // debug -SELECT int FROM test GROUP BY AVG(int) + 2; +SELECT 5 + 2 AS a; table:s test_emp diff --git a/qa/sql/src/main/resources/debug.sql-spec b/qa/sql/src/main/resources/debug.sql-spec index ff2e2c5adf7..cd03b4764b7 100644 --- a/qa/sql/src/main/resources/debug.sql-spec +++ b/qa/sql/src/main/resources/debug.sql-spec @@ -3,4 +3,4 @@ // debug -SELECT int FROM test GROUP BY AVG(int) + 2; +SELECT 5 + 2 AS a; diff --git a/sql/server/src/main/antlr/SqlBase.g4 
b/sql/server/src/main/antlr/SqlBase.g4 index b32810d8eee..f4e9ee0df17 100644 --- a/sql/server/src/main/antlr/SqlBase.g4 +++ b/sql/server/src/main/antlr/SqlBase.g4 @@ -188,18 +188,15 @@ primaryExpression | EXTRACT '(' field=identifier FROM valueExpression ')' #extract | constant #constantDefault | ASTERISK #star - | (qualifier=columnExpression '.')? ASTERISK #star + | (qualifiedName '.')? ASTERISK #star | identifier '(' (setQuantifier? expression (',' expression)*)? ')' #functionCall | '(' query ')' #subqueryExpression - | columnExpression #columnReference - | base=columnExpression '.' fieldName=identifier #dereference + | identifier #columnReference + | qualifiedName #dereference | '(' expression ')' #parenthesizedExpression ; -columnExpression - : ((alias=identifier | table=tableIdentifier) '.' )? name=identifier - ; - + constant : NULL #nullLiteral | identifier STRING #typeConstructor @@ -221,7 +218,7 @@ dataType ; qualifiedName - : identifier ('.' identifier)* + : (path=identifier '.')* name=identifier ; tableIdentifier @@ -251,16 +248,16 @@ number // http://developer.mimer.se/validator/sql-reserved-words.tml nonReserved - : ANALYZE | ANALYZED - | COLUMNS - | DEBUG - | EXECUTABLE | EXPLAIN - | FORMAT | FUNCTIONS | FROM - | GRAPHVIZ - | MAPPED - | OPTIMIZED - | PARSED | PHYSICAL | PLAN - | QUERY + : ANALYZE | ANALYZED + | COLUMNS + | DEBUG + | EXECUTABLE | EXPLAIN + | FORMAT | FUNCTIONS | FROM + | GRAPHVIZ + | MAPPED + | OPTIMIZED + | PARSED | PHYSICAL | PLAN + | QUERY | RLIKE | SCHEMAS | SHOW | TABLES | TEXT @@ -325,7 +322,6 @@ TEXT: 'TEXT'; TRUE: 'TRUE'; USING: 'USING'; VERIFY: 'VERIFY'; -WHEN: 'WHEN'; WHERE: 'WHERE'; WITH: 'WITH'; diff --git a/sql/server/src/main/antlr/SqlBase.tokens b/sql/server/src/main/antlr/SqlBase.tokens index 9df7e80006b..979aa5a99ca 100644 --- a/sql/server/src/main/antlr/SqlBase.tokens +++ b/sql/server/src/main/antlr/SqlBase.tokens @@ -60,33 +60,32 @@ TEXT=59 TRUE=60 USING=61 VERIFY=62 -WHEN=63 -WHERE=64 -WITH=65 -EQ=66 -NEQ=67 -LT=68 
-LTE=69 -GT=70 -GTE=71 -PLUS=72 -MINUS=73 -ASTERISK=74 -SLASH=75 -PERCENT=76 -CONCAT=77 -STRING=78 -INTEGER_VALUE=79 -DECIMAL_VALUE=80 -IDENTIFIER=81 -DIGIT_IDENTIFIER=82 -QUOTED_IDENTIFIER=83 -BACKQUOTED_IDENTIFIER=84 -SIMPLE_COMMENT=85 -BRACKETED_COMMENT=86 -WS=87 -UNRECOGNIZED=88 -DELIMITER=89 +WHERE=63 +WITH=64 +EQ=65 +NEQ=66 +LT=67 +LTE=68 +GT=69 +GTE=70 +PLUS=71 +MINUS=72 +ASTERISK=73 +SLASH=74 +PERCENT=75 +CONCAT=76 +STRING=77 +INTEGER_VALUE=78 +DECIMAL_VALUE=79 +IDENTIFIER=80 +DIGIT_IDENTIFIER=81 +QUOTED_IDENTIFIER=82 +BACKQUOTED_IDENTIFIER=83 +SIMPLE_COMMENT=84 +BRACKETED_COMMENT=85 +WS=86 +UNRECOGNIZED=87 +DELIMITER=88 '('=1 ')'=2 ','=3 @@ -149,17 +148,16 @@ DELIMITER=89 'TRUE'=60 'USING'=61 'VERIFY'=62 -'WHEN'=63 -'WHERE'=64 -'WITH'=65 -'='=66 -'<'=68 -'<='=69 -'>'=70 -'>='=71 -'+'=72 -'-'=73 -'*'=74 -'/'=75 -'%'=76 -'||'=77 +'WHERE'=63 +'WITH'=64 +'='=65 +'<'=67 +'<='=68 +'>'=69 +'>='=70 +'+'=71 +'-'=72 +'*'=73 +'/'=74 +'%'=75 +'||'=76 diff --git a/sql/server/src/main/antlr/SqlBaseLexer.tokens b/sql/server/src/main/antlr/SqlBaseLexer.tokens index 5ae683b4c30..e6ed0c550ef 100644 --- a/sql/server/src/main/antlr/SqlBaseLexer.tokens +++ b/sql/server/src/main/antlr/SqlBaseLexer.tokens @@ -60,32 +60,31 @@ TEXT=59 TRUE=60 USING=61 VERIFY=62 -WHEN=63 -WHERE=64 -WITH=65 -EQ=66 -NEQ=67 -LT=68 -LTE=69 -GT=70 -GTE=71 -PLUS=72 -MINUS=73 -ASTERISK=74 -SLASH=75 -PERCENT=76 -CONCAT=77 -STRING=78 -INTEGER_VALUE=79 -DECIMAL_VALUE=80 -IDENTIFIER=81 -DIGIT_IDENTIFIER=82 -QUOTED_IDENTIFIER=83 -BACKQUOTED_IDENTIFIER=84 -SIMPLE_COMMENT=85 -BRACKETED_COMMENT=86 -WS=87 -UNRECOGNIZED=88 +WHERE=63 +WITH=64 +EQ=65 +NEQ=66 +LT=67 +LTE=68 +GT=69 +GTE=70 +PLUS=71 +MINUS=72 +ASTERISK=73 +SLASH=74 +PERCENT=75 +CONCAT=76 +STRING=77 +INTEGER_VALUE=78 +DECIMAL_VALUE=79 +IDENTIFIER=80 +DIGIT_IDENTIFIER=81 +QUOTED_IDENTIFIER=82 +BACKQUOTED_IDENTIFIER=83 +SIMPLE_COMMENT=84 +BRACKETED_COMMENT=85 +WS=86 +UNRECOGNIZED=87 '('=1 ')'=2 ','=3 @@ -148,17 +147,16 @@ UNRECOGNIZED=88 'TRUE'=60 
'USING'=61 'VERIFY'=62 -'WHEN'=63 -'WHERE'=64 -'WITH'=65 -'='=66 -'<'=68 -'<='=69 -'>'=70 -'>='=71 -'+'=72 -'-'=73 -'*'=74 -'/'=75 -'%'=76 -'||'=77 +'WHERE'=63 +'WITH'=64 +'='=65 +'<'=67 +'<='=68 +'>'=69 +'>='=70 +'+'=71 +'-'=72 +'*'=73 +'/'=74 +'%'=75 +'||'=76 diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java index f09cbe60d6a..b86bb2af379 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Analyzer.java @@ -14,12 +14,11 @@ import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.AttributeSet; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expressions; +import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.NamedExpression; -import org.elasticsearch.xpack.sql.expression.NestedFieldAttribute; import org.elasticsearch.xpack.sql.expression.Order; import org.elasticsearch.xpack.sql.expression.SubQueryExpression; -import org.elasticsearch.xpack.sql.expression.TypedAttribute; import org.elasticsearch.xpack.sql.expression.UnresolvedAlias; import org.elasticsearch.xpack.sql.expression.UnresolvedAttribute; import org.elasticsearch.xpack.sql.expression.UnresolvedStar; @@ -46,7 +45,6 @@ import org.elasticsearch.xpack.sql.plan.logical.With; import org.elasticsearch.xpack.sql.rule.Rule; import org.elasticsearch.xpack.sql.rule.RuleExecutor; import org.elasticsearch.xpack.sql.tree.Node; -import org.elasticsearch.xpack.sql.type.CompoundDataType; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.DataTypeConversion; import org.elasticsearch.xpack.sql.util.StringUtils; @@ -62,7 +60,6 
@@ import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Set; -import java.util.stream.Stream; import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; @@ -163,27 +160,15 @@ public class Analyzer extends RuleExecutor { private static Attribute resolveAgainstList(UnresolvedAttribute u, List attrList, boolean lenient) { List matches = new ArrayList<>(); - // use the qualifier if present - if (u.qualifier() != null) { - for (Attribute attribute : attrList) { - if (!attribute.synthetic()) { - if (Objects.equals(u.qualifiedName(), attribute.qualifiedName())) { - matches.add(attribute); - } - if (attribute instanceof NestedFieldAttribute) { - // since u might be unqualified but the parent shows up as a qualifier - if (Objects.equals(u.qualifiedName(), attribute.name())) { - matches.add(attribute.withLocation(u.location())); - } - } - } - } - } + // first try the qualified version + boolean qualified = u.qualifier() != null; - // if none is found, try to do a match just on the name (to filter out missing qualifiers) - if (matches.isEmpty()) { - for (Attribute attribute : attrList) { - if (!attribute.synthetic() && Objects.equals(u.name(), attribute.name())) { + for (Attribute attribute : attrList) { + if (!attribute.synthetic()) { + boolean match = qualified ? 
+ Objects.equals(u.qualifiedName(), attribute.qualifiedName()) : + Objects.equals(u.name(), attribute.name()); + if (match) { matches.add(attribute.withLocation(u.location())); } } @@ -356,17 +341,18 @@ public class Analyzer extends RuleExecutor { return plan.transformExpressionsUp(e -> { if (e instanceof UnresolvedAttribute) { UnresolvedAttribute u = (UnresolvedAttribute) e; - NamedExpression named = resolveAgainstList(u, - plan.children().stream() - .flatMap(c -> c.output().stream()) - .collect(toList()), - false); + List childrenOutput = new ArrayList<>(); + for (LogicalPlan child : plan.children()) { + childrenOutput.addAll(child.output()); + } + NamedExpression named = resolveAgainstList(u, childrenOutput, false); // if resolved, return it; otherwise keep it in place to be resolved later if (named != null) { - // it's a compound type so convert it - if (named instanceof TypedAttribute && ((TypedAttribute) named).dataType() instanceof CompoundDataType) { - named = new UnresolvedStar(e.location(), - new UnresolvedAttribute(e.location(), u.name(), u.qualifier())); + // if it's a object/compound type, keep it unresolved with a nice error message + if (named instanceof FieldAttribute && !((FieldAttribute) named).dataType().isPrimitive()) { + FieldAttribute fa = (FieldAttribute) named; + named = u.withUnresolvedMessage( + "Cannot use field [" + fa.name() + "] (type " + fa.dataType().esName() + ") only its subfields"); } if (log.isTraceEnabled()) { @@ -381,42 +367,71 @@ public class Analyzer extends RuleExecutor { } private List expandProjections(List projections, LogicalPlan child) { - return projections.stream().flatMap(e -> { - // check if there's a qualifier - // no - means only top-level - // it is - return only that level - if (e instanceof UnresolvedStar) { - List output = child.output(); - UnresolvedStar us = (UnresolvedStar) e; + List result = new ArrayList<>(); - Stream stream = output.stream(); + List output = child.output(); + for (NamedExpression ne : 
projections) { + if (ne instanceof UnresolvedStar) { + UnresolvedStar us = (UnresolvedStar) ne; - if (us.qualifier() == null) { - stream = stream.filter(a -> !(a instanceof NestedFieldAttribute)); - } - - // if there's a qualifier, inspect that level + // a qualifier is specified - since this is a star, it should be a CompoundDataType if (us.qualifier() != null) { - // qualifier is selected, need to resolve that first. - Attribute qualifier = resolveAgainstList(us.qualifier(), output, false); - stream = stream.filter(a -> (a instanceof NestedFieldAttribute) - && Objects.equals(a.qualifier(), qualifier.qualifier()) - && Objects.equals(((NestedFieldAttribute) a).parentPath(), qualifier.name())); - } + // resolve the so-called qualifier first + // since this is an unresolved start we don't know whether it's a path or an actual qualifier + Attribute q = resolveAgainstList(us.qualifier(), output, false); - return stream.filter(a -> !(a.dataType() instanceof CompoundDataType)); - } - else if (e instanceof UnresolvedAlias) { - UnresolvedAlias ua = (UnresolvedAlias) e; - if (ua.child() instanceof UnresolvedStar) { - return child.output().stream(); + // now use the resolved 'qualifier' to match + for (Attribute attr : output) { + // filter the attributes that match based on their path + if (attr instanceof FieldAttribute) { + FieldAttribute fa = (FieldAttribute) attr; + if (q.qualifier() != null) { + if (Objects.equals(q.qualifiedName(), fa.qualifiedName())) { + result.add(fa.withLocation(attr.location())); + } + } else { + // use the path only to match non-compound types + if (Objects.equals(q.name(), fa.path())) { + result.add(fa.withLocation(attr.location())); + } + } + } + } + } else { + // add only primitives + // but filter out multi fields + Set seenMultiFields = new LinkedHashSet<>(); + + for (Attribute a : output) { + if (a.dataType().isPrimitive()) { + if (a instanceof FieldAttribute) { + FieldAttribute fa = (FieldAttribute) a; + if 
(!seenMultiFields.contains(fa.parent())) { + result.add(a); + seenMultiFields.add(a); + } + } else { + result.add(a); + } + } + } } - return Stream.of(e); + } else if (ne instanceof UnresolvedAlias) { + UnresolvedAlias ua = (UnresolvedAlias) ne; + if (ua.child() instanceof UnresolvedStar) { + // add only primitives + for (Attribute a : output) { + if (a.dataType().isPrimitive()) { + result.add(a); + } + } + } + } else { + result.add(ne); } - return Stream.of(e); - }) - .map(NamedExpression.class::cast) - .collect(toList()); + } + + return result; } // generate a new (right) logical plan with different IDs for all conflicting attributes diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java index b7f06185143..29a31eee7e0 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java @@ -6,12 +6,10 @@ package org.elasticsearch.xpack.sql.analysis.analyzer; import org.elasticsearch.xpack.sql.capabilities.Unresolvable; -import org.elasticsearch.xpack.sql.expression.Alias; import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.Exists; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expressions; -import org.elasticsearch.xpack.sql.expression.Order; import org.elasticsearch.xpack.sql.expression.UnresolvedAttribute; import org.elasticsearch.xpack.sql.expression.function.Function; import org.elasticsearch.xpack.sql.expression.function.FunctionAttribute; @@ -129,15 +127,22 @@ abstract class Verifier { // handle Attributes different to provide more context if (ae instanceof UnresolvedAttribute) { UnresolvedAttribute ua = (UnresolvedAttribute) ae; - boolean useQualifier = ua.qualifier() != null; - List potentialMatches = new 
ArrayList<>(); - for (Attribute a : p.intputSet()) { - potentialMatches.add(useQualifier ? a.qualifiedName() : a.name()); - } + // only work out the synonyms for raw unresolved attributes + if (!ua.customMessage()) { + boolean useQualifier = ua.qualifier() != null; + List potentialMatches = new ArrayList<>(); + for (Attribute a : p.intputSet()) { + String nameCandidate = useQualifier ? a.qualifiedName() : a.name(); + // add only primitives (object types would only result in another error) + if (a.dataType().isPrimitive()) { + potentialMatches.add(nameCandidate); + } + } - List matches = StringUtils.findSimilar(ua.qualifiedName(), potentialMatches); - if (!matches.isEmpty()) { - ae = new UnresolvedAttribute(ua.location(), ua.name(), ua.qualifier(), UnresolvedAttribute.errorMessage(ua.qualifiedName(), matches)); + List matches = StringUtils.findSimilar(ua.qualifiedName(), potentialMatches); + if (!matches.isEmpty()) { + ae = ua.withUnresolvedMessage(UnresolvedAttribute.errorMessage(ua.qualifiedName(), matches)); + } } } @@ -261,7 +266,7 @@ abstract class Verifier { Expressions.names(a.groupings()))); groupingFailures.add(a); return false; - } + } } } return true; @@ -300,7 +305,7 @@ abstract class Verifier { a.aggregates().forEach(ne -> ne.collectFirstChildren(c -> checkGroupMatch(c, ne, a.groupings(), missing, functions))); - if (!missing.isEmpty()) { + if (!missing.isEmpty()) { String plural = missing.size() > 1 ? 
"s" : StringUtils.EMPTY; localFailures.add(fail(missing.values().iterator().next(), "Cannot use non-grouped column" + plural + " %s, expected %s", Expressions.names(missing.keySet()), @@ -378,4 +383,4 @@ abstract class Verifier { .forEach(exp -> localFailures.add(fail(exp, "[SCORE()] cannot be an argument to a function"))), Function.class)); } -} +} \ No newline at end of file diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java index 6aa1ac6b602..e582045aab5 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.sql.analysis.index; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; + import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.admin.indices.get.GetIndexRequest.Feature; @@ -96,11 +97,7 @@ public class IndexResolver { } private static GetIndexResult buildGetIndexResult(String concreteIndex, String indexOrAlias, - ImmutableOpenMap mappings) { - if (concreteIndex.startsWith(".")) { - //Indices that start with "." 
are considered internal and should not be available to SQL - return GetIndexResult.notFound(indexOrAlias); - } + ImmutableOpenMap mappings) { // Make sure that the index contains only a single type MappingMetaData singleType = null; @@ -128,8 +125,12 @@ public class IndexResolver { return GetIndexResult.invalid( "[" + indexOrAlias + "] contains more than one type " + typeNames + " so it is incompatible with sql"); } else { - Map mapping = Types.fromEs(singleType.sourceAsMap()); - return GetIndexResult.valid(new EsIndex(indexOrAlias, mapping)); + try { + Map mapping = Types.fromEs(singleType.sourceAsMap()); + return GetIndexResult.valid(new EsIndex(indexOrAlias, mapping)); + } catch (MappingException ex) { + return GetIndexResult.invalid(ex.getMessage()); + } } } } \ No newline at end of file diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/AbstractSearchHitRowSet.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/AbstractSearchHitRowSet.java index 5ac37c2482f..66308893fae 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/AbstractSearchHitRowSet.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/AbstractSearchHitRowSet.java @@ -44,7 +44,7 @@ abstract class AbstractSearchHitRowSet extends AbstractRowSet { String innerHit = null; for (HitExtractor ex : exts) { - innerHit = ex.innerHitName(); + innerHit = ex.hitName(); if (innerHit != null) { innerHits.add(innerHit); } @@ -96,7 +96,7 @@ abstract class AbstractSearchHitRowSet extends AbstractRowSet { @Override protected Object getColumn(int column) { HitExtractor e = extractors.get(column); - int extractorLevel = e.innerHitName() == null ? 0 : 1; + int extractorLevel = e.hitName() == null ? 
0 : 1; SearchHit hit = null; SearchHit[] sh = hits; diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/Scroller.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/Scroller.java index 5922de72c12..6a47f741f45 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/Scroller.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/Scroller.java @@ -26,10 +26,8 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.execution.ExecutionException; import org.elasticsearch.xpack.sql.execution.search.extractor.ComputingHitExtractor; -import org.elasticsearch.xpack.sql.execution.search.extractor.DocValueExtractor; +import org.elasticsearch.xpack.sql.execution.search.extractor.FieldHitExtractor; import org.elasticsearch.xpack.sql.execution.search.extractor.HitExtractor; -import org.elasticsearch.xpack.sql.execution.search.extractor.InnerHitExtractor; -import org.elasticsearch.xpack.sql.execution.search.extractor.SourceExtractor; import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.AggPathInput; import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.AggValueInput; import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.HitExtractorInput; @@ -40,7 +38,6 @@ import org.elasticsearch.xpack.sql.querydsl.agg.AggPath; import org.elasticsearch.xpack.sql.querydsl.container.AggRef; import org.elasticsearch.xpack.sql.querydsl.container.ColumnReference; import org.elasticsearch.xpack.sql.querydsl.container.ComputedRef; -import org.elasticsearch.xpack.sql.querydsl.container.NestedFieldRef; import org.elasticsearch.xpack.sql.querydsl.container.QueryContainer; import org.elasticsearch.xpack.sql.querydsl.container.ScriptFieldRef; import 
org.elasticsearch.xpack.sql.querydsl.container.SearchHitFieldRef; @@ -239,14 +236,14 @@ public class Scroller { // if there's an id, try to setup next scroll if (scrollId != null && - // is all the content already retrieved? + // is all the content already retrieved? (Boolean.TRUE.equals(response.isTerminatedEarly()) || response.getHits().getTotalHits() == hits.length - // or maybe the limit has been reached - || (hits.length >= query.limit() && query.limit() > -1))) { - // if so, clear the scroll - clearScroll(response.getScrollId(), ActionListener.wrap( - succeeded -> listener.onResponse(new InitialSearchHitRowSet(schema, exts, hits, query.limit(), null)), - listener::onFailure)); + // or maybe the limit has been reached + || (hits.length >= query.limit() && query.limit() > -1))) { + // if so, clear the scroll + clearScroll(response.getScrollId(), ActionListener.wrap( + succeeded -> listener.onResponse(new InitialSearchHitRowSet(schema, exts, hits, query.limit(), null)), + listener::onFailure)); } else { listener.onResponse(new InitialSearchHitRowSet(schema, exts, hits, query.limit(), scrollId)); } @@ -273,17 +270,12 @@ public class Scroller { private HitExtractor createExtractor(ColumnReference ref) { if (ref instanceof SearchHitFieldRef) { SearchHitFieldRef f = (SearchHitFieldRef) ref; - return f.useDocValue() ? 
new DocValueExtractor(f.name()) : new SourceExtractor(f.name()); - } - - if (ref instanceof NestedFieldRef) { - NestedFieldRef f = (NestedFieldRef) ref; - return new InnerHitExtractor(f.parent(), f.name(), f.useDocValue()); + return new FieldHitExtractor(f.name(), f.useDocValue(), f.hitName()); } if (ref instanceof ScriptFieldRef) { ScriptFieldRef f = (ScriptFieldRef) ref; - return new DocValueExtractor(f.name()); + return new FieldHitExtractor(f.name(), true); } if (ref instanceof ComputedRef) { @@ -318,16 +310,28 @@ public class Scroller { try { ShardSearchFailure[] failure = response.getShardFailures(); if (!CollectionUtils.isEmpty(failure)) { - onFailure(new ExecutionException(failure[0].reason(), failure[0].getCause())); + cleanupScroll(response, new ExecutionException(failure[0].reason(), failure[0].getCause())); } handleResponse(response, listener); } catch (Exception ex) { - onFailure(ex); + cleanupScroll(response, ex); } } protected abstract void handleResponse(SearchResponse response, ActionListener listener); + // clean-up the scroll in case of exception + protected final void cleanupScroll(SearchResponse response, Exception ex) { + if (response != null && response.getScrollId() != null) { + client.prepareClearScroll().addScrollId(response.getScrollId()) + // in case of failure, report the initial exception instead of the one resulting from cleaning the scroll + .execute(ActionListener.wrap(r -> listener.onFailure(ex), e -> { + ex.addSuppressed(e); + listener.onFailure(ex); + })); + } + } + protected final void clearScroll(String scrollId, ActionListener listener) { if (scrollId != null) { client.prepareClearScroll().addScrollId(scrollId).execute( @@ -344,4 +348,4 @@ public class Scroller { listener.onFailure(ex); } } -} +} \ No newline at end of file diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/SourceGenerator.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/SourceGenerator.java index 
634957db206..02da2cc0da2 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/SourceGenerator.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/SourceGenerator.java @@ -22,8 +22,6 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.expression.FieldAttribute; -import org.elasticsearch.xpack.sql.expression.NestedFieldAttribute; -import org.elasticsearch.xpack.sql.expression.RootFieldAttribute; import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ReferenceInput; import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ScoreProcessorDefinition; @@ -142,52 +140,49 @@ public abstract class SourceGenerator { // sorting only works on not-analyzed fields - look for a multi-field replacement if (attr instanceof FieldAttribute) { FieldAttribute fa = (FieldAttribute) attr; - attr = fa.isAnalyzed() ? fa.notAnalyzedAttribute() : attr; - } + fa = fa.isInexact() ? 
fa.exactAttribute() : fa; - // top-level doc value - if (attr instanceof RootFieldAttribute) { - sortBuilder = fieldSort(((RootFieldAttribute) attr).name()); - } - if (attr instanceof NestedFieldAttribute) { - NestedFieldAttribute nfa = (NestedFieldAttribute) attr; - FieldSortBuilder fieldSort = fieldSort(nfa.name()); - - String nestedPath = nfa.parentPath(); - NestedSortBuilder newSort = new NestedSortBuilder(nestedPath); - NestedSortBuilder nestedSort = fieldSort.getNestedSort(); - - if (nestedSort == null) { - fieldSort.setNestedSort(newSort); + sortBuilder = fieldSort(fa.name()); + if (!fa.isNested()) { + sortBuilder = fieldSort(fa.name()); } else { - for (; nestedSort.getNestedSort() != null; nestedSort = nestedSort.getNestedSort()) { - } - nestedSort.setNestedSort(newSort); - } + FieldSortBuilder fieldSort = fieldSort(fa.name()); + String nestedPath = fa.nestedParent().path(); + NestedSortBuilder newSort = new NestedSortBuilder(nestedPath); + NestedSortBuilder nestedSort = fieldSort.getNestedSort(); - nestedSort = newSort; - - List nestedQuery = new ArrayList<>(1); - - // copy also the nested queries fr(if any) - if (container.query() != null) { - container.query().forEachDown(nq -> { - // found a match - if (nestedPath.equals(nq.path())) { - // get the child query - the nested wrapping and inner hits are not needed - nestedQuery.add(nq.child().asBuilder()); + if (nestedSort == null) { + fieldSort.setNestedSort(newSort); + } else { + for (; nestedSort.getNestedSort() != null; nestedSort = nestedSort.getNestedSort()) { } - }, NestedQuery.class); - } - - if (nestedQuery.size() > 0) { - if (nestedQuery.size() > 1) { - throw new SqlIllegalArgumentException("nested query should have been grouped in one place"); + nestedSort.setNestedSort(newSort); } - nestedSort.setFilter(nestedQuery.get(0)); - } - sortBuilder = fieldSort; + nestedSort = newSort; + + List nestedQuery = new ArrayList<>(1); + + // copy also the nested queries fr(if any) + if (container.query() != 
null) { + container.query().forEachDown(nq -> { + // found a match + if (nestedPath.equals(nq.path())) { + // get the child query - the nested wrapping and inner hits are not needed + nestedQuery.add(nq.child().asBuilder()); + } + }, NestedQuery.class); + } + + if (nestedQuery.size() > 0) { + if (nestedQuery.size() > 1) { + throw new SqlIllegalArgumentException("nested query should have been grouped in one place"); + } + nestedSort.setFilter(nestedQuery.get(0)); + } + + sortBuilder = fieldSort; + } } } else if (sortable instanceof ScriptSort) { ScriptSort ss = (ScriptSort) sortable; @@ -212,4 +207,4 @@ public abstract class SourceGenerator { source.storedFields(NO_STORED_FIELD); } } -} +} \ No newline at end of file diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/ComputingHitExtractor.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/ComputingHitExtractor.java index 6976884e0bc..04b17d0a86e 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/ComputingHitExtractor.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/ComputingHitExtractor.java @@ -61,7 +61,7 @@ public class ComputingHitExtractor implements HitExtractor { } @Override - public String innerHitName() { + public String hitName() { return null; } diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/ConstantExtractor.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/ConstantExtractor.java index f01ffb7b498..6c93016d9af 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/ConstantExtractor.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/ConstantExtractor.java @@ -47,7 +47,7 @@ public class ConstantExtractor implements HitExtractor { } @Override - public String innerHitName() { + public String 
hitName() { return null; } diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/DocValueExtractor.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/DocValueExtractor.java deleted file mode 100644 index d0c4752d791..00000000000 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/DocValueExtractor.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.sql.execution.search.extractor; - -import org.elasticsearch.common.document.DocumentField; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.search.SearchHit; -import org.joda.time.ReadableInstant; - -import java.io.IOException; - -/** - * Extracts field values from {@link SearchHit#field(String)}. - */ -public class DocValueExtractor implements HitExtractor { - /** - * Stands for {@code doc_value}. We try to use short names for {@link HitExtractor}s - * to save a few bytes when when we send them back to the user. - */ - static final String NAME = "d"; - private final String fieldName; - - public DocValueExtractor(String name) { - this.fieldName = name; - } - - DocValueExtractor(StreamInput in) throws IOException { - fieldName = in.readString(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(fieldName); - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - public Object get(SearchHit hit) { - // TODO we should think about what to do with multi-valued fields. 
- // Tracked by https://github.com/elastic/x-pack-elasticsearch/issues/2874 - DocumentField field = hit.field(fieldName); - if (field != null) { - Object value = field.getValue(); - if (value != null && value instanceof ReadableInstant) { - return ((ReadableInstant) value).getMillis(); - } else { - return value; - } - } else { - return null; - } - } - - @Override - public String innerHitName() { - return null; - } - - @Override - public boolean equals(Object obj) { - if (obj == null || obj.getClass() != getClass()) { - return false; - } - DocValueExtractor other = (DocValueExtractor) obj; - return fieldName.equals(other.fieldName); - } - - @Override - public int hashCode() { - return fieldName.hashCode(); - } - - @Override - public String toString() { - /* % kind of looks like two 0s with a column separator between - * them so it makes me think of columnar storage which doc - * values are. */ - return "%" + fieldName; - } -} diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java new file mode 100644 index 00000000000..1044eb2d90c --- /dev/null +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractor.java @@ -0,0 +1,144 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.sql.execution.search.extractor; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.document.DocumentField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.xpack.sql.execution.ExecutionException; +import org.joda.time.ReadableInstant; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * Extractor for ES fields. Works for both 'normal' fields but also nested ones (which require hitName to be set). + * The latter is used as metadata in assembling the results in the tabular response. + */ +public class FieldHitExtractor implements HitExtractor { + + private static final boolean ARRAYS_LENIENCY = false; + + /** + * Stands for {@code field}. We try to use short names for {@link HitExtractor}s + * to save a few bytes when when we send them back to the user. + */ + static final String NAME = "f"; + + private final String fieldName, hitName; + private final boolean useDocValue; + private final String[] path; + + public FieldHitExtractor(String name, boolean useDocValue) { + this(name, useDocValue, null); + } + + public FieldHitExtractor(String name, boolean useDocValue, String hitName) { + this.fieldName = name; + this.useDocValue = useDocValue; + this.hitName = hitName; + this.path = useDocValue ? Strings.EMPTY_ARRAY : Strings.tokenizeToStringArray(fieldName, "."); + } + + FieldHitExtractor(StreamInput in) throws IOException { + fieldName = in.readString(); + useDocValue = in.readBoolean(); + hitName = in.readOptionalString(); + path = useDocValue ? 
Strings.EMPTY_ARRAY : Strings.tokenizeToStringArray(fieldName, "."); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(fieldName); + out.writeBoolean(useDocValue); + out.writeOptionalString(hitName); + } + + @Override + public Object get(SearchHit hit) { + Object value = null; + if (useDocValue) { + DocumentField field = hit.field(fieldName); + if (field != null) { + checkMultiValue(field.getValues()); + value = field.getValue(); + if (value instanceof ReadableInstant) { + value = ((ReadableInstant) value).getMillis(); + } + } + } else { + Map source = hit.getSourceAsMap(); + if (source != null) { + value = extractFromSource(source); + } + } + return value; + } + + private void checkMultiValue(Object values) { + if (!ARRAYS_LENIENCY && values != null && values instanceof List && ((List) values).size() > 1) { + throw new ExecutionException("Arrays (returned by [%s]) are not supported", fieldName); + } + } + + @SuppressWarnings("unchecked") + Object extractFromSource(Map map) { + Object value = null; + // each node is a key inside the map + for (String node : path) { + // if it's not the first step, start unpacking + if (value != null) { + if (value instanceof Map) { + map = (Map) value; + } else { + throw new ExecutionException("Cannot extract value [%s] from source", fieldName); + } + } + value = map.get(node); + } + checkMultiValue(value); + return value; + } + + @Override + public String hitName() { + return hitName; + } + + public String fieldName() { + return fieldName; + } + + @Override + public String toString() { + return fieldName + "@" + hitName; + } + + @Override + public boolean equals(Object obj) { + if (obj == null || obj.getClass() != getClass()) { + return false; + } + FieldHitExtractor other = (FieldHitExtractor) obj; + return fieldName.equals(other.fieldName) + && hitName.equals(other.hitName) + && useDocValue == other.useDocValue; + 
} + + @Override + public int hashCode() { + return Objects.hash(fieldName, useDocValue, hitName); + } +} \ No newline at end of file diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/HitExtractor.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/HitExtractor.java index a3936b9fe16..64973dfe02c 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/HitExtractor.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/HitExtractor.java @@ -22,5 +22,5 @@ public interface HitExtractor extends NamedWriteable { * Name of the inner hit needed by this extractor if it needs one, {@code null} otherwise. */ @Nullable - String innerHitName(); + String hitName(); } \ No newline at end of file diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/HitExtractors.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/HitExtractors.java index bf827e945ec..cc904c98c2a 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/HitExtractors.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/HitExtractors.java @@ -20,9 +20,7 @@ public abstract class HitExtractors { public static List getNamedWriteables() { List entries = new ArrayList<>(); entries.add(new Entry(HitExtractor.class, ConstantExtractor.NAME, ConstantExtractor::new)); - entries.add(new Entry(HitExtractor.class, DocValueExtractor.NAME, DocValueExtractor::new)); - entries.add(new Entry(HitExtractor.class, InnerHitExtractor.NAME, InnerHitExtractor::new)); - entries.add(new Entry(HitExtractor.class, SourceExtractor.NAME, SourceExtractor::new)); + entries.add(new Entry(HitExtractor.class, FieldHitExtractor.NAME, FieldHitExtractor::new)); entries.add(new Entry(HitExtractor.class, ComputingHitExtractor.NAME, ComputingHitExtractor::new)); 
entries.add(new Entry(HitExtractor.class, ScoreExtractor.NAME, in -> ScoreExtractor.INSTANCE)); entries.addAll(Processors.getNamedWriteables()); diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/InnerHitExtractor.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/InnerHitExtractor.java deleted file mode 100644 index 58bc486e6d6..00000000000 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/InnerHitExtractor.java +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.sql.execution.search.extractor; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.document.DocumentField; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.xpack.sql.execution.ExecutionException; - -import java.io.IOException; -import java.util.Map; -import java.util.Objects; - -public class InnerHitExtractor implements HitExtractor { - /** - * Stands for {@code inner}. We try to use short names for {@link HitExtractor}s - * to save a few bytes when when we send them back to the user. - */ - static final String NAME = "i"; - private final String hitName, fieldName; - private final boolean useDocValue; - private final String[] tree; - - public InnerHitExtractor(String hitName, String name, boolean useDocValue) { - this.hitName = hitName; - this.fieldName = name; - this.useDocValue = useDocValue; - this.tree = useDocValue ? 
Strings.EMPTY_ARRAY : Strings.tokenizeToStringArray(name, "."); - } - - InnerHitExtractor(StreamInput in) throws IOException { - hitName = in.readString(); - fieldName = in.readString(); - useDocValue = in.readBoolean(); - tree = useDocValue ? Strings.EMPTY_ARRAY : Strings.tokenizeToStringArray(fieldName, "."); - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(hitName); - out.writeString(fieldName); - out.writeBoolean(useDocValue); - } - - @SuppressWarnings("unchecked") - @Override - public Object get(SearchHit hit) { - if (useDocValue) { - DocumentField field = hit.field(fieldName); - return field != null ? field.getValue() : null; - } - else { - Map source = hit.getSourceAsMap(); - if (source == null) { - return null; - } - Object value = null; - for (String node : tree) { - if (value != null) { - if (value instanceof Map) { - source = (Map) value; - } - else { - throw new ExecutionException("Cannot extract value %s from source", fieldName); - } - } - value = source.get(node); - } - return value; - } - } - - @Override - public String innerHitName() { - return hitName; - } - - String fieldName() { - return fieldName; - } - - public String hitName() { - return hitName; - } - - @Override - public String toString() { - return fieldName + "@" + hitName; - } - - @Override - public boolean equals(Object obj) { - if (obj == null || obj.getClass() != getClass()) { - return false; - } - InnerHitExtractor other = (InnerHitExtractor) obj; - return fieldName.equals(other.fieldName) - && hitName.equals(other.hitName) - && useDocValue == other.useDocValue; - } - - @Override - public int hashCode() { - return Objects.hash(hitName, fieldName, useDocValue); - } -} diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/ScoreExtractor.java 
b/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/ScoreExtractor.java index 515ec8009e7..6449014c002 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/ScoreExtractor.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/ScoreExtractor.java @@ -39,7 +39,7 @@ public class ScoreExtractor implements HitExtractor { } @Override - public String innerHitName() { + public String hitName() { return null; } diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/SourceExtractor.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/SourceExtractor.java deleted file mode 100644 index d99f8fa1b5f..00000000000 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/SourceExtractor.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.sql.execution.search.extractor; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.search.SearchHit; - -import java.io.IOException; -import java.util.Map; - -public class SourceExtractor implements HitExtractor { - /** - * Stands for {@code _source}. We try to use short names for {@link HitExtractor}s - * to save a few bytes when when we send them back to the user. 
- */ - public static final String NAME = "s"; - private final String fieldName; - - public SourceExtractor(String name) { - this.fieldName = name; - } - - SourceExtractor(StreamInput in) throws IOException { - fieldName = in.readString(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(fieldName); - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - public Object get(SearchHit hit) { - Map source = hit.getSourceAsMap(); - // TODO I think this will not work with dotted field names (objects or actual dots in the names) - // confusingly, I think this is actually handled by InnerHitExtractor. This needs investigating or renaming - // Tracked by https://github.com/elastic/x-pack-elasticsearch/issues/2874 - return source != null ? source.get(fieldName) : null; - } - - @Override - public String innerHitName() { - return null; - } - - @Override - public boolean equals(Object obj) { - if (obj == null || obj.getClass() != getClass()) { - return false; - } - SourceExtractor other = (SourceExtractor) obj; - return fieldName.equals(other.fieldName); - } - - @Override - public int hashCode() { - return fieldName.hashCode(); - } - - @Override - public String toString() { - /* # is sometimes known as the "hash" sign which reminds - * me of a hash table lookup. 
*/ - return "#" + fieldName; - } -} diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/expression/Alias.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/expression/Alias.java index 0899b84e01a..e2bd7726e09 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/expression/Alias.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/expression/Alias.java @@ -32,7 +32,7 @@ public class Alias extends NamedExpression { public Alias(Location location, String name, String qualifier, Expression child, ExpressionId id) { this(location, name, qualifier, child, id, false); } - + public Alias(Location location, String name, String qualifier, Expression child, ExpressionId id, boolean synthetic) { super(location, name, singletonList(child), id, synthetic); this.child = child; @@ -68,13 +68,13 @@ public class Alias extends NamedExpression { private Attribute createAttribute() { if (resolved()) { Expression c = child(); - + Attribute attr = Expressions.attribute(c); if (attr != null) { return attr.clone(location(), name(), child.dataType(), qualifier, child.nullable(), id(), synthetic()); } else { - return new RootFieldAttribute(location(), name(), child.dataType(), qualifier, child.nullable(), id(), synthetic()); + return new FieldAttribute(location(), null, name(), child.dataType(), qualifier, child.nullable(), id(), synthetic()); } } diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/expression/FieldAttribute.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/expression/FieldAttribute.java index d6783f6f423..7192f8eed3a 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/expression/FieldAttribute.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/expression/FieldAttribute.java @@ -5,50 +5,127 @@ */ package org.elasticsearch.xpack.sql.expression; -import java.util.Map; -import java.util.Map.Entry; - import org.elasticsearch.xpack.sql.analysis.index.MappingException; import 
org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.type.DataType; -import org.elasticsearch.xpack.sql.type.TextType; +import org.elasticsearch.xpack.sql.type.KeywordType; +import org.elasticsearch.xpack.sql.type.NestedType; +import org.elasticsearch.xpack.sql.type.StringType; +import org.elasticsearch.xpack.sql.util.StringUtils; -public abstract class FieldAttribute extends TypedAttribute { +import java.util.Map; +import java.util.Map.Entry; +import java.util.Objects; - FieldAttribute(Location location, String name, DataType dataType) { - this(location, name, dataType, null, true, null, false); +/** + * Attribute for an ES field. + * To differentiate between the different type of fields this class offers: + * - name - the fully qualified name (foo.bar.tar) + * - path - the path pointing to the field name (foo.bar) + * - parent - the immediate parent of the field; useful for figuring out the type of field (nested vs object) + * - nestedParent - if nested, what's the parent (which might not be the immediate one) + */ +public class FieldAttribute extends TypedAttribute { + + private final FieldAttribute parent; + private final FieldAttribute nestedParent; + private final String path; + + public FieldAttribute(Location location, String name, DataType dataType) { + this(location, null, name, dataType); } - FieldAttribute(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) { + public FieldAttribute(Location location, FieldAttribute parent, String name, DataType dataType) { + this(location, parent, name, dataType, null, true, null, false); + } + + public FieldAttribute(Location location, FieldAttribute parent, String name, DataType dataType, String qualifier, + boolean nullable, ExpressionId id, boolean synthetic) { super(location, name, dataType, qualifier, nullable, id, synthetic); - } + this.path = parent != null ? 
parent.name() : StringUtils.EMPTY; + this.parent = parent; - public boolean isAnalyzed() { - return dataType() instanceof TextType; - } - - public FieldAttribute notAnalyzedAttribute() { - if (isAnalyzed()) { - Map docValueFields = ((TextType) dataType()).docValueFields(); - if (docValueFields.size() == 1) { - Entry entry = docValueFields.entrySet().iterator().next(); - return with(entry.getKey(), entry.getValue()); + // figure out the last nested parent + FieldAttribute nestedPar = null; + if (parent != null) { + nestedPar = parent.nestedParent; + if (parent.dataType() instanceof NestedType) { + nestedPar = parent; } - if (docValueFields.isEmpty()) { + } + this.nestedParent = nestedPar; + } + + public FieldAttribute parent() { + return parent; + } + + public String path() { + return path; + } + + public String qualifiedPath() { + return qualifier() != null ? qualifier() + "." + path : path; + } + + public boolean isNested() { + return nestedParent != null; + } + + public FieldAttribute nestedParent() { + return nestedParent; + } + + public boolean isInexact() { + return (dataType() instanceof StringType && ((StringType) dataType()).isInexact()); + } + + public FieldAttribute exactAttribute() { + if (isInexact()) { + Map exactFields = ((StringType) dataType()).exactKeywords(); + if (exactFields.size() == 1) { + Entry entry = exactFields.entrySet().iterator().next(); + return innerField(entry.getKey(), entry.getValue()); + } + if (exactFields.isEmpty()) { throw new MappingException("No docValue multi-field defined for %s", name()); } - if (docValueFields.size() > 1) { - DataType dataType = docValueFields.get("keyword"); - if (dataType != null && dataType.hasDocValues()) { - return with("keyword", dataType); - } - throw new MappingException("Default 'keyword' not available as multi-fields and multiple options available for %s", name()); + // pick the default - keyword + if (exactFields.size() > 1) { + throw new MappingException("Multiple exact keyword candidates %s 
available for %s; specify which one to use", + exactFields.keySet(), name()); } } return this; } - protected FieldAttribute with(String subFieldName, DataType type) { - return (FieldAttribute) clone(location(), name() + "." + subFieldName, type, qualifier(), nullable(), id(), synthetic()); + private FieldAttribute innerField(String subFieldName, DataType type) { + return new FieldAttribute(location(), this, name() + "." + subFieldName, type, qualifier(), nullable(), id(), synthetic()); } -} + + @Override + protected Expression canonicalize() { + return new FieldAttribute(location(), null, "", dataType(), null, true, id(), false); + } + + @Override + protected Attribute clone(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) { + FieldAttribute qualifiedParent = parent != null ? (FieldAttribute) parent.withQualifier(qualifier) : null; + return new FieldAttribute(location, qualifiedParent, name, dataType, qualifier, nullable, id, synthetic); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), path); + } + + @Override + public boolean equals(Object obj) { + return super.equals(obj) && Objects.equals(path, ((FieldAttribute) obj).path); + } + + @Override + protected String label() { + return "f"; + } +} \ No newline at end of file diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/expression/NestedFieldAttribute.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/expression/NestedFieldAttribute.java deleted file mode 100644 index 744d1637f5c..00000000000 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/expression/NestedFieldAttribute.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.sql.expression; - -import java.util.List; - -import org.elasticsearch.xpack.sql.tree.Location; -import org.elasticsearch.xpack.sql.type.DataType; -import org.elasticsearch.xpack.sql.util.StringUtils; - -import static java.util.Collections.emptyList; - -public class NestedFieldAttribute extends FieldAttribute { - - private final List parents; - private final String parentPath; - - public NestedFieldAttribute(Location location, String name, DataType dataType, List parents) { - this(location, name, dataType, null, true, null, false, parents); - } - - public NestedFieldAttribute(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id, boolean synthetic, List parents) { - super(location, name, dataType, qualifier, nullable, id, synthetic); - this.parents = parents == null || parents.isEmpty() ? emptyList() : parents; - this.parentPath = StringUtils.concatWithDot(parents); - } - - public List parents() { - return parents; - } - - public String parentPath() { - return parentPath; - } - - @Override - protected Expression canonicalize() { - return new NestedFieldAttribute(location(), "", dataType(), null, true, id(), false, emptyList()); - } - - @Override - protected Attribute clone(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) { - return new NestedFieldAttribute(location, name, dataType, qualifier, nullable, id, synthetic, parents); - } - - @Override - public String toString() { - if (parents.size() > 0) { - return name().replace('.', '>') + "#" + id(); - } - return super.toString(); - } - - @Override - protected String label() { - return "n"; - } -} \ No newline at end of file diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/expression/RootFieldAttribute.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/expression/RootFieldAttribute.java deleted file mode 100644 index 
ec2a24de03f..00000000000 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/expression/RootFieldAttribute.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.sql.expression; - -import org.elasticsearch.xpack.sql.tree.Location; -import org.elasticsearch.xpack.sql.type.DataType; - -public class RootFieldAttribute extends FieldAttribute { - - public RootFieldAttribute(Location location, String name, DataType dataType) { - this(location, name, dataType, null, true, null, false); - } - - public RootFieldAttribute(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) { - super(location, name, dataType, qualifier, nullable, id, synthetic); - } - - @Override - protected Expression canonicalize() { - return new RootFieldAttribute(location(), "", dataType(), null, true, id(), false); - } - - @Override - protected Attribute clone(Location location, String name, DataType dataType, String qualifier, boolean nullable, ExpressionId id, boolean synthetic) { - return new RootFieldAttribute(location, name, dataType, qualifier, nullable, id, synthetic); - } - - @Override - protected String label() { - return "r"; - } -} \ No newline at end of file diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/expression/TypedAttribute.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/expression/TypedAttribute.java index 0a3f663ca08..ebd66f0ad0e 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/expression/TypedAttribute.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/expression/TypedAttribute.java @@ -35,11 +35,6 @@ public abstract class TypedAttribute extends Attribute { @Override public boolean 
equals(Object obj) { - if (super.equals(obj)) { - TypedAttribute other = (TypedAttribute) obj; - return Objects.equals(dataType, other.dataType); - } - - return false; + return super.equals(obj) && Objects.equals(dataType, ((TypedAttribute) obj).dataType); } } diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedAttribute.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedAttribute.java index 159649b2f2e..b87bd35cba3 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedAttribute.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedAttribute.java @@ -17,9 +17,11 @@ import java.util.Objects; import static java.lang.String.format; +// unfortunately we can't use UnresolvedNamedExpression public class UnresolvedAttribute extends Attribute implements Unresolvable { private final String unresolvedMsg; + private final boolean customMessage; private final Object resolutionMetadata; public UnresolvedAttribute(Location location, String name) { @@ -36,6 +38,7 @@ public class UnresolvedAttribute extends Attribute implements Unresolvable { public UnresolvedAttribute(Location location, String name, String qualifier, ExpressionId id, String unresolvedMessage, Object resolutionMetadata) { super(location, name, qualifier, id); + this.customMessage = unresolvedMessage != null; this.unresolvedMsg = unresolvedMessage == null ? 
errorMessage(qualifiedName(), null) : unresolvedMessage; this.resolutionMetadata = resolutionMetadata; } @@ -45,6 +48,10 @@ public class UnresolvedAttribute extends Attribute implements Unresolvable { return resolutionMetadata; } + public boolean customMessage() { + return customMessage; + } + @Override public boolean resolved() { return false; diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedStar.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedStar.java index 7e43cb9e82c..a4da706ae7e 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedStar.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/expression/UnresolvedStar.java @@ -14,7 +14,7 @@ import static java.util.Collections.emptyList; public class UnresolvedStar extends UnresolvedNamedExpression { - // typically used for nested fields + // typically used for nested fields or inner/dotted fields private final UnresolvedAttribute qualifier; public UnresolvedStar(Location location, UnresolvedAttribute qualifier) { diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java index f179a2c8d30..bfb7254f5ef 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/optimizer/Optimizer.java @@ -16,9 +16,9 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.ExpressionId; import org.elasticsearch.xpack.sql.expression.ExpressionSet; import org.elasticsearch.xpack.sql.expression.Expressions; +import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.NamedExpression; -import org.elasticsearch.xpack.sql.expression.NestedFieldAttribute; import 
org.elasticsearch.xpack.sql.expression.Order; import org.elasticsearch.xpack.sql.expression.function.Function; import org.elasticsearch.xpack.sql.expression.function.FunctionAttribute; @@ -95,19 +95,19 @@ public class Optimizer extends RuleExecutor { @Override protected Iterable.Batch> batches() { - Batch resolution = new Batch("Finish Analysis", + Batch resolution = new Batch("Finish Analysis", new PruneSubqueryAliases(), CleanAliases.INSTANCE ); - Batch aggregate = new Batch("Aggregation", + Batch aggregate = new Batch("Aggregation", new PruneDuplicatesInGroupBy(), new ReplaceDuplicateAggsWithReferences(), new ReplaceAggsWithMatrixStats(), new ReplaceAggsWithExtendedStats(), new ReplaceAggsWithStats(), - new PromoteStatsToExtendedStats(), - new ReplaceAggsWithPercentiles(), + new PromoteStatsToExtendedStats(), + new ReplaceAggsWithPercentiles(), new ReplceAggsWithPercentileRanks() ); @@ -134,10 +134,10 @@ public class Optimizer extends RuleExecutor { new SkipQueryOnLimitZero(), new SkipQueryIfFoldingProjection() ); - //new BalanceBooleanTrees()); + //new BalanceBooleanTrees()); Batch label = new Batch("Set as Optimized", Limiter.ONCE, new SetAsOptimized()); - + return Arrays.asList(resolution, aggregate, operators, local, label); } @@ -157,7 +157,7 @@ public class Optimizer extends RuleExecutor { static class CleanAliases extends OptimizerRule { private static final CleanAliases INSTANCE = new CleanAliases(); - + CleanAliases() { super(TransformDirection.UP); } @@ -308,7 +308,7 @@ public class Optimizer extends RuleExecutor { return p; } - // update old agg attributes + // update old agg attributes return ReplaceAggsWithStats.updateAggAttributes(p, promotedFunctionIds); } @@ -360,12 +360,12 @@ public class Optimizer extends RuleExecutor { Map potentialPromotions = new LinkedHashMap<>(); p.forEachExpressionsUp(e -> collect(e, potentialPromotions)); - + // no promotions found - skip if (potentialPromotions.isEmpty()) { return p; } - + // start promotion // old 
functionId to new function attribute @@ -431,13 +431,13 @@ public class Optimizer extends RuleExecutor { // 2a. collect ScalarFunctions that unwrapped refer to any of the updated aggregates // 2b. replace any of the old ScalarFunction attributes - + final Set newAggIds = new LinkedHashSet<>(promotedFunctionIds.size()); - + for (AggregateFunctionAttribute afa : promotedFunctionIds.values()) { newAggIds.add(afa.functionId()); } - + final Map updatedScalarAttrs = new LinkedHashMap<>(); final Map updatedScalarAliases = new LinkedHashMap<>(); @@ -452,7 +452,7 @@ public class Optimizer extends RuleExecutor { sfa = updatedScalarAliases.getOrDefault(sfa.id(), sfa); return sfa; } - + // unwrap aliases as they 'hide' functions under their own attributes if (e instanceof Alias) { Attribute att = Expressions.attribute(e); @@ -500,7 +500,7 @@ public class Optimizer extends RuleExecutor { return e; } } - + static class PromoteStatsToExtendedStats extends Rule { @Override @@ -709,8 +709,11 @@ public class Optimizer extends RuleExecutor { for (Order order : ob.order()) { Attribute attr = ((NamedExpression) order.child()).toAttribute(); - if (attr instanceof NestedFieldAttribute) { - nestedOrders.put(((NestedFieldAttribute) attr).parentPath(), order); + if (attr instanceof FieldAttribute) { + FieldAttribute fa = (FieldAttribute) attr; + if (fa.isNested()) { + nestedOrders.put(fa.nestedParent().name(), order); + } } } @@ -723,8 +726,11 @@ public class Optimizer extends RuleExecutor { List nestedTopFields = new ArrayList<>(); for (Attribute attr : project.output()) { - if (attr instanceof NestedFieldAttribute) { - nestedTopFields.add(((NestedFieldAttribute) attr).parentPath()); + if (attr instanceof FieldAttribute) { + FieldAttribute fa = (FieldAttribute) attr; + if (fa.isNested()) { + nestedTopFields.add(fa.nestedParent().name()); + } } } @@ -933,7 +939,7 @@ public class Optimizer extends RuleExecutor { return project; } - // normally only the upper projections should survive but 
since the lower list might have aliases definitions + // normally only the upper projections should survive but since the lower list might have aliases definitions // that might be reused by the upper one, these need to be replaced. // for example an alias defined in the lower list might be referred in the upper - without replacing it the alias becomes invalid private List combineProjections(List upper, List lower) { @@ -948,7 +954,7 @@ public class Optimizer extends RuleExecutor { AttributeMap aliases = new AttributeMap<>(map); List replaced = new ArrayList<>(); - + // replace any matching attribute with a lower alias (if there's a match) // but clean-up non-top aliases at the end for (NamedExpression ne : upper) { @@ -956,7 +962,7 @@ public class Optimizer extends RuleExecutor { Alias as = aliases.get(a); return as != null ? as : a; }, Attribute.class); - + replaced.add((NamedExpression) CleanAliases.trimNonTopLevelAliases(replacedExp)); } return replaced; @@ -991,13 +997,13 @@ public class Optimizer extends RuleExecutor { } } }, Project.class); - + if (attrs.isEmpty()) { return plan; } - + AtomicBoolean stop = new AtomicBoolean(false); - + // propagate folding up to unary nodes // anything higher and the propagate stops plan = plan.transformUp(p -> { @@ -1021,12 +1027,12 @@ public class Optimizer extends RuleExecutor { return p; }); - + // finally clean-up aliases return CleanAliases.INSTANCE.apply(plan); - + } - + private boolean canPropagateFoldable(LogicalPlan p) { return p instanceof Project || p instanceof Filter || p instanceof SubQueryAlias || p instanceof Aggregate || p instanceof Limit || p instanceof OrderBy; } @@ -1061,7 +1067,7 @@ public class Optimizer extends RuleExecutor { } return e; } - + private Expression fold(Expression e) { // literals are always foldable, so avoid creating a duplicate if (e.foldable() && !(e instanceof Literal)) { @@ -1070,7 +1076,7 @@ public class Optimizer extends RuleExecutor { return e; } } - + static class 
BooleanSimplification extends OptimizerExpressionRule { BooleanSimplification() { @@ -1168,7 +1174,7 @@ public class Optimizer extends RuleExecutor { return combineAnd(combine(common, new Or(combineLeft.location(), combineLeft, combineRight))); } - // TODO: eliminate conjunction/disjunction + // TODO: eliminate conjunction/disjunction return bc; } @@ -1271,7 +1277,7 @@ public class Optimizer extends RuleExecutor { return new Range(and.location(), lb.left(), lb.right(), l instanceof GreaterThanOrEqual, rb.right(), r instanceof LessThanOrEqual); } - // />= + // />= else if ((r instanceof GreaterThan || r instanceof GreaterThanOrEqual) && (l instanceof LessThan || l instanceof LessThanOrEqual)) { return new Range(and.location(), rb.left(), rb.right(), r instanceof GreaterThanOrEqual, lb.right(), @@ -1289,7 +1295,7 @@ public class Optimizer extends RuleExecutor { @Override protected LogicalPlan rule(Limit limit) { if (limit.limit() instanceof Literal) { - if (Integer.valueOf(0).equals((Number) (((Literal) limit.limit()).fold()))) { + if (Integer.valueOf(0).equals((((Literal) limit.limit()).fold()))) { return new LocalRelation(limit.location(), new EmptyExecutable(limit.output())); } } diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java index dae65a10049..4f570eb8363 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java @@ -46,7 +46,6 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ArithmeticBinaryContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ArithmeticUnaryContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.BooleanLiteralContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.CastContext; -import 
org.elasticsearch.xpack.sql.parser.SqlBaseParser.ColumnExpressionContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ColumnReferenceContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ComparisonContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DecimalLiteralContext; @@ -71,7 +70,6 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StarContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StringLiteralContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StringQueryContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.SubqueryExpressionContext; -import org.elasticsearch.xpack.sql.plan.TableIdentifier; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.DataTypes; @@ -119,42 +117,18 @@ abstract class ExpressionBuilder extends IdentifierBuilder { @Override public Expression visitStar(StarContext ctx) { - return new UnresolvedStar(source(ctx), ctx.qualifier != null ? visitColumnExpression(ctx.qualifier) : null); - } - - @Override - public Object visitDereference(DereferenceContext ctx) { - String fieldName = visitIdentifier(ctx.fieldName); - String qualifier = null; - Expression base = expression(ctx.base); - if (base != null) { - if (base instanceof UnresolvedAttribute) { - UnresolvedAttribute b = (UnresolvedAttribute) base; - return new UnresolvedAttribute(source(ctx), b.name() + "." 
+ fieldName, b.qualifier()); - } - else { - throw new UnsupportedOperationException(format(Locale.ROOT, "Uknown dereferencing using %s ", base.getClass())); - } - } - return new UnresolvedAttribute(source(ctx), fieldName, qualifier); - } - - @Override - public UnresolvedAttribute visitColumnExpression(ColumnExpressionContext ctx) { - String qualifier = null; - if (ctx.alias != null) { - qualifier = visitIdentifier(ctx.alias); - } - else if (ctx.table != null) { - TableIdentifier table = visitTableIdentifier(ctx.table); - qualifier = table.index(); - } - return new UnresolvedAttribute(source(ctx), visitIdentifier(ctx.name), qualifier); + return new UnresolvedStar(source(ctx), ctx.qualifiedName() != null ? + new UnresolvedAttribute(source(ctx.qualifiedName()), visitQualifiedName(ctx.qualifiedName())) : null); } @Override public Object visitColumnReference(ColumnReferenceContext ctx) { - return visitColumnExpression(ctx.columnExpression()); + return new UnresolvedAttribute(source(ctx), visitIdentifier(ctx.identifier())); + } + + @Override + public Object visitDereference(DereferenceContext ctx) { + return new UnresolvedAttribute(source(ctx), visitQualifiedName(ctx.qualifiedName())); } @Override @@ -217,7 +191,7 @@ abstract class ExpressionBuilder extends IdentifierBuilder { case SqlBaseParser.RLIKE: e = new RLike(loc, exp, expression(pCtx.pattern)); break; - case SqlBaseParser.NULL:; + case SqlBaseParser.NULL: // shortcut to avoid double negation later on (since there's no IsNull (missing in ES is a negated exists)) e = new IsNotNull(loc, exp); return pCtx.NOT() != null ? 
e : new Not(loc, e); diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/IdentifierBuilder.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/IdentifierBuilder.java index 7099b3100be..965333b1709 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/IdentifierBuilder.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/IdentifierBuilder.java @@ -21,7 +21,7 @@ abstract class IdentifierBuilder extends AbstractBuilder { @Override public TableIdentifier visitTableIdentifier(TableIdentifierContext ctx) { String index = text(ctx.index); - + Location source = source(ctx); validateIndex(index, source); @@ -29,14 +29,14 @@ abstract class IdentifierBuilder extends AbstractBuilder { } // see https://github.com/elastic/elasticsearch/issues/6736 - private static void validateIndex(String index, Location source) { + static void validateIndex(String index, Location source) { for (int i = 0; i < index.length(); i++) { char c = index.charAt(i); if (Character.isUpperCase(c)) { throw new ParsingException(source, format(Locale.ROOT, "Invalid index name (needs to be lowercase) %s", index)); } - if (c == '.' || c == '\\' || c == '/' || c == '*' || c == '?' 
|| c == '<' || c == '>' || c == '|' || c == ',') { - throw new ParsingException(source, format(Locale.ROOT, "Illegal character %c in index name %s", c, index)); + if (c == '\\' || c == '/' || c == '<' || c == '>' || c == '|' || c == ',' || c == ' ') { + throw new ParsingException(source, format(Locale.ROOT, "Invalid index name (illegal character %c) %s", c, index)); } } } diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java index 4fdc1fcb564..a8c6bce1104 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java @@ -640,18 +640,6 @@ class SqlBaseBaseListener implements SqlBaseListener { *

The default implementation does nothing.

*/ @Override public void exitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx) { } - /** - * {@inheritDoc} - * - *

The default implementation does nothing.

- */ - @Override public void enterColumnExpression(SqlBaseParser.ColumnExpressionContext ctx) { } - /** - * {@inheritDoc} - * - *

The default implementation does nothing.

- */ - @Override public void exitColumnExpression(SqlBaseParser.ColumnExpressionContext ctx) { } /** * {@inheritDoc} * diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java index f317237eae1..81b4df555d0 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java @@ -380,13 +380,6 @@ class SqlBaseBaseVisitor extends AbstractParseTreeVisitor implements SqlBa * {@link #visitChildren} on {@code ctx}.

*/ @Override public T visitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx) { return visitChildren(ctx); } - /** - * {@inheritDoc} - * - *

The default implementation returns the result of calling - * {@link #visitChildren} on {@code ctx}.

- */ - @Override public T visitColumnExpression(SqlBaseParser.ColumnExpressionContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java index 595f651cd17..2589ae51de5 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java @@ -30,11 +30,11 @@ class SqlBaseLexer extends Lexer { MAPPED=39, MATCH=40, NATURAL=41, NOT=42, NULL=43, ON=44, OPTIMIZED=45, OR=46, ORDER=47, OUTER=48, PARSED=49, PHYSICAL=50, PLAN=51, QUERY=52, RIGHT=53, RLIKE=54, SCHEMAS=55, SELECT=56, SHOW=57, TABLES=58, TEXT=59, - TRUE=60, USING=61, VERIFY=62, WHEN=63, WHERE=64, WITH=65, EQ=66, NEQ=67, - LT=68, LTE=69, GT=70, GTE=71, PLUS=72, MINUS=73, ASTERISK=74, SLASH=75, - PERCENT=76, CONCAT=77, STRING=78, INTEGER_VALUE=79, DECIMAL_VALUE=80, - IDENTIFIER=81, DIGIT_IDENTIFIER=82, QUOTED_IDENTIFIER=83, BACKQUOTED_IDENTIFIER=84, - SIMPLE_COMMENT=85, BRACKETED_COMMENT=86, WS=87, UNRECOGNIZED=88; + TRUE=60, USING=61, VERIFY=62, WHERE=63, WITH=64, EQ=65, NEQ=66, LT=67, + LTE=68, GT=69, GTE=70, PLUS=71, MINUS=72, ASTERISK=73, SLASH=74, PERCENT=75, + CONCAT=76, STRING=77, INTEGER_VALUE=78, DECIMAL_VALUE=79, IDENTIFIER=80, + DIGIT_IDENTIFIER=81, QUOTED_IDENTIFIER=82, BACKQUOTED_IDENTIFIER=83, SIMPLE_COMMENT=84, + BRACKETED_COMMENT=85, WS=86, UNRECOGNIZED=87; public static String[] modeNames = { "DEFAULT_MODE" }; @@ -47,9 +47,9 @@ class SqlBaseLexer extends Lexer { "IS", "JOIN", "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "NATURAL", "NOT", "NULL", "ON", "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", "QUERY", "RIGHT", "RLIKE", "SCHEMAS", "SELECT", "SHOW", "TABLES", - "TEXT", "TRUE", "USING", "VERIFY", "WHEN", "WHERE", "WITH", "EQ", "NEQ", - "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", 
"PERCENT", - "CONCAT", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", + "TEXT", "TRUE", "USING", "VERIFY", "WHERE", "WITH", "EQ", "NEQ", "LT", + "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CONCAT", + "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "EXPONENT", "DIGIT", "LETTER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", "UNRECOGNIZED" }; @@ -64,8 +64,8 @@ class SqlBaseLexer extends Lexer { "'NULL'", "'ON'", "'OPTIMIZED'", "'OR'", "'ORDER'", "'OUTER'", "'PARSED'", "'PHYSICAL'", "'PLAN'", "'QUERY'", "'RIGHT'", "'RLIKE'", "'SCHEMAS'", "'SELECT'", "'SHOW'", "'TABLES'", "'TEXT'", "'TRUE'", "'USING'", "'VERIFY'", - "'WHEN'", "'WHERE'", "'WITH'", "'='", null, "'<'", "'<='", "'>'", "'>='", - "'+'", "'-'", "'*'", "'/'", "'%'", "'||'" + "'WHERE'", "'WITH'", "'='", null, "'<'", "'<='", "'>'", "'>='", "'+'", + "'-'", "'*'", "'/'", "'%'", "'||'" }; private static final String[] _SYMBOLIC_NAMES = { null, null, null, null, null, "ALL", "ANALYZE", "ANALYZED", "AND", "ANY", @@ -75,9 +75,9 @@ class SqlBaseLexer extends Lexer { "IS", "JOIN", "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "NATURAL", "NOT", "NULL", "ON", "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", "QUERY", "RIGHT", "RLIKE", "SCHEMAS", "SELECT", "SHOW", "TABLES", - "TEXT", "TRUE", "USING", "VERIFY", "WHEN", "WHERE", "WITH", "EQ", "NEQ", - "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", - "CONCAT", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", + "TEXT", "TRUE", "USING", "VERIFY", "WHERE", "WITH", "EQ", "NEQ", "LT", + "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CONCAT", + "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", "UNRECOGNIZED" }; @@ -136,7 +136,7 @@ class 
SqlBaseLexer extends Lexer { public ATN getATN() { return _ATN; } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2Z\u02f5\b\1\4\2\t"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2Y\u02ee\b\1\4\2\t"+ "\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+ "\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ @@ -146,252 +146,250 @@ class SqlBaseLexer extends Lexer { "\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\4;\t;\4<\t<\4=\t="+ "\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4I"+ "\tI\4J\tJ\4K\tK\4L\tL\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT"+ - "\4U\tU\4V\tV\4W\tW\4X\tX\4Y\tY\4Z\tZ\4[\t[\4\\\t\\\3\2\3\2\3\3\3\3\3\4"+ - "\3\4\3\5\3\5\3\6\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\b\3\b\3"+ - "\b\3\b\3\b\3\b\3\b\3\b\3\b\3\t\3\t\3\t\3\t\3\n\3\n\3\n\3\n\3\13\3\13\3"+ - "\13\3\f\3\f\3\f\3\f\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\16\3\16\3\16\3\17"+ - "\3\17\3\17\3\17\3\17\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\21\3\21"+ - "\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\22\3\23\3\23\3\23\3\23\3\23"+ - "\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\25"+ - "\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\26\3\26\3\26\3\26"+ - "\3\26\3\26\3\26\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\30\3\30\3\30"+ - "\3\30\3\30\3\30\3\30\3\30\3\31\3\31\3\31\3\31\3\31\3\31\3\32\3\32\3\32"+ - "\3\32\3\32\3\32\3\32\3\33\3\33\3\33\3\33\3\33\3\34\3\34\3\34\3\34\3\34"+ - "\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\36\3\36\3\36\3\36"+ - "\3\36\3\36\3\36\3\36\3\36\3\37\3\37\3\37\3\37\3\37\3\37\3 \3 \3 \3 \3"+ - " \3 \3 \3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3\"\3#\3#\3#\3$\3$\3$\3$\3$\3%\3"+ - "%\3%\3%\3%\3&\3&\3&\3&\3&\3\'\3\'\3\'\3\'\3\'\3\'\3(\3(\3(\3(\3(\3(\3"+ - "(\3)\3)\3)\3)\3)\3)\3*\3*\3*\3*\3*\3*\3*\3*\3+\3+\3+\3+\3,\3,\3,\3,\3"+ - 
",\3-\3-\3-\3.\3.\3.\3.\3.\3.\3.\3.\3.\3.\3/\3/\3/\3\60\3\60\3\60\3\60"+ - "\3\60\3\60\3\61\3\61\3\61\3\61\3\61\3\61\3\62\3\62\3\62\3\62\3\62\3\62"+ - "\3\62\3\63\3\63\3\63\3\63\3\63\3\63\3\63\3\63\3\63\3\64\3\64\3\64\3\64"+ - "\3\64\3\65\3\65\3\65\3\65\3\65\3\65\3\66\3\66\3\66\3\66\3\66\3\66\3\67"+ - "\3\67\3\67\3\67\3\67\3\67\38\38\38\38\38\38\38\38\39\39\39\39\39\39\3"+ - "9\3:\3:\3:\3:\3:\3;\3;\3;\3;\3;\3;\3;\3<\3<\3<\3<\3<\3=\3=\3=\3=\3=\3"+ - ">\3>\3>\3>\3>\3>\3?\3?\3?\3?\3?\3?\3?\3@\3@\3@\3@\3@\3A\3A\3A\3A\3A\3"+ - "A\3B\3B\3B\3B\3B\3C\3C\3D\3D\3D\3D\3D\3D\3D\5D\u0241\nD\3E\3E\3F\3F\3"+ - "F\3G\3G\3H\3H\3H\3I\3I\3J\3J\3K\3K\3L\3L\3M\3M\3N\3N\3N\3O\3O\3O\3O\7"+ - "O\u025e\nO\fO\16O\u0261\13O\3O\3O\3P\6P\u0266\nP\rP\16P\u0267\3Q\6Q\u026b"+ - "\nQ\rQ\16Q\u026c\3Q\3Q\7Q\u0271\nQ\fQ\16Q\u0274\13Q\3Q\3Q\6Q\u0278\nQ"+ - "\rQ\16Q\u0279\3Q\6Q\u027d\nQ\rQ\16Q\u027e\3Q\3Q\7Q\u0283\nQ\fQ\16Q\u0286"+ - "\13Q\5Q\u0288\nQ\3Q\3Q\3Q\3Q\6Q\u028e\nQ\rQ\16Q\u028f\3Q\3Q\5Q\u0294\n"+ - "Q\3R\3R\5R\u0298\nR\3R\3R\3R\7R\u029d\nR\fR\16R\u02a0\13R\3S\3S\3S\3S"+ - "\6S\u02a6\nS\rS\16S\u02a7\3T\3T\3T\3T\7T\u02ae\nT\fT\16T\u02b1\13T\3T"+ - "\3T\3U\3U\3U\3U\7U\u02b9\nU\fU\16U\u02bc\13U\3U\3U\3V\3V\5V\u02c2\nV\3"+ - "V\6V\u02c5\nV\rV\16V\u02c6\3W\3W\3X\3X\3Y\3Y\3Y\3Y\7Y\u02d1\nY\fY\16Y"+ - "\u02d4\13Y\3Y\5Y\u02d7\nY\3Y\5Y\u02da\nY\3Y\3Y\3Z\3Z\3Z\3Z\3Z\7Z\u02e3"+ - "\nZ\fZ\16Z\u02e6\13Z\3Z\3Z\3Z\3Z\3Z\3[\6[\u02ee\n[\r[\16[\u02ef\3[\3["+ - "\3\\\3\\\3\u02e4\2]\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r"+ - "\31\16\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27-\30/\31\61\32\63\33"+ - "\65\34\67\359\36;\37= ?!A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.[/]\60_\61a\62c\63"+ - "e\64g\65i\66k\67m8o9q:s;u{?}@\177A\u0081B\u0083C\u0085D\u0087E\u0089"+ - "F\u008bG\u008dH\u008fI\u0091J\u0093K\u0095L\u0097M\u0099N\u009bO\u009d"+ - "P\u009fQ\u00a1R\u00a3S\u00a5T\u00a7U\u00a9V\u00ab\2\u00ad\2\u00af\2\u00b1"+ - "W\u00b3X\u00b5Y\u00b7Z\3\2\13\3\2))\5\2<\3\2\2\2\u016c\u016d\7J\2\2\u016d\u016e\7C\2\2\u016e"+ - 
"\u016f\7X\2\2\u016f\u0170\7K\2\2\u0170\u0171\7P\2\2\u0171\u0172\7I\2\2"+ - "\u0172@\3\2\2\2\u0173\u0174\7K\2\2\u0174\u0175\7P\2\2\u0175B\3\2\2\2\u0176"+ - "\u0177\7K\2\2\u0177\u0178\7P\2\2\u0178\u0179\7P\2\2\u0179\u017a\7G\2\2"+ - "\u017a\u017b\7T\2\2\u017bD\3\2\2\2\u017c\u017d\7K\2\2\u017d\u017e\7U\2"+ - "\2\u017eF\3\2\2\2\u017f\u0180\7L\2\2\u0180\u0181\7Q\2\2\u0181\u0182\7"+ - "K\2\2\u0182\u0183\7P\2\2\u0183H\3\2\2\2\u0184\u0185\7N\2\2\u0185\u0186"+ - "\7G\2\2\u0186\u0187\7H\2\2\u0187\u0188\7V\2\2\u0188J\3\2\2\2\u0189\u018a"+ - "\7N\2\2\u018a\u018b\7K\2\2\u018b\u018c\7M\2\2\u018c\u018d\7G\2\2\u018d"+ - "L\3\2\2\2\u018e\u018f\7N\2\2\u018f\u0190\7K\2\2\u0190\u0191\7O\2\2\u0191"+ - "\u0192\7K\2\2\u0192\u0193\7V\2\2\u0193N\3\2\2\2\u0194\u0195\7O\2\2\u0195"+ - "\u0196\7C\2\2\u0196\u0197\7R\2\2\u0197\u0198\7R\2\2\u0198\u0199\7G\2\2"+ - "\u0199\u019a\7F\2\2\u019aP\3\2\2\2\u019b\u019c\7O\2\2\u019c\u019d\7C\2"+ - "\2\u019d\u019e\7V\2\2\u019e\u019f\7E\2\2\u019f\u01a0\7J\2\2\u01a0R\3\2"+ - "\2\2\u01a1\u01a2\7P\2\2\u01a2\u01a3\7C\2\2\u01a3\u01a4\7V\2\2\u01a4\u01a5"+ - "\7W\2\2\u01a5\u01a6\7T\2\2\u01a6\u01a7\7C\2\2\u01a7\u01a8\7N\2\2\u01a8"+ - "T\3\2\2\2\u01a9\u01aa\7P\2\2\u01aa\u01ab\7Q\2\2\u01ab\u01ac\7V\2\2\u01ac"+ - "V\3\2\2\2\u01ad\u01ae\7P\2\2\u01ae\u01af\7W\2\2\u01af\u01b0\7N\2\2\u01b0"+ - "\u01b1\7N\2\2\u01b1X\3\2\2\2\u01b2\u01b3\7Q\2\2\u01b3\u01b4\7P\2\2\u01b4"+ - "Z\3\2\2\2\u01b5\u01b6\7Q\2\2\u01b6\u01b7\7R\2\2\u01b7\u01b8\7V\2\2\u01b8"+ - "\u01b9\7K\2\2\u01b9\u01ba\7O\2\2\u01ba\u01bb\7K\2\2\u01bb\u01bc\7\\\2"+ - "\2\u01bc\u01bd\7G\2\2\u01bd\u01be\7F\2\2\u01be\\\3\2\2\2\u01bf\u01c0\7"+ - "Q\2\2\u01c0\u01c1\7T\2\2\u01c1^\3\2\2\2\u01c2\u01c3\7Q\2\2\u01c3\u01c4"+ - "\7T\2\2\u01c4\u01c5\7F\2\2\u01c5\u01c6\7G\2\2\u01c6\u01c7\7T\2\2\u01c7"+ - "`\3\2\2\2\u01c8\u01c9\7Q\2\2\u01c9\u01ca\7W\2\2\u01ca\u01cb\7V\2\2\u01cb"+ - "\u01cc\7G\2\2\u01cc\u01cd\7T\2\2\u01cdb\3\2\2\2\u01ce\u01cf\7R\2\2\u01cf"+ - "\u01d0\7C\2\2\u01d0\u01d1\7T\2\2\u01d1\u01d2\7U\2\2\u01d2\u01d3\7G\2\2"+ - 
"\u01d3\u01d4\7F\2\2\u01d4d\3\2\2\2\u01d5\u01d6\7R\2\2\u01d6\u01d7\7J\2"+ - "\2\u01d7\u01d8\7[\2\2\u01d8\u01d9\7U\2\2\u01d9\u01da\7K\2\2\u01da\u01db"+ - "\7E\2\2\u01db\u01dc\7C\2\2\u01dc\u01dd\7N\2\2\u01ddf\3\2\2\2\u01de\u01df"+ - "\7R\2\2\u01df\u01e0\7N\2\2\u01e0\u01e1\7C\2\2\u01e1\u01e2\7P\2\2\u01e2"+ - "h\3\2\2\2\u01e3\u01e4\7S\2\2\u01e4\u01e5\7W\2\2\u01e5\u01e6\7G\2\2\u01e6"+ - "\u01e7\7T\2\2\u01e7\u01e8\7[\2\2\u01e8j\3\2\2\2\u01e9\u01ea\7T\2\2\u01ea"+ - "\u01eb\7K\2\2\u01eb\u01ec\7I\2\2\u01ec\u01ed\7J\2\2\u01ed\u01ee\7V\2\2"+ - "\u01eel\3\2\2\2\u01ef\u01f0\7T\2\2\u01f0\u01f1\7N\2\2\u01f1\u01f2\7K\2"+ - "\2\u01f2\u01f3\7M\2\2\u01f3\u01f4\7G\2\2\u01f4n\3\2\2\2\u01f5\u01f6\7"+ - "U\2\2\u01f6\u01f7\7E\2\2\u01f7\u01f8\7J\2\2\u01f8\u01f9\7G\2\2\u01f9\u01fa"+ - "\7O\2\2\u01fa\u01fb\7C\2\2\u01fb\u01fc\7U\2\2\u01fcp\3\2\2\2\u01fd\u01fe"+ - "\7U\2\2\u01fe\u01ff\7G\2\2\u01ff\u0200\7N\2\2\u0200\u0201\7G\2\2\u0201"+ - "\u0202\7E\2\2\u0202\u0203\7V\2\2\u0203r\3\2\2\2\u0204\u0205\7U\2\2\u0205"+ - "\u0206\7J\2\2\u0206\u0207\7Q\2\2\u0207\u0208\7Y\2\2\u0208t\3\2\2\2\u0209"+ - "\u020a\7V\2\2\u020a\u020b\7C\2\2\u020b\u020c\7D\2\2\u020c\u020d\7N\2\2"+ - "\u020d\u020e\7G\2\2\u020e\u020f\7U\2\2\u020fv\3\2\2\2\u0210\u0211\7V\2"+ - "\2\u0211\u0212\7G\2\2\u0212\u0213\7Z\2\2\u0213\u0214\7V\2\2\u0214x\3\2"+ - "\2\2\u0215\u0216\7V\2\2\u0216\u0217\7T\2\2\u0217\u0218\7W\2\2\u0218\u0219"+ - "\7G\2\2\u0219z\3\2\2\2\u021a\u021b\7W\2\2\u021b\u021c\7U\2\2\u021c\u021d"+ - "\7K\2\2\u021d\u021e\7P\2\2\u021e\u021f\7I\2\2\u021f|\3\2\2\2\u0220\u0221"+ - "\7X\2\2\u0221\u0222\7G\2\2\u0222\u0223\7T\2\2\u0223\u0224\7K\2\2\u0224"+ - "\u0225\7H\2\2\u0225\u0226\7[\2\2\u0226~\3\2\2\2\u0227\u0228\7Y\2\2\u0228"+ - "\u0229\7J\2\2\u0229\u022a\7G\2\2\u022a\u022b\7P\2\2\u022b\u0080\3\2\2"+ - "\2\u022c\u022d\7Y\2\2\u022d\u022e\7J\2\2\u022e\u022f\7G\2\2\u022f\u0230"+ - "\7T\2\2\u0230\u0231\7G\2\2\u0231\u0082\3\2\2\2\u0232\u0233\7Y\2\2\u0233"+ - "\u0234\7K\2\2\u0234\u0235\7V\2\2\u0235\u0236\7J\2\2\u0236\u0084\3\2\2"+ 
- "\2\u0237\u0238\7?\2\2\u0238\u0086\3\2\2\2\u0239\u023a\7>\2\2\u023a\u0241"+ - "\7@\2\2\u023b\u023c\7#\2\2\u023c\u0241\7?\2\2\u023d\u023e\7>\2\2\u023e"+ - "\u023f\7?\2\2\u023f\u0241\7@\2\2\u0240\u0239\3\2\2\2\u0240\u023b\3\2\2"+ - "\2\u0240\u023d\3\2\2\2\u0241\u0088\3\2\2\2\u0242\u0243\7>\2\2\u0243\u008a"+ - "\3\2\2\2\u0244\u0245\7>\2\2\u0245\u0246\7?\2\2\u0246\u008c\3\2\2\2\u0247"+ - "\u0248\7@\2\2\u0248\u008e\3\2\2\2\u0249\u024a\7@\2\2\u024a\u024b\7?\2"+ - "\2\u024b\u0090\3\2\2\2\u024c\u024d\7-\2\2\u024d\u0092\3\2\2\2\u024e\u024f"+ - "\7/\2\2\u024f\u0094\3\2\2\2\u0250\u0251\7,\2\2\u0251\u0096\3\2\2\2\u0252"+ - "\u0253\7\61\2\2\u0253\u0098\3\2\2\2\u0254\u0255\7\'\2\2\u0255\u009a\3"+ - "\2\2\2\u0256\u0257\7~\2\2\u0257\u0258\7~\2\2\u0258\u009c\3\2\2\2\u0259"+ - "\u025f\7)\2\2\u025a\u025e\n\2\2\2\u025b\u025c\7)\2\2\u025c\u025e\7)\2"+ - "\2\u025d\u025a\3\2\2\2\u025d\u025b\3\2\2\2\u025e\u0261\3\2\2\2\u025f\u025d"+ - "\3\2\2\2\u025f\u0260\3\2\2\2\u0260\u0262\3\2\2\2\u0261\u025f\3\2\2\2\u0262"+ - "\u0263\7)\2\2\u0263\u009e\3\2\2\2\u0264\u0266\5\u00adW\2\u0265\u0264\3"+ - "\2\2\2\u0266\u0267\3\2\2\2\u0267\u0265\3\2\2\2\u0267\u0268\3\2\2\2\u0268"+ - "\u00a0\3\2\2\2\u0269\u026b\5\u00adW\2\u026a\u0269\3\2\2\2\u026b\u026c"+ - "\3\2\2\2\u026c\u026a\3\2\2\2\u026c\u026d\3\2\2\2\u026d\u026e\3\2\2\2\u026e"+ - "\u0272\7\60\2\2\u026f\u0271\5\u00adW\2\u0270\u026f\3\2\2\2\u0271\u0274"+ - "\3\2\2\2\u0272\u0270\3\2\2\2\u0272\u0273\3\2\2\2\u0273\u0294\3\2\2\2\u0274"+ - "\u0272\3\2\2\2\u0275\u0277\7\60\2\2\u0276\u0278\5\u00adW\2\u0277\u0276"+ - "\3\2\2\2\u0278\u0279\3\2\2\2\u0279\u0277\3\2\2\2\u0279\u027a\3\2\2\2\u027a"+ - "\u0294\3\2\2\2\u027b\u027d\5\u00adW\2\u027c\u027b\3\2\2\2\u027d\u027e"+ - "\3\2\2\2\u027e\u027c\3\2\2\2\u027e\u027f\3\2\2\2\u027f\u0287\3\2\2\2\u0280"+ - "\u0284\7\60\2\2\u0281\u0283\5\u00adW\2\u0282\u0281\3\2\2\2\u0283\u0286"+ - "\3\2\2\2\u0284\u0282\3\2\2\2\u0284\u0285\3\2\2\2\u0285\u0288\3\2\2\2\u0286"+ - 
"\u0284\3\2\2\2\u0287\u0280\3\2\2\2\u0287\u0288\3\2\2\2\u0288\u0289\3\2"+ - "\2\2\u0289\u028a\5\u00abV\2\u028a\u0294\3\2\2\2\u028b\u028d\7\60\2\2\u028c"+ - "\u028e\5\u00adW\2\u028d\u028c\3\2\2\2\u028e\u028f\3\2\2\2\u028f\u028d"+ - "\3\2\2\2\u028f\u0290\3\2\2\2\u0290\u0291\3\2\2\2\u0291\u0292\5\u00abV"+ - "\2\u0292\u0294\3\2\2\2\u0293\u026a\3\2\2\2\u0293\u0275\3\2\2\2\u0293\u027c"+ - "\3\2\2\2\u0293\u028b\3\2\2\2\u0294\u00a2\3\2\2\2\u0295\u0298\5\u00afX"+ - "\2\u0296\u0298\7a\2\2\u0297\u0295\3\2\2\2\u0297\u0296\3\2\2\2\u0298\u029e"+ - "\3\2\2\2\u0299\u029d\5\u00afX\2\u029a\u029d\5\u00adW\2\u029b\u029d\t\3"+ - "\2\2\u029c\u0299\3\2\2\2\u029c\u029a\3\2\2\2\u029c\u029b\3\2\2\2\u029d"+ - "\u02a0\3\2\2\2\u029e\u029c\3\2\2\2\u029e\u029f\3\2\2\2\u029f\u00a4\3\2"+ - "\2\2\u02a0\u029e\3\2\2\2\u02a1\u02a5\5\u00adW\2\u02a2\u02a6\5\u00afX\2"+ - "\u02a3\u02a6\5\u00adW\2\u02a4\u02a6\t\3\2\2\u02a5\u02a2\3\2\2\2\u02a5"+ - "\u02a3\3\2\2\2\u02a5\u02a4\3\2\2\2\u02a6\u02a7\3\2\2\2\u02a7\u02a5\3\2"+ - "\2\2\u02a7\u02a8\3\2\2\2\u02a8\u00a6\3\2\2\2\u02a9\u02af\7$\2\2\u02aa"+ - "\u02ae\n\4\2\2\u02ab\u02ac\7$\2\2\u02ac\u02ae\7$\2\2\u02ad\u02aa\3\2\2"+ - "\2\u02ad\u02ab\3\2\2\2\u02ae\u02b1\3\2\2\2\u02af\u02ad\3\2\2\2\u02af\u02b0"+ - "\3\2\2\2\u02b0\u02b2\3\2\2\2\u02b1\u02af\3\2\2\2\u02b2\u02b3\7$\2\2\u02b3"+ - "\u00a8\3\2\2\2\u02b4\u02ba\7b\2\2\u02b5\u02b9\n\5\2\2\u02b6\u02b7\7b\2"+ - "\2\u02b7\u02b9\7b\2\2\u02b8\u02b5\3\2\2\2\u02b8\u02b6\3\2\2\2\u02b9\u02bc"+ - "\3\2\2\2\u02ba\u02b8\3\2\2\2\u02ba\u02bb\3\2\2\2\u02bb\u02bd\3\2\2\2\u02bc"+ - "\u02ba\3\2\2\2\u02bd\u02be\7b\2\2\u02be\u00aa\3\2\2\2\u02bf\u02c1\7G\2"+ - "\2\u02c0\u02c2\t\6\2\2\u02c1\u02c0\3\2\2\2\u02c1\u02c2\3\2\2\2\u02c2\u02c4"+ - "\3\2\2\2\u02c3\u02c5\5\u00adW\2\u02c4\u02c3\3\2\2\2\u02c5\u02c6\3\2\2"+ - "\2\u02c6\u02c4\3\2\2\2\u02c6\u02c7\3\2\2\2\u02c7\u00ac\3\2\2\2\u02c8\u02c9"+ - "\t\7\2\2\u02c9\u00ae\3\2\2\2\u02ca\u02cb\t\b\2\2\u02cb\u00b0\3\2\2\2\u02cc"+ - 
"\u02cd\7/\2\2\u02cd\u02ce\7/\2\2\u02ce\u02d2\3\2\2\2\u02cf\u02d1\n\t\2"+ - "\2\u02d0\u02cf\3\2\2\2\u02d1\u02d4\3\2\2\2\u02d2\u02d0\3\2\2\2\u02d2\u02d3"+ - "\3\2\2\2\u02d3\u02d6\3\2\2\2\u02d4\u02d2\3\2\2\2\u02d5\u02d7\7\17\2\2"+ - "\u02d6\u02d5\3\2\2\2\u02d6\u02d7\3\2\2\2\u02d7\u02d9\3\2\2\2\u02d8\u02da"+ - "\7\f\2\2\u02d9\u02d8\3\2\2\2\u02d9\u02da\3\2\2\2\u02da\u02db\3\2\2\2\u02db"+ - "\u02dc\bY\2\2\u02dc\u00b2\3\2\2\2\u02dd\u02de\7\61\2\2\u02de\u02df\7,"+ - "\2\2\u02df\u02e4\3\2\2\2\u02e0\u02e3\5\u00b3Z\2\u02e1\u02e3\13\2\2\2\u02e2"+ - "\u02e0\3\2\2\2\u02e2\u02e1\3\2\2\2\u02e3\u02e6\3\2\2\2\u02e4\u02e5\3\2"+ - "\2\2\u02e4\u02e2\3\2\2\2\u02e5\u02e7\3\2\2\2\u02e6\u02e4\3\2\2\2\u02e7"+ - "\u02e8\7,\2\2\u02e8\u02e9\7\61\2\2\u02e9\u02ea\3\2\2\2\u02ea\u02eb\bZ"+ - "\2\2\u02eb\u00b4\3\2\2\2\u02ec\u02ee\t\n\2\2\u02ed\u02ec\3\2\2\2\u02ee"+ - "\u02ef\3\2\2\2\u02ef\u02ed\3\2\2\2\u02ef\u02f0\3\2\2\2\u02f0\u02f1\3\2"+ - "\2\2\u02f1\u02f2\b[\2\2\u02f2\u00b6\3\2\2\2\u02f3\u02f4\13\2\2\2\u02f4"+ - "\u00b8\3\2\2\2 \2\u0240\u025d\u025f\u0267\u026c\u0272\u0279\u027e\u0284"+ - "\u0287\u028f\u0293\u0297\u029c\u029e\u02a5\u02a7\u02ad\u02af\u02b8\u02ba"+ - "\u02c1\u02c6\u02d2\u02d6\u02d9\u02e2\u02e4\u02ef\3\2\3\2"; + "\4U\tU\4V\tV\4W\tW\4X\tX\4Y\tY\4Z\tZ\4[\t[\3\2\3\2\3\3\3\3\3\4\3\4\3\5"+ + "\3\5\3\6\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\b\3\b\3\b\3\b\3"+ + "\b\3\b\3\b\3\b\3\b\3\t\3\t\3\t\3\t\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\f"+ + "\3\f\3\f\3\f\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\16\3\16\3\16\3\17\3\17"+ + "\3\17\3\17\3\17\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\21\3\21\3\21"+ + "\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\22\3\23\3\23\3\23\3\23\3\23\3\23"+ + "\3\23\3\23\3\23\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24\3\25\3\25"+ + "\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\26\3\26\3\26\3\26\3\26"+ + "\3\26\3\26\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\30\3\30\3\30\3\30"+ + "\3\30\3\30\3\30\3\30\3\31\3\31\3\31\3\31\3\31\3\31\3\32\3\32\3\32\3\32"+ + 
"\3\32\3\32\3\32\3\33\3\33\3\33\3\33\3\33\3\34\3\34\3\34\3\34\3\34\3\35"+ + "\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\36\3\36\3\36\3\36\3\36"+ + "\3\36\3\36\3\36\3\36\3\37\3\37\3\37\3\37\3\37\3\37\3 \3 \3 \3 \3 \3 \3"+ + " \3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3\"\3#\3#\3#\3$\3$\3$\3$\3$\3%\3%\3%\3"+ + "%\3%\3&\3&\3&\3&\3&\3\'\3\'\3\'\3\'\3\'\3\'\3(\3(\3(\3(\3(\3(\3(\3)\3"+ + ")\3)\3)\3)\3)\3*\3*\3*\3*\3*\3*\3*\3*\3+\3+\3+\3+\3,\3,\3,\3,\3,\3-\3"+ + "-\3-\3.\3.\3.\3.\3.\3.\3.\3.\3.\3.\3/\3/\3/\3\60\3\60\3\60\3\60\3\60\3"+ + "\60\3\61\3\61\3\61\3\61\3\61\3\61\3\62\3\62\3\62\3\62\3\62\3\62\3\62\3"+ + "\63\3\63\3\63\3\63\3\63\3\63\3\63\3\63\3\63\3\64\3\64\3\64\3\64\3\64\3"+ + "\65\3\65\3\65\3\65\3\65\3\65\3\66\3\66\3\66\3\66\3\66\3\66\3\67\3\67\3"+ + "\67\3\67\3\67\3\67\38\38\38\38\38\38\38\38\39\39\39\39\39\39\39\3:\3:"+ + "\3:\3:\3:\3;\3;\3;\3;\3;\3;\3;\3<\3<\3<\3<\3<\3=\3=\3=\3=\3=\3>\3>\3>"+ + "\3>\3>\3>\3?\3?\3?\3?\3?\3?\3?\3@\3@\3@\3@\3@\3@\3A\3A\3A\3A\3A\3B\3B"+ + "\3C\3C\3C\3C\3C\3C\3C\5C\u023a\nC\3D\3D\3E\3E\3E\3F\3F\3G\3G\3G\3H\3H"+ + "\3I\3I\3J\3J\3K\3K\3L\3L\3M\3M\3M\3N\3N\3N\3N\7N\u0257\nN\fN\16N\u025a"+ + "\13N\3N\3N\3O\6O\u025f\nO\rO\16O\u0260\3P\6P\u0264\nP\rP\16P\u0265\3P"+ + "\3P\7P\u026a\nP\fP\16P\u026d\13P\3P\3P\6P\u0271\nP\rP\16P\u0272\3P\6P"+ + "\u0276\nP\rP\16P\u0277\3P\3P\7P\u027c\nP\fP\16P\u027f\13P\5P\u0281\nP"+ + "\3P\3P\3P\3P\6P\u0287\nP\rP\16P\u0288\3P\3P\5P\u028d\nP\3Q\3Q\5Q\u0291"+ + "\nQ\3Q\3Q\3Q\7Q\u0296\nQ\fQ\16Q\u0299\13Q\3R\3R\3R\3R\6R\u029f\nR\rR\16"+ + "R\u02a0\3S\3S\3S\3S\7S\u02a7\nS\fS\16S\u02aa\13S\3S\3S\3T\3T\3T\3T\7T"+ + "\u02b2\nT\fT\16T\u02b5\13T\3T\3T\3U\3U\5U\u02bb\nU\3U\6U\u02be\nU\rU\16"+ + "U\u02bf\3V\3V\3W\3W\3X\3X\3X\3X\7X\u02ca\nX\fX\16X\u02cd\13X\3X\5X\u02d0"+ + "\nX\3X\5X\u02d3\nX\3X\3X\3Y\3Y\3Y\3Y\3Y\7Y\u02dc\nY\fY\16Y\u02df\13Y\3"+ + "Y\3Y\3Y\3Y\3Y\3Z\6Z\u02e7\nZ\rZ\16Z\u02e8\3Z\3Z\3[\3[\3\u02dd\2\\\3\3"+ + "\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21"+ + 
"!\22#\23%\24\'\25)\26+\27-\30/\31\61\32\63\33\65\34\67\359\36;\37= ?!"+ + "A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.[/]\60_\61a\62c\63e\64g\65i\66k\67m8o9q:s"+ + ";u{?}@\177A\u0081B\u0083C\u0085D\u0087E\u0089F\u008bG\u008dH\u008f"+ + "I\u0091J\u0093K\u0095L\u0097M\u0099N\u009bO\u009dP\u009fQ\u00a1R\u00a3"+ + "S\u00a5T\u00a7U\u00a9\2\u00ab\2\u00ad\2\u00afV\u00b1W\u00b3X\u00b5Y\3"+ + "\2\13\3\2))\5\2<\3\2\2\2\u016a\u016b\7J\2\2\u016b\u016c\7C\2"+ + "\2\u016c\u016d\7X\2\2\u016d\u016e\7K\2\2\u016e\u016f\7P\2\2\u016f\u0170"+ + "\7I\2\2\u0170@\3\2\2\2\u0171\u0172\7K\2\2\u0172\u0173\7P\2\2\u0173B\3"+ + "\2\2\2\u0174\u0175\7K\2\2\u0175\u0176\7P\2\2\u0176\u0177\7P\2\2\u0177"+ + "\u0178\7G\2\2\u0178\u0179\7T\2\2\u0179D\3\2\2\2\u017a\u017b\7K\2\2\u017b"+ + "\u017c\7U\2\2\u017cF\3\2\2\2\u017d\u017e\7L\2\2\u017e\u017f\7Q\2\2\u017f"+ + "\u0180\7K\2\2\u0180\u0181\7P\2\2\u0181H\3\2\2\2\u0182\u0183\7N\2\2\u0183"+ + "\u0184\7G\2\2\u0184\u0185\7H\2\2\u0185\u0186\7V\2\2\u0186J\3\2\2\2\u0187"+ + "\u0188\7N\2\2\u0188\u0189\7K\2\2\u0189\u018a\7M\2\2\u018a\u018b\7G\2\2"+ + "\u018bL\3\2\2\2\u018c\u018d\7N\2\2\u018d\u018e\7K\2\2\u018e\u018f\7O\2"+ + "\2\u018f\u0190\7K\2\2\u0190\u0191\7V\2\2\u0191N\3\2\2\2\u0192\u0193\7"+ + "O\2\2\u0193\u0194\7C\2\2\u0194\u0195\7R\2\2\u0195\u0196\7R\2\2\u0196\u0197"+ + "\7G\2\2\u0197\u0198\7F\2\2\u0198P\3\2\2\2\u0199\u019a\7O\2\2\u019a\u019b"+ + "\7C\2\2\u019b\u019c\7V\2\2\u019c\u019d\7E\2\2\u019d\u019e\7J\2\2\u019e"+ + "R\3\2\2\2\u019f\u01a0\7P\2\2\u01a0\u01a1\7C\2\2\u01a1\u01a2\7V\2\2\u01a2"+ + "\u01a3\7W\2\2\u01a3\u01a4\7T\2\2\u01a4\u01a5\7C\2\2\u01a5\u01a6\7N\2\2"+ + "\u01a6T\3\2\2\2\u01a7\u01a8\7P\2\2\u01a8\u01a9\7Q\2\2\u01a9\u01aa\7V\2"+ + "\2\u01aaV\3\2\2\2\u01ab\u01ac\7P\2\2\u01ac\u01ad\7W\2\2\u01ad\u01ae\7"+ + "N\2\2\u01ae\u01af\7N\2\2\u01afX\3\2\2\2\u01b0\u01b1\7Q\2\2\u01b1\u01b2"+ + "\7P\2\2\u01b2Z\3\2\2\2\u01b3\u01b4\7Q\2\2\u01b4\u01b5\7R\2\2\u01b5\u01b6"+ + "\7V\2\2\u01b6\u01b7\7K\2\2\u01b7\u01b8\7O\2\2\u01b8\u01b9\7K\2\2\u01b9"+ + 
"\u01ba\7\\\2\2\u01ba\u01bb\7G\2\2\u01bb\u01bc\7F\2\2\u01bc\\\3\2\2\2\u01bd"+ + "\u01be\7Q\2\2\u01be\u01bf\7T\2\2\u01bf^\3\2\2\2\u01c0\u01c1\7Q\2\2\u01c1"+ + "\u01c2\7T\2\2\u01c2\u01c3\7F\2\2\u01c3\u01c4\7G\2\2\u01c4\u01c5\7T\2\2"+ + "\u01c5`\3\2\2\2\u01c6\u01c7\7Q\2\2\u01c7\u01c8\7W\2\2\u01c8\u01c9\7V\2"+ + "\2\u01c9\u01ca\7G\2\2\u01ca\u01cb\7T\2\2\u01cbb\3\2\2\2\u01cc\u01cd\7"+ + "R\2\2\u01cd\u01ce\7C\2\2\u01ce\u01cf\7T\2\2\u01cf\u01d0\7U\2\2\u01d0\u01d1"+ + "\7G\2\2\u01d1\u01d2\7F\2\2\u01d2d\3\2\2\2\u01d3\u01d4\7R\2\2\u01d4\u01d5"+ + "\7J\2\2\u01d5\u01d6\7[\2\2\u01d6\u01d7\7U\2\2\u01d7\u01d8\7K\2\2\u01d8"+ + "\u01d9\7E\2\2\u01d9\u01da\7C\2\2\u01da\u01db\7N\2\2\u01dbf\3\2\2\2\u01dc"+ + "\u01dd\7R\2\2\u01dd\u01de\7N\2\2\u01de\u01df\7C\2\2\u01df\u01e0\7P\2\2"+ + "\u01e0h\3\2\2\2\u01e1\u01e2\7S\2\2\u01e2\u01e3\7W\2\2\u01e3\u01e4\7G\2"+ + "\2\u01e4\u01e5\7T\2\2\u01e5\u01e6\7[\2\2\u01e6j\3\2\2\2\u01e7\u01e8\7"+ + "T\2\2\u01e8\u01e9\7K\2\2\u01e9\u01ea\7I\2\2\u01ea\u01eb\7J\2\2\u01eb\u01ec"+ + "\7V\2\2\u01ecl\3\2\2\2\u01ed\u01ee\7T\2\2\u01ee\u01ef\7N\2\2\u01ef\u01f0"+ + "\7K\2\2\u01f0\u01f1\7M\2\2\u01f1\u01f2\7G\2\2\u01f2n\3\2\2\2\u01f3\u01f4"+ + "\7U\2\2\u01f4\u01f5\7E\2\2\u01f5\u01f6\7J\2\2\u01f6\u01f7\7G\2\2\u01f7"+ + "\u01f8\7O\2\2\u01f8\u01f9\7C\2\2\u01f9\u01fa\7U\2\2\u01fap\3\2\2\2\u01fb"+ + "\u01fc\7U\2\2\u01fc\u01fd\7G\2\2\u01fd\u01fe\7N\2\2\u01fe\u01ff\7G\2\2"+ + "\u01ff\u0200\7E\2\2\u0200\u0201\7V\2\2\u0201r\3\2\2\2\u0202\u0203\7U\2"+ + "\2\u0203\u0204\7J\2\2\u0204\u0205\7Q\2\2\u0205\u0206\7Y\2\2\u0206t\3\2"+ + "\2\2\u0207\u0208\7V\2\2\u0208\u0209\7C\2\2\u0209\u020a\7D\2\2\u020a\u020b"+ + "\7N\2\2\u020b\u020c\7G\2\2\u020c\u020d\7U\2\2\u020dv\3\2\2\2\u020e\u020f"+ + "\7V\2\2\u020f\u0210\7G\2\2\u0210\u0211\7Z\2\2\u0211\u0212\7V\2\2\u0212"+ + "x\3\2\2\2\u0213\u0214\7V\2\2\u0214\u0215\7T\2\2\u0215\u0216\7W\2\2\u0216"+ + "\u0217\7G\2\2\u0217z\3\2\2\2\u0218\u0219\7W\2\2\u0219\u021a\7U\2\2\u021a"+ + 
"\u021b\7K\2\2\u021b\u021c\7P\2\2\u021c\u021d\7I\2\2\u021d|\3\2\2\2\u021e"+ + "\u021f\7X\2\2\u021f\u0220\7G\2\2\u0220\u0221\7T\2\2\u0221\u0222\7K\2\2"+ + "\u0222\u0223\7H\2\2\u0223\u0224\7[\2\2\u0224~\3\2\2\2\u0225\u0226\7Y\2"+ + "\2\u0226\u0227\7J\2\2\u0227\u0228\7G\2\2\u0228\u0229\7T\2\2\u0229\u022a"+ + "\7G\2\2\u022a\u0080\3\2\2\2\u022b\u022c\7Y\2\2\u022c\u022d\7K\2\2\u022d"+ + "\u022e\7V\2\2\u022e\u022f\7J\2\2\u022f\u0082\3\2\2\2\u0230\u0231\7?\2"+ + "\2\u0231\u0084\3\2\2\2\u0232\u0233\7>\2\2\u0233\u023a\7@\2\2\u0234\u0235"+ + "\7#\2\2\u0235\u023a\7?\2\2\u0236\u0237\7>\2\2\u0237\u0238\7?\2\2\u0238"+ + "\u023a\7@\2\2\u0239\u0232\3\2\2\2\u0239\u0234\3\2\2\2\u0239\u0236\3\2"+ + "\2\2\u023a\u0086\3\2\2\2\u023b\u023c\7>\2\2\u023c\u0088\3\2\2\2\u023d"+ + "\u023e\7>\2\2\u023e\u023f\7?\2\2\u023f\u008a\3\2\2\2\u0240\u0241\7@\2"+ + "\2\u0241\u008c\3\2\2\2\u0242\u0243\7@\2\2\u0243\u0244\7?\2\2\u0244\u008e"+ + "\3\2\2\2\u0245\u0246\7-\2\2\u0246\u0090\3\2\2\2\u0247\u0248\7/\2\2\u0248"+ + "\u0092\3\2\2\2\u0249\u024a\7,\2\2\u024a\u0094\3\2\2\2\u024b\u024c\7\61"+ + "\2\2\u024c\u0096\3\2\2\2\u024d\u024e\7\'\2\2\u024e\u0098\3\2\2\2\u024f"+ + "\u0250\7~\2\2\u0250\u0251\7~\2\2\u0251\u009a\3\2\2\2\u0252\u0258\7)\2"+ + "\2\u0253\u0257\n\2\2\2\u0254\u0255\7)\2\2\u0255\u0257\7)\2\2\u0256\u0253"+ + "\3\2\2\2\u0256\u0254\3\2\2\2\u0257\u025a\3\2\2\2\u0258\u0256\3\2\2\2\u0258"+ + "\u0259\3\2\2\2\u0259\u025b\3\2\2\2\u025a\u0258\3\2\2\2\u025b\u025c\7)"+ + "\2\2\u025c\u009c\3\2\2\2\u025d\u025f\5\u00abV\2\u025e\u025d\3\2\2\2\u025f"+ + "\u0260\3\2\2\2\u0260\u025e\3\2\2\2\u0260\u0261\3\2\2\2\u0261\u009e\3\2"+ + "\2\2\u0262\u0264\5\u00abV\2\u0263\u0262\3\2\2\2\u0264\u0265\3\2\2\2\u0265"+ + "\u0263\3\2\2\2\u0265\u0266\3\2\2\2\u0266\u0267\3\2\2\2\u0267\u026b\7\60"+ + "\2\2\u0268\u026a\5\u00abV\2\u0269\u0268\3\2\2\2\u026a\u026d\3\2\2\2\u026b"+ + "\u0269\3\2\2\2\u026b\u026c\3\2\2\2\u026c\u028d\3\2\2\2\u026d\u026b\3\2"+ + "\2\2\u026e\u0270\7\60\2\2\u026f\u0271\5\u00abV\2\u0270\u026f\3\2\2\2\u0271"+ + 
"\u0272\3\2\2\2\u0272\u0270\3\2\2\2\u0272\u0273\3\2\2\2\u0273\u028d\3\2"+ + "\2\2\u0274\u0276\5\u00abV\2\u0275\u0274\3\2\2\2\u0276\u0277\3\2\2\2\u0277"+ + "\u0275\3\2\2\2\u0277\u0278\3\2\2\2\u0278\u0280\3\2\2\2\u0279\u027d\7\60"+ + "\2\2\u027a\u027c\5\u00abV\2\u027b\u027a\3\2\2\2\u027c\u027f\3\2\2\2\u027d"+ + "\u027b\3\2\2\2\u027d\u027e\3\2\2\2\u027e\u0281\3\2\2\2\u027f\u027d\3\2"+ + "\2\2\u0280\u0279\3\2\2\2\u0280\u0281\3\2\2\2\u0281\u0282\3\2\2\2\u0282"+ + "\u0283\5\u00a9U\2\u0283\u028d\3\2\2\2\u0284\u0286\7\60\2\2\u0285\u0287"+ + "\5\u00abV\2\u0286\u0285\3\2\2\2\u0287\u0288\3\2\2\2\u0288\u0286\3\2\2"+ + "\2\u0288\u0289\3\2\2\2\u0289\u028a\3\2\2\2\u028a\u028b\5\u00a9U\2\u028b"+ + "\u028d\3\2\2\2\u028c\u0263\3\2\2\2\u028c\u026e\3\2\2\2\u028c\u0275\3\2"+ + "\2\2\u028c\u0284\3\2\2\2\u028d\u00a0\3\2\2\2\u028e\u0291\5\u00adW\2\u028f"+ + "\u0291\7a\2\2\u0290\u028e\3\2\2\2\u0290\u028f\3\2\2\2\u0291\u0297\3\2"+ + "\2\2\u0292\u0296\5\u00adW\2\u0293\u0296\5\u00abV\2\u0294\u0296\t\3\2\2"+ + "\u0295\u0292\3\2\2\2\u0295\u0293\3\2\2\2\u0295\u0294\3\2\2\2\u0296\u0299"+ + "\3\2\2\2\u0297\u0295\3\2\2\2\u0297\u0298\3\2\2\2\u0298\u00a2\3\2\2\2\u0299"+ + "\u0297\3\2\2\2\u029a\u029e\5\u00abV\2\u029b\u029f\5\u00adW\2\u029c\u029f"+ + "\5\u00abV\2\u029d\u029f\t\3\2\2\u029e\u029b\3\2\2\2\u029e\u029c\3\2\2"+ + "\2\u029e\u029d\3\2\2\2\u029f\u02a0\3\2\2\2\u02a0\u029e\3\2\2\2\u02a0\u02a1"+ + "\3\2\2\2\u02a1\u00a4\3\2\2\2\u02a2\u02a8\7$\2\2\u02a3\u02a7\n\4\2\2\u02a4"+ + "\u02a5\7$\2\2\u02a5\u02a7\7$\2\2\u02a6\u02a3\3\2\2\2\u02a6\u02a4\3\2\2"+ + "\2\u02a7\u02aa\3\2\2\2\u02a8\u02a6\3\2\2\2\u02a8\u02a9\3\2\2\2\u02a9\u02ab"+ + "\3\2\2\2\u02aa\u02a8\3\2\2\2\u02ab\u02ac\7$\2\2\u02ac\u00a6\3\2\2\2\u02ad"+ + "\u02b3\7b\2\2\u02ae\u02b2\n\5\2\2\u02af\u02b0\7b\2\2\u02b0\u02b2\7b\2"+ + "\2\u02b1\u02ae\3\2\2\2\u02b1\u02af\3\2\2\2\u02b2\u02b5\3\2\2\2\u02b3\u02b1"+ + "\3\2\2\2\u02b3\u02b4\3\2\2\2\u02b4\u02b6\3\2\2\2\u02b5\u02b3\3\2\2\2\u02b6"+ + 
"\u02b7\7b\2\2\u02b7\u00a8\3\2\2\2\u02b8\u02ba\7G\2\2\u02b9\u02bb\t\6\2"+ + "\2\u02ba\u02b9\3\2\2\2\u02ba\u02bb\3\2\2\2\u02bb\u02bd\3\2\2\2\u02bc\u02be"+ + "\5\u00abV\2\u02bd\u02bc\3\2\2\2\u02be\u02bf\3\2\2\2\u02bf\u02bd\3\2\2"+ + "\2\u02bf\u02c0\3\2\2\2\u02c0\u00aa\3\2\2\2\u02c1\u02c2\t\7\2\2\u02c2\u00ac"+ + "\3\2\2\2\u02c3\u02c4\t\b\2\2\u02c4\u00ae\3\2\2\2\u02c5\u02c6\7/\2\2\u02c6"+ + "\u02c7\7/\2\2\u02c7\u02cb\3\2\2\2\u02c8\u02ca\n\t\2\2\u02c9\u02c8\3\2"+ + "\2\2\u02ca\u02cd\3\2\2\2\u02cb\u02c9\3\2\2\2\u02cb\u02cc\3\2\2\2\u02cc"+ + "\u02cf\3\2\2\2\u02cd\u02cb\3\2\2\2\u02ce\u02d0\7\17\2\2\u02cf\u02ce\3"+ + "\2\2\2\u02cf\u02d0\3\2\2\2\u02d0\u02d2\3\2\2\2\u02d1\u02d3\7\f\2\2\u02d2"+ + "\u02d1\3\2\2\2\u02d2\u02d3\3\2\2\2\u02d3\u02d4\3\2\2\2\u02d4\u02d5\bX"+ + "\2\2\u02d5\u00b0\3\2\2\2\u02d6\u02d7\7\61\2\2\u02d7\u02d8\7,\2\2\u02d8"+ + "\u02dd\3\2\2\2\u02d9\u02dc\5\u00b1Y\2\u02da\u02dc\13\2\2\2\u02db\u02d9"+ + "\3\2\2\2\u02db\u02da\3\2\2\2\u02dc\u02df\3\2\2\2\u02dd\u02de\3\2\2\2\u02dd"+ + "\u02db\3\2\2\2\u02de\u02e0\3\2\2\2\u02df\u02dd\3\2\2\2\u02e0\u02e1\7,"+ + "\2\2\u02e1\u02e2\7\61\2\2\u02e2\u02e3\3\2\2\2\u02e3\u02e4\bY\2\2\u02e4"+ + "\u00b2\3\2\2\2\u02e5\u02e7\t\n\2\2\u02e6\u02e5\3\2\2\2\u02e7\u02e8\3\2"+ + "\2\2\u02e8\u02e6\3\2\2\2\u02e8\u02e9\3\2\2\2\u02e9\u02ea\3\2\2\2\u02ea"+ + "\u02eb\bZ\2\2\u02eb\u00b4\3\2\2\2\u02ec\u02ed\13\2\2\2\u02ed\u00b6\3\2"+ + "\2\2 \2\u0239\u0256\u0258\u0260\u0265\u026b\u0272\u0277\u027d\u0280\u0288"+ + "\u028c\u0290\u0295\u0297\u029e\u02a0\u02a6\u02a8\u02b1\u02b3\u02ba\u02bf"+ + "\u02cb\u02cf\u02d2\u02db\u02dd\u02e8\3\2\3\2"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java index ecf7c49d275..ed17df83b75 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java +++ 
b/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java @@ -600,16 +600,6 @@ interface SqlBaseListener extends ParseTreeListener { * @param ctx the parse tree */ void exitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx); - /** - * Enter a parse tree produced by {@link SqlBaseParser#columnExpression}. - * @param ctx the parse tree - */ - void enterColumnExpression(SqlBaseParser.ColumnExpressionContext ctx); - /** - * Exit a parse tree produced by {@link SqlBaseParser#columnExpression}. - * @param ctx the parse tree - */ - void exitColumnExpression(SqlBaseParser.ColumnExpressionContext ctx); /** * Enter a parse tree produced by the {@code nullLiteral} * labeled alternative in {@link SqlBaseParser#constant}. diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java index 55f5472daee..5be30ef16ad 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java @@ -30,11 +30,11 @@ class SqlBaseParser extends Parser { MAPPED=39, MATCH=40, NATURAL=41, NOT=42, NULL=43, ON=44, OPTIMIZED=45, OR=46, ORDER=47, OUTER=48, PARSED=49, PHYSICAL=50, PLAN=51, QUERY=52, RIGHT=53, RLIKE=54, SCHEMAS=55, SELECT=56, SHOW=57, TABLES=58, TEXT=59, - TRUE=60, USING=61, VERIFY=62, WHEN=63, WHERE=64, WITH=65, EQ=66, NEQ=67, - LT=68, LTE=69, GT=70, GTE=71, PLUS=72, MINUS=73, ASTERISK=74, SLASH=75, - PERCENT=76, CONCAT=77, STRING=78, INTEGER_VALUE=79, DECIMAL_VALUE=80, - IDENTIFIER=81, DIGIT_IDENTIFIER=82, QUOTED_IDENTIFIER=83, BACKQUOTED_IDENTIFIER=84, - SIMPLE_COMMENT=85, BRACKETED_COMMENT=86, WS=87, UNRECOGNIZED=88, DELIMITER=89; + TRUE=60, USING=61, VERIFY=62, WHERE=63, WITH=64, EQ=65, NEQ=66, LT=67, + LTE=68, GT=69, GTE=70, PLUS=71, MINUS=72, ASTERISK=73, SLASH=74, PERCENT=75, + CONCAT=76, STRING=77, INTEGER_VALUE=78, 
DECIMAL_VALUE=79, IDENTIFIER=80, + DIGIT_IDENTIFIER=81, QUOTED_IDENTIFIER=82, BACKQUOTED_IDENTIFIER=83, SIMPLE_COMMENT=84, + BRACKETED_COMMENT=85, WS=86, UNRECOGNIZED=87, DELIMITER=88; public static final int RULE_singleStatement = 0, RULE_singleExpression = 1, RULE_statement = 2, RULE_query = 3, RULE_queryNoWith = 4, RULE_queryTerm = 5, RULE_orderBy = 6, @@ -43,20 +43,20 @@ class SqlBaseParser extends Parser { RULE_selectItem = 14, RULE_relation = 15, RULE_joinRelation = 16, RULE_joinType = 17, RULE_joinCriteria = 18, RULE_relationPrimary = 19, RULE_expression = 20, RULE_booleanExpression = 21, RULE_predicated = 22, RULE_predicate = 23, - RULE_valueExpression = 24, RULE_primaryExpression = 25, RULE_columnExpression = 26, - RULE_constant = 27, RULE_comparisonOperator = 28, RULE_booleanValue = 29, - RULE_dataType = 30, RULE_qualifiedName = 31, RULE_tableIdentifier = 32, - RULE_identifier = 33, RULE_quoteIdentifier = 34, RULE_unquoteIdentifier = 35, - RULE_number = 36, RULE_nonReserved = 37; + RULE_valueExpression = 24, RULE_primaryExpression = 25, RULE_constant = 26, + RULE_comparisonOperator = 27, RULE_booleanValue = 28, RULE_dataType = 29, + RULE_qualifiedName = 30, RULE_tableIdentifier = 31, RULE_identifier = 32, + RULE_quoteIdentifier = 33, RULE_unquoteIdentifier = 34, RULE_number = 35, + RULE_nonReserved = 36; public static final String[] ruleNames = { "singleStatement", "singleExpression", "statement", "query", "queryNoWith", "queryTerm", "orderBy", "querySpecification", "fromClause", "groupBy", "groupingElement", "groupingExpressions", "namedQuery", "setQuantifier", "selectItem", "relation", "joinRelation", "joinType", "joinCriteria", "relationPrimary", "expression", "booleanExpression", "predicated", "predicate", - "valueExpression", "primaryExpression", "columnExpression", "constant", - "comparisonOperator", "booleanValue", "dataType", "qualifiedName", "tableIdentifier", - "identifier", "quoteIdentifier", "unquoteIdentifier", "number", "nonReserved" + 
"valueExpression", "primaryExpression", "constant", "comparisonOperator", + "booleanValue", "dataType", "qualifiedName", "tableIdentifier", "identifier", + "quoteIdentifier", "unquoteIdentifier", "number", "nonReserved" }; private static final String[] _LITERAL_NAMES = { @@ -69,8 +69,8 @@ class SqlBaseParser extends Parser { "'NULL'", "'ON'", "'OPTIMIZED'", "'OR'", "'ORDER'", "'OUTER'", "'PARSED'", "'PHYSICAL'", "'PLAN'", "'QUERY'", "'RIGHT'", "'RLIKE'", "'SCHEMAS'", "'SELECT'", "'SHOW'", "'TABLES'", "'TEXT'", "'TRUE'", "'USING'", "'VERIFY'", - "'WHEN'", "'WHERE'", "'WITH'", "'='", null, "'<'", "'<='", "'>'", "'>='", - "'+'", "'-'", "'*'", "'/'", "'%'", "'||'" + "'WHERE'", "'WITH'", "'='", null, "'<'", "'<='", "'>'", "'>='", "'+'", + "'-'", "'*'", "'/'", "'%'", "'||'" }; private static final String[] _SYMBOLIC_NAMES = { null, null, null, null, null, "ALL", "ANALYZE", "ANALYZED", "AND", "ANY", @@ -80,9 +80,9 @@ class SqlBaseParser extends Parser { "IS", "JOIN", "LEFT", "LIKE", "LIMIT", "MAPPED", "MATCH", "NATURAL", "NOT", "NULL", "ON", "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", "QUERY", "RIGHT", "RLIKE", "SCHEMAS", "SELECT", "SHOW", "TABLES", - "TEXT", "TRUE", "USING", "VERIFY", "WHEN", "WHERE", "WITH", "EQ", "NEQ", - "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", - "CONCAT", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", + "TEXT", "TRUE", "USING", "VERIFY", "WHERE", "WITH", "EQ", "NEQ", "LT", + "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CONCAT", + "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", "UNRECOGNIZED", "DELIMITER" }; @@ -165,9 +165,9 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(76); + setState(74); statement(); - setState(77); + setState(75); match(EOF); } } @@ -212,9 +212,9 @@ class 
SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(79); + setState(77); expression(); - setState(80); + setState(78); match(EOF); } } @@ -469,14 +469,14 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 4, RULE_statement); int _la; try { - setState(139); + setState(137); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,10,_ctx) ) { case 1: _localctx = new StatementDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(82); + setState(80); query(); } break; @@ -484,27 +484,27 @@ class SqlBaseParser extends Parser { _localctx = new ExplainContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(83); + setState(81); match(EXPLAIN); - setState(97); + setState(95); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) { case 1: { - setState(84); + setState(82); match(T__0); - setState(93); + setState(91); _errHandler.sync(this); _la = _input.LA(1); while ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << FORMAT) | (1L << PLAN) | (1L << VERIFY))) != 0)) { { - setState(91); + setState(89); switch (_input.LA(1)) { case PLAN: { - setState(85); + setState(83); match(PLAN); - setState(86); + setState(84); ((ExplainContext)_localctx).type = _input.LT(1); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ALL) | (1L << ANALYZED) | (1L << EXECUTABLE) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED))) != 0)) ) { @@ -516,9 +516,9 @@ class SqlBaseParser extends Parser { break; case FORMAT: { - setState(87); + setState(85); match(FORMAT); - setState(88); + setState(86); ((ExplainContext)_localctx).format = _input.LT(1); _la = _input.LA(1); if ( !(_la==GRAPHVIZ || _la==TEXT) ) { @@ -530,9 +530,9 @@ class SqlBaseParser extends Parser { break; case VERIFY: { - setState(89); + setState(87); match(VERIFY); - setState(90); + setState(88); ((ExplainContext)_localctx).verify = booleanValue(); } break; @@ -540,16 +540,16 @@ class SqlBaseParser extends 
Parser { throw new NoViableAltException(this); } } - setState(95); + setState(93); _errHandler.sync(this); _la = _input.LA(1); } - setState(96); + setState(94); match(T__1); } break; } - setState(99); + setState(97); statement(); } break; @@ -557,27 +557,27 @@ class SqlBaseParser extends Parser { _localctx = new DebugContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(100); + setState(98); match(DEBUG); - setState(112); + setState(110); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,5,_ctx) ) { case 1: { - setState(101); + setState(99); match(T__0); - setState(108); + setState(106); _errHandler.sync(this); _la = _input.LA(1); while (_la==FORMAT || _la==PLAN) { { - setState(106); + setState(104); switch (_input.LA(1)) { case PLAN: { - setState(102); + setState(100); match(PLAN); - setState(103); + setState(101); ((DebugContext)_localctx).type = _input.LT(1); _la = _input.LA(1); if ( !(_la==ANALYZED || _la==OPTIMIZED) ) { @@ -589,9 +589,9 @@ class SqlBaseParser extends Parser { break; case FORMAT: { - setState(104); + setState(102); match(FORMAT); - setState(105); + setState(103); ((DebugContext)_localctx).format = _input.LT(1); _la = _input.LA(1); if ( !(_la==GRAPHVIZ || _la==TEXT) ) { @@ -605,16 +605,16 @@ class SqlBaseParser extends Parser { throw new NoViableAltException(this); } } - setState(110); + setState(108); _errHandler.sync(this); _la = _input.LA(1); } - setState(111); + setState(109); match(T__1); } break; } - setState(114); + setState(112); statement(); } break; @@ -622,24 +622,24 @@ class SqlBaseParser extends Parser { _localctx = new ShowTablesContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(115); + setState(113); match(SHOW); - setState(116); + setState(114); match(TABLES); - setState(121); + setState(119); _la = _input.LA(1); if (_la==LIKE || _la==STRING) { { - setState(118); + setState(116); _la = _input.LA(1); if (_la==LIKE) { { - setState(117); + setState(115); match(LIKE); } } - setState(120); + 
setState(118); ((ShowTablesContext)_localctx).pattern = match(STRING); } } @@ -650,58 +650,58 @@ class SqlBaseParser extends Parser { _localctx = new ShowColumnsContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(123); + setState(121); match(SHOW); - setState(124); + setState(122); match(COLUMNS); - setState(125); + setState(123); _la = _input.LA(1); if ( !(_la==FROM || _la==IN) ) { _errHandler.recoverInline(this); } else { consume(); } + setState(124); + tableIdentifier(); + } + break; + case 6: + _localctx = new ShowColumnsContext(_localctx); + enterOuterAlt(_localctx, 6); + { + setState(125); + _la = _input.LA(1); + if ( !(_la==DESC || _la==DESCRIBE) ) { + _errHandler.recoverInline(this); + } else { + consume(); + } setState(126); tableIdentifier(); } break; - case 6: - _localctx = new ShowColumnsContext(_localctx); - enterOuterAlt(_localctx, 6); - { - setState(127); - _la = _input.LA(1); - if ( !(_la==DESC || _la==DESCRIBE) ) { - _errHandler.recoverInline(this); - } else { - consume(); - } - setState(128); - tableIdentifier(); - } - break; case 7: _localctx = new ShowFunctionsContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(129); + setState(127); match(SHOW); - setState(130); + setState(128); match(FUNCTIONS); - setState(135); + setState(133); _la = _input.LA(1); if (_la==LIKE || _la==STRING) { { - setState(132); + setState(130); _la = _input.LA(1); if (_la==LIKE) { { - setState(131); + setState(129); match(LIKE); } } - setState(134); + setState(132); ((ShowFunctionsContext)_localctx).pattern = match(STRING); } } @@ -712,9 +712,9 @@ class SqlBaseParser extends Parser { _localctx = new ShowSchemasContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(137); + setState(135); match(SHOW); - setState(138); + setState(136); match(SCHEMAS); } break; @@ -768,34 +768,34 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(150); + setState(148); _la = _input.LA(1); if (_la==WITH) { { - setState(141); + 
setState(139); match(WITH); - setState(142); + setState(140); namedQuery(); - setState(147); + setState(145); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(143); + setState(141); match(T__2); - setState(144); + setState(142); namedQuery(); } } - setState(149); + setState(147); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(152); + setState(150); queryNoWith(); } } @@ -852,44 +852,44 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(154); + setState(152); queryTerm(); - setState(165); + setState(163); _la = _input.LA(1); if (_la==ORDER) { { - setState(155); + setState(153); match(ORDER); - setState(156); + setState(154); match(BY); - setState(157); + setState(155); orderBy(); - setState(162); + setState(160); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(158); + setState(156); match(T__2); - setState(159); + setState(157); orderBy(); } } - setState(164); + setState(162); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(169); + setState(167); _la = _input.LA(1); if (_la==LIMIT) { { - setState(167); + setState(165); match(LIMIT); - setState(168); + setState(166); ((QueryNoWithContext)_localctx).limit = _input.LT(1); _la = _input.LA(1); if ( !(_la==ALL || _la==INTEGER_VALUE) ) { @@ -967,13 +967,13 @@ class SqlBaseParser extends Parser { QueryTermContext _localctx = new QueryTermContext(_ctx, getState()); enterRule(_localctx, 10, RULE_queryTerm); try { - setState(176); + setState(174); switch (_input.LA(1)) { case SELECT: _localctx = new QueryPrimaryDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(171); + setState(169); querySpecification(); } break; @@ -981,11 +981,11 @@ class SqlBaseParser extends Parser { _localctx = new SubqueryContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(172); + setState(170); match(T__0); - setState(173); + setState(171); queryNoWith(); - setState(174); + setState(172); match(T__1); 
} break; @@ -1037,13 +1037,13 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(178); + setState(176); expression(); - setState(180); + setState(178); _la = _input.LA(1); if (_la==ASC || _la==DESC) { { - setState(179); + setState(177); ((OrderByContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -1122,75 +1122,75 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(182); + setState(180); match(SELECT); - setState(184); + setState(182); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { - setState(183); + setState(181); setQuantifier(); } } - setState(186); + setState(184); selectItem(); - setState(191); + setState(189); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(187); + setState(185); match(T__2); - setState(188); + setState(186); selectItem(); } } - setState(193); + setState(191); _errHandler.sync(this); _la = _input.LA(1); } - setState(195); + setState(193); _la = _input.LA(1); if (_la==FROM) { { - setState(194); + setState(192); fromClause(); } } - setState(199); + setState(197); _la = _input.LA(1); if (_la==WHERE) { { - setState(197); + setState(195); match(WHERE); - setState(198); + setState(196); ((QuerySpecificationContext)_localctx).where = booleanExpression(0); } } - setState(204); + setState(202); _la = _input.LA(1); if (_la==GROUP) { { - setState(201); + setState(199); match(GROUP); - setState(202); + setState(200); match(BY); - setState(203); + setState(201); groupBy(); } } - setState(208); + setState(206); _la = _input.LA(1); if (_la==HAVING) { { - setState(206); + setState(204); match(HAVING); - setState(207); + setState(205); ((QuerySpecificationContext)_localctx).having = booleanExpression(0); } } @@ -1242,23 +1242,23 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(210); + setState(208); match(FROM); - setState(211); + setState(209); relation(); - 
setState(216); + setState(214); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(212); + setState(210); match(T__2); - setState(213); + setState(211); relation(); } } - setState(218); + setState(216); _errHandler.sync(this); _la = _input.LA(1); } @@ -1311,30 +1311,30 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(220); + setState(218); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { - setState(219); + setState(217); setQuantifier(); } } - setState(222); + setState(220); groupingElement(); - setState(227); + setState(225); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(223); + setState(221); match(T__2); - setState(224); + setState(222); groupingElement(); } } - setState(229); + setState(227); _errHandler.sync(this); _la = _input.LA(1); } @@ -1389,7 +1389,7 @@ class SqlBaseParser extends Parser { _localctx = new SingleGroupingSetContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(230); + setState(228); groupingExpressions(); } } @@ -1435,47 +1435,47 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 22, RULE_groupingExpressions); int _la; try { - setState(245); + setState(243); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(232); + setState(230); match(T__0); - setState(241); + setState(239); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FORMAT) | (1L << FROM) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << MATCH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << QUERY) | (1L << RLIKE) | (1L << SCHEMAS) | (1L << SHOW) | (1L << TABLES) | (1L << TEXT) | (1L << 
TRUE) | (1L << VERIFY))) != 0) || ((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & ((1L << (PLUS - 72)) | (1L << (MINUS - 72)) | (1L << (ASTERISK - 72)) | (1L << (STRING - 72)) | (1L << (INTEGER_VALUE - 72)) | (1L << (DECIMAL_VALUE - 72)) | (1L << (IDENTIFIER - 72)) | (1L << (DIGIT_IDENTIFIER - 72)) | (1L << (QUOTED_IDENTIFIER - 72)) | (1L << (BACKQUOTED_IDENTIFIER - 72)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FORMAT) | (1L << FROM) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << MATCH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << QUERY) | (1L << RLIKE) | (1L << SCHEMAS) | (1L << SHOW) | (1L << TABLES) | (1L << TEXT) | (1L << TRUE) | (1L << VERIFY))) != 0) || ((((_la - 71)) & ~0x3f) == 0 && ((1L << (_la - 71)) & ((1L << (PLUS - 71)) | (1L << (MINUS - 71)) | (1L << (ASTERISK - 71)) | (1L << (STRING - 71)) | (1L << (INTEGER_VALUE - 71)) | (1L << (DECIMAL_VALUE - 71)) | (1L << (IDENTIFIER - 71)) | (1L << (DIGIT_IDENTIFIER - 71)) | (1L << (QUOTED_IDENTIFIER - 71)) | (1L << (BACKQUOTED_IDENTIFIER - 71)))) != 0)) { { - setState(233); + setState(231); expression(); - setState(238); + setState(236); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(234); + setState(232); match(T__2); - setState(235); + setState(233); expression(); } } - setState(240); + setState(238); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(243); + setState(241); match(T__1); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(244); + setState(242); expression(); } break; @@ -1526,15 +1526,15 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(247); + setState(245); ((NamedQueryContext)_localctx).name = identifier(); 
- setState(248); + setState(246); match(AS); - setState(249); + setState(247); match(T__0); - setState(250); + setState(248); queryNoWith(); - setState(251); + setState(249); match(T__1); } } @@ -1578,7 +1578,7 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(253); + setState(251); _la = _input.LA(1); if ( !(_la==ALL || _la==DISTINCT) ) { _errHandler.recoverInline(this); @@ -1641,23 +1641,23 @@ class SqlBaseParser extends Parser { _localctx = new SelectExpressionContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(255); + setState(253); expression(); - setState(260); + setState(258); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(257); + setState(255); _la = _input.LA(1); if (_la==AS) { { - setState(256); + setState(254); match(AS); } } - setState(259); + setState(257); identifier(); } break; @@ -1711,19 +1711,19 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(262); + setState(260); relationPrimary(); - setState(266); + setState(264); _errHandler.sync(this); _la = _input.LA(1); while ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << FULL) | (1L << INNER) | (1L << JOIN) | (1L << LEFT) | (1L << NATURAL) | (1L << RIGHT))) != 0)) { { { - setState(263); + setState(261); joinRelation(); } } - setState(268); + setState(266); _errHandler.sync(this); _la = _input.LA(1); } @@ -1777,7 +1777,7 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 32, RULE_joinRelation); int _la; try { - setState(280); + setState(278); switch (_input.LA(1)) { case FULL: case INNER: @@ -1787,18 +1787,18 @@ class SqlBaseParser extends Parser { enterOuterAlt(_localctx, 1); { { - setState(269); + setState(267); joinType(); } - setState(270); + setState(268); match(JOIN); - setState(271); + setState(269); ((JoinRelationContext)_localctx).right = relationPrimary(); - setState(273); + setState(271); _la = _input.LA(1); if (_la==ON || _la==USING) { { 
- setState(272); + setState(270); joinCriteria(); } } @@ -1808,13 +1808,13 @@ class SqlBaseParser extends Parser { case NATURAL: enterOuterAlt(_localctx, 2); { - setState(275); + setState(273); match(NATURAL); - setState(276); + setState(274); joinType(); - setState(277); + setState(275); match(JOIN); - setState(278); + setState(276); ((JoinRelationContext)_localctx).right = relationPrimary(); } break; @@ -1863,17 +1863,17 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 34, RULE_joinType); int _la; try { - setState(297); + setState(295); switch (_input.LA(1)) { case INNER: case JOIN: enterOuterAlt(_localctx, 1); { - setState(283); + setState(281); _la = _input.LA(1); if (_la==INNER) { { - setState(282); + setState(280); match(INNER); } } @@ -1883,13 +1883,13 @@ class SqlBaseParser extends Parser { case LEFT: enterOuterAlt(_localctx, 2); { - setState(285); + setState(283); match(LEFT); - setState(287); + setState(285); _la = _input.LA(1); if (_la==OUTER) { { - setState(286); + setState(284); match(OUTER); } } @@ -1899,13 +1899,13 @@ class SqlBaseParser extends Parser { case RIGHT: enterOuterAlt(_localctx, 3); { - setState(289); + setState(287); match(RIGHT); - setState(291); + setState(289); _la = _input.LA(1); if (_la==OUTER) { { - setState(290); + setState(288); match(OUTER); } } @@ -1915,13 +1915,13 @@ class SqlBaseParser extends Parser { case FULL: enterOuterAlt(_localctx, 4); { - setState(293); + setState(291); match(FULL); - setState(295); + setState(293); _la = _input.LA(1); if (_la==OUTER) { { - setState(294); + setState(292); match(OUTER); } } @@ -1979,43 +1979,43 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 36, RULE_joinCriteria); int _la; try { - setState(313); + setState(311); switch (_input.LA(1)) { case ON: enterOuterAlt(_localctx, 1); { - setState(299); + setState(297); match(ON); - setState(300); + setState(298); booleanExpression(0); } break; case USING: enterOuterAlt(_localctx, 2); { - setState(301); + setState(299); 
match(USING); - setState(302); + setState(300); match(T__0); - setState(303); + setState(301); identifier(); - setState(308); + setState(306); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(304); + setState(302); match(T__2); - setState(305); + setState(303); identifier(); } } - setState(310); + setState(308); _errHandler.sync(this); _la = _input.LA(1); } - setState(311); + setState(309); match(T__1); } break; @@ -2120,29 +2120,29 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 38, RULE_relationPrimary); int _la; try { - setState(340); + setState(338); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { case 1: _localctx = new TableNameContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(315); + setState(313); tableIdentifier(); - setState(320); + setState(318); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FROM) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << QUERY) | (1L << RLIKE) | (1L << SCHEMAS) | (1L << SHOW) | (1L << TABLES) | (1L << TEXT) | (1L << VERIFY))) != 0) || ((((_la - 81)) & ~0x3f) == 0 && ((1L << (_la - 81)) & ((1L << (IDENTIFIER - 81)) | (1L << (DIGIT_IDENTIFIER - 81)) | (1L << (QUOTED_IDENTIFIER - 81)) | (1L << (BACKQUOTED_IDENTIFIER - 81)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FROM) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << QUERY) | (1L << RLIKE) | (1L << SCHEMAS) | (1L << SHOW) | (1L << TABLES) | (1L << TEXT) | (1L << VERIFY))) != 0) || ((((_la - 80)) 
& ~0x3f) == 0 && ((1L << (_la - 80)) & ((1L << (IDENTIFIER - 80)) | (1L << (DIGIT_IDENTIFIER - 80)) | (1L << (QUOTED_IDENTIFIER - 80)) | (1L << (BACKQUOTED_IDENTIFIER - 80)))) != 0)) { { - setState(317); + setState(315); _la = _input.LA(1); if (_la==AS) { { - setState(316); + setState(314); match(AS); } } - setState(319); + setState(317); qualifiedName(); } } @@ -2153,26 +2153,26 @@ class SqlBaseParser extends Parser { _localctx = new AliasedQueryContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(322); + setState(320); match(T__0); - setState(323); + setState(321); queryNoWith(); - setState(324); + setState(322); match(T__1); - setState(329); + setState(327); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FROM) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << QUERY) | (1L << RLIKE) | (1L << SCHEMAS) | (1L << SHOW) | (1L << TABLES) | (1L << TEXT) | (1L << VERIFY))) != 0) || ((((_la - 81)) & ~0x3f) == 0 && ((1L << (_la - 81)) & ((1L << (IDENTIFIER - 81)) | (1L << (DIGIT_IDENTIFIER - 81)) | (1L << (QUOTED_IDENTIFIER - 81)) | (1L << (BACKQUOTED_IDENTIFIER - 81)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FROM) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << QUERY) | (1L << RLIKE) | (1L << SCHEMAS) | (1L << SHOW) | (1L << TABLES) | (1L << TEXT) | (1L << VERIFY))) != 0) || ((((_la - 80)) & ~0x3f) == 0 && ((1L << (_la - 80)) & ((1L << (IDENTIFIER - 80)) | (1L << (DIGIT_IDENTIFIER - 80)) | (1L << (QUOTED_IDENTIFIER - 80)) | (1L << (BACKQUOTED_IDENTIFIER - 80)))) != 
0)) { { - setState(326); + setState(324); _la = _input.LA(1); if (_la==AS) { { - setState(325); + setState(323); match(AS); } } - setState(328); + setState(326); qualifiedName(); } } @@ -2183,26 +2183,26 @@ class SqlBaseParser extends Parser { _localctx = new AliasedRelationContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(331); + setState(329); match(T__0); - setState(332); + setState(330); relation(); - setState(333); + setState(331); match(T__1); - setState(338); + setState(336); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FROM) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << QUERY) | (1L << RLIKE) | (1L << SCHEMAS) | (1L << SHOW) | (1L << TABLES) | (1L << TEXT) | (1L << VERIFY))) != 0) || ((((_la - 81)) & ~0x3f) == 0 && ((1L << (_la - 81)) & ((1L << (IDENTIFIER - 81)) | (1L << (DIGIT_IDENTIFIER - 81)) | (1L << (QUOTED_IDENTIFIER - 81)) | (1L << (BACKQUOTED_IDENTIFIER - 81)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FROM) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << QUERY) | (1L << RLIKE) | (1L << SCHEMAS) | (1L << SHOW) | (1L << TABLES) | (1L << TEXT) | (1L << VERIFY))) != 0) || ((((_la - 80)) & ~0x3f) == 0 && ((1L << (_la - 80)) & ((1L << (IDENTIFIER - 80)) | (1L << (DIGIT_IDENTIFIER - 80)) | (1L << (QUOTED_IDENTIFIER - 80)) | (1L << (BACKQUOTED_IDENTIFIER - 80)))) != 0)) { { - setState(335); + setState(333); _la = _input.LA(1); if (_la==AS) { { - setState(334); + setState(332); match(AS); } } - setState(337); + setState(335); qualifiedName(); } 
} @@ -2251,7 +2251,7 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(342); + setState(340); booleanExpression(0); } } @@ -2454,7 +2454,7 @@ class SqlBaseParser extends Parser { int _alt; enterOuterAlt(_localctx, 1); { - setState(391); + setState(389); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,52,_ctx) ) { case 1: @@ -2463,9 +2463,9 @@ class SqlBaseParser extends Parser { _ctx = _localctx; _prevctx = _localctx; - setState(345); + setState(343); match(NOT); - setState(346); + setState(344); booleanExpression(8); } break; @@ -2474,13 +2474,13 @@ class SqlBaseParser extends Parser { _localctx = new ExistsContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(347); + setState(345); match(EXISTS); - setState(348); + setState(346); match(T__0); - setState(349); + setState(347); query(); - setState(350); + setState(348); match(T__1); } break; @@ -2489,29 +2489,29 @@ class SqlBaseParser extends Parser { _localctx = new StringQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(352); + setState(350); match(QUERY); - setState(353); + setState(351); match(T__0); - setState(354); + setState(352); ((StringQueryContext)_localctx).queryString = match(STRING); - setState(359); + setState(357); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(355); + setState(353); match(T__2); - setState(356); + setState(354); ((StringQueryContext)_localctx).options = match(STRING); } } - setState(361); + setState(359); _errHandler.sync(this); _la = _input.LA(1); } - setState(362); + setState(360); match(T__1); } break; @@ -2520,33 +2520,33 @@ class SqlBaseParser extends Parser { _localctx = new MatchQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(363); + setState(361); match(MATCH); - setState(364); + setState(362); match(T__0); - setState(365); + setState(363); ((MatchQueryContext)_localctx).singleField = qualifiedName(); - 
setState(366); + setState(364); match(T__2); - setState(367); + setState(365); ((MatchQueryContext)_localctx).queryString = match(STRING); - setState(372); + setState(370); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(368); + setState(366); match(T__2); - setState(369); + setState(367); ((MatchQueryContext)_localctx).options = match(STRING); } } - setState(374); + setState(372); _errHandler.sync(this); _la = _input.LA(1); } - setState(375); + setState(373); match(T__1); } break; @@ -2555,33 +2555,33 @@ class SqlBaseParser extends Parser { _localctx = new MultiMatchQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(377); + setState(375); match(MATCH); - setState(378); + setState(376); match(T__0); - setState(379); + setState(377); ((MultiMatchQueryContext)_localctx).multiFields = match(STRING); - setState(380); + setState(378); match(T__2); - setState(381); + setState(379); ((MultiMatchQueryContext)_localctx).queryString = match(STRING); - setState(386); + setState(384); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(382); + setState(380); match(T__2); - setState(383); + setState(381); ((MultiMatchQueryContext)_localctx).options = match(STRING); } } - setState(388); + setState(386); _errHandler.sync(this); _la = _input.LA(1); } - setState(389); + setState(387); match(T__1); } break; @@ -2590,13 +2590,13 @@ class SqlBaseParser extends Parser { _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(390); + setState(388); predicated(); } break; } _ctx.stop = _input.LT(-1); - setState(401); + setState(399); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,54,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -2604,7 +2604,7 @@ class SqlBaseParser extends Parser { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(399); + setState(397); 
_errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,53,_ctx) ) { case 1: @@ -2612,11 +2612,11 @@ class SqlBaseParser extends Parser { _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(393); + setState(391); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(394); + setState(392); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(395); + setState(393); ((LogicalBinaryContext)_localctx).right = booleanExpression(3); } break; @@ -2625,18 +2625,18 @@ class SqlBaseParser extends Parser { _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(396); + setState(394); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(397); + setState(395); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(398); + setState(396); ((LogicalBinaryContext)_localctx).right = booleanExpression(2); } break; } } } - setState(403); + setState(401); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,54,_ctx); } @@ -2685,14 +2685,14 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(404); + setState(402); valueExpression(0); - setState(406); + setState(404); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,55,_ctx) ) { case 1: { - setState(405); + setState(403); predicate(); } break; @@ -2762,104 +2762,104 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 46, RULE_predicate); int _la; try { - setState(449); + setState(447); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,62,_ctx) ) { case 
1: enterOuterAlt(_localctx, 1); { - setState(409); + setState(407); _la = _input.LA(1); if (_la==NOT) { { - setState(408); + setState(406); match(NOT); } } - setState(411); + setState(409); ((PredicateContext)_localctx).kind = match(BETWEEN); - setState(412); + setState(410); ((PredicateContext)_localctx).lower = valueExpression(0); - setState(413); + setState(411); match(AND); - setState(414); + setState(412); ((PredicateContext)_localctx).upper = valueExpression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(417); + setState(415); _la = _input.LA(1); if (_la==NOT) { { - setState(416); + setState(414); match(NOT); } } - setState(419); + setState(417); ((PredicateContext)_localctx).kind = match(IN); - setState(420); + setState(418); match(T__0); - setState(421); + setState(419); expression(); - setState(426); + setState(424); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(422); + setState(420); match(T__2); - setState(423); + setState(421); expression(); } } - setState(428); + setState(426); _errHandler.sync(this); _la = _input.LA(1); } - setState(429); + setState(427); match(T__1); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(432); + setState(430); _la = _input.LA(1); if (_la==NOT) { { - setState(431); + setState(429); match(NOT); } } - setState(434); + setState(432); ((PredicateContext)_localctx).kind = match(IN); - setState(435); + setState(433); match(T__0); - setState(436); + setState(434); query(); - setState(437); + setState(435); match(T__1); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(440); + setState(438); _la = _input.LA(1); if (_la==NOT) { { - setState(439); + setState(437); match(NOT); } } - setState(442); + setState(440); ((PredicateContext)_localctx).kind = _input.LT(1); _la = _input.LA(1); if ( !(_la==LIKE || _la==RLIKE) ) { @@ -2867,25 +2867,25 @@ class SqlBaseParser extends Parser { } else { consume(); } - setState(443); + setState(441); 
((PredicateContext)_localctx).pattern = valueExpression(0); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(444); + setState(442); match(IS); - setState(446); + setState(444); _la = _input.LA(1); if (_la==NOT) { { - setState(445); + setState(443); match(NOT); } } - setState(448); + setState(446); ((PredicateContext)_localctx).kind = match(NULL); } break; @@ -3028,7 +3028,7 @@ class SqlBaseParser extends Parser { int _alt; enterOuterAlt(_localctx, 1); { - setState(455); + setState(453); switch (_input.LA(1)) { case T__0: case ANALYZE: @@ -3071,7 +3071,7 @@ class SqlBaseParser extends Parser { _ctx = _localctx; _prevctx = _localctx; - setState(452); + setState(450); primaryExpression(); } break; @@ -3081,7 +3081,7 @@ class SqlBaseParser extends Parser { _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(453); + setState(451); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -3089,7 +3089,7 @@ class SqlBaseParser extends Parser { } else { consume(); } - setState(454); + setState(452); valueExpression(4); } break; @@ -3097,7 +3097,7 @@ class SqlBaseParser extends Parser { throw new NoViableAltException(this); } _ctx.stop = _input.LT(-1); - setState(469); + setState(467); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,65,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -3105,7 +3105,7 @@ class SqlBaseParser extends Parser { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(467); + setState(465); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,64,_ctx) ) { case 1: @@ -3113,17 +3113,17 @@ class SqlBaseParser extends Parser { _localctx = new ArithmeticBinaryContext(new ValueExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, 
RULE_valueExpression); - setState(457); + setState(455); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(458); + setState(456); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); - if ( !(((((_la - 74)) & ~0x3f) == 0 && ((1L << (_la - 74)) & ((1L << (ASTERISK - 74)) | (1L << (SLASH - 74)) | (1L << (PERCENT - 74)))) != 0)) ) { + if ( !(((((_la - 73)) & ~0x3f) == 0 && ((1L << (_la - 73)) & ((1L << (ASTERISK - 73)) | (1L << (SLASH - 73)) | (1L << (PERCENT - 73)))) != 0)) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); } else { consume(); } - setState(459); + setState(457); ((ArithmeticBinaryContext)_localctx).right = valueExpression(4); } break; @@ -3132,9 +3132,9 @@ class SqlBaseParser extends Parser { _localctx = new ArithmeticBinaryContext(new ValueExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_valueExpression); - setState(460); + setState(458); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(461); + setState(459); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -3142,7 +3142,7 @@ class SqlBaseParser extends Parser { } else { consume(); } - setState(462); + setState(460); ((ArithmeticBinaryContext)_localctx).right = valueExpression(3); } break; @@ -3151,18 +3151,18 @@ class SqlBaseParser extends Parser { _localctx = new ComparisonContext(new ValueExpressionContext(_parentctx, _parentState)); ((ComparisonContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_valueExpression); - setState(463); + setState(461); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(464); + setState(462); comparisonOperator(); - setState(465); + 
setState(463); ((ComparisonContext)_localctx).right = valueExpression(2); } break; } } } - setState(471); + setState(469); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,65,_ctx); } @@ -3191,13 +3191,8 @@ class SqlBaseParser extends Parser { } } public static class DereferenceContext extends PrimaryExpressionContext { - public ColumnExpressionContext base; - public IdentifierContext fieldName; - public ColumnExpressionContext columnExpression() { - return getRuleContext(ColumnExpressionContext.class,0); - } - public IdentifierContext identifier() { - return getRuleContext(IdentifierContext.class,0); + public QualifiedNameContext qualifiedName() { + return getRuleContext(QualifiedNameContext.class,0); } public DereferenceContext(PrimaryExpressionContext ctx) { copyFrom(ctx); } @Override @@ -3258,8 +3253,8 @@ class SqlBaseParser extends Parser { } } public static class ColumnReferenceContext extends PrimaryExpressionContext { - public ColumnExpressionContext columnExpression() { - return getRuleContext(ColumnExpressionContext.class,0); + public IdentifierContext identifier() { + return getRuleContext(IdentifierContext.class,0); } public ColumnReferenceContext(PrimaryExpressionContext ctx) { copyFrom(ctx); } @Override @@ -3321,10 +3316,9 @@ class SqlBaseParser extends Parser { } } public static class StarContext extends PrimaryExpressionContext { - public ColumnExpressionContext qualifier; public TerminalNode ASTERISK() { return getToken(SqlBaseParser.ASTERISK, 0); } - public ColumnExpressionContext columnExpression() { - return getRuleContext(ColumnExpressionContext.class,0); + public QualifiedNameContext qualifiedName() { + return getRuleContext(QualifiedNameContext.class,0); } public StarContext(PrimaryExpressionContext ctx) { copyFrom(ctx); } @Override @@ -3394,24 +3388,24 @@ class SqlBaseParser extends Parser { enterRule(_localctx, 50, RULE_primaryExpression); int _la; try { - setState(524); + setState(519); _errHandler.sync(this); switch 
( getInterpreter().adaptivePredict(_input,70,_ctx) ) { case 1: _localctx = new CastContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(472); + setState(470); match(CAST); - setState(473); + setState(471); match(T__0); - setState(474); + setState(472); expression(); - setState(475); + setState(473); match(AS); - setState(476); + setState(474); dataType(); - setState(477); + setState(475); match(T__1); } break; @@ -3419,17 +3413,17 @@ class SqlBaseParser extends Parser { _localctx = new ExtractContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(479); + setState(477); match(EXTRACT); - setState(480); + setState(478); match(T__0); - setState(481); + setState(479); ((ExtractContext)_localctx).field = identifier(); - setState(482); + setState(480); match(FROM); - setState(483); + setState(481); valueExpression(0); - setState(484); + setState(482); match(T__1); } break; @@ -3437,7 +3431,7 @@ class SqlBaseParser extends Parser { _localctx = new ConstantDefaultContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(486); + setState(484); constant(); } break; @@ -3445,7 +3439,7 @@ class SqlBaseParser extends Parser { _localctx = new StarContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(487); + setState(485); match(ASTERISK); } break; @@ -3453,18 +3447,18 @@ class SqlBaseParser extends Parser { _localctx = new StarContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(491); + setState(489); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FROM) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << QUERY) | (1L << RLIKE) | (1L << SCHEMAS) | (1L << SHOW) | (1L << TABLES) | (1L << TEXT) | (1L << VERIFY))) != 0) || ((((_la - 81)) & ~0x3f) == 0 && ((1L << (_la - 81)) & ((1L << (IDENTIFIER - 81)) | (1L << 
(DIGIT_IDENTIFIER - 81)) | (1L << (QUOTED_IDENTIFIER - 81)) | (1L << (BACKQUOTED_IDENTIFIER - 81)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FROM) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << QUERY) | (1L << RLIKE) | (1L << SCHEMAS) | (1L << SHOW) | (1L << TABLES) | (1L << TEXT) | (1L << VERIFY))) != 0) || ((((_la - 80)) & ~0x3f) == 0 && ((1L << (_la - 80)) & ((1L << (IDENTIFIER - 80)) | (1L << (DIGIT_IDENTIFIER - 80)) | (1L << (QUOTED_IDENTIFIER - 80)) | (1L << (BACKQUOTED_IDENTIFIER - 80)))) != 0)) { { - setState(488); - ((StarContext)_localctx).qualifier = columnExpression(); - setState(489); + setState(486); + qualifiedName(); + setState(487); match(T__3); } } - setState(493); + setState(491); match(ASTERISK); } break; @@ -3472,45 +3466,45 @@ class SqlBaseParser extends Parser { _localctx = new FunctionCallContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(494); + setState(492); identifier(); - setState(495); + setState(493); match(T__0); - setState(507); + setState(505); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ALL) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << COLUMNS) | (1L << DEBUG) | (1L << DISTINCT) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FORMAT) | (1L << FROM) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << MATCH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << QUERY) | (1L << RLIKE) | (1L << SCHEMAS) | (1L << SHOW) | (1L << TABLES) | (1L << TEXT) | (1L << TRUE) | (1L << VERIFY))) != 0) || ((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & ((1L << (PLUS - 72)) | (1L << (MINUS - 72)) | (1L << (ASTERISK - 72)) 
| (1L << (STRING - 72)) | (1L << (INTEGER_VALUE - 72)) | (1L << (DECIMAL_VALUE - 72)) | (1L << (IDENTIFIER - 72)) | (1L << (DIGIT_IDENTIFIER - 72)) | (1L << (QUOTED_IDENTIFIER - 72)) | (1L << (BACKQUOTED_IDENTIFIER - 72)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ALL) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << COLUMNS) | (1L << DEBUG) | (1L << DISTINCT) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FORMAT) | (1L << FROM) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << MATCH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << QUERY) | (1L << RLIKE) | (1L << SCHEMAS) | (1L << SHOW) | (1L << TABLES) | (1L << TEXT) | (1L << TRUE) | (1L << VERIFY))) != 0) || ((((_la - 71)) & ~0x3f) == 0 && ((1L << (_la - 71)) & ((1L << (PLUS - 71)) | (1L << (MINUS - 71)) | (1L << (ASTERISK - 71)) | (1L << (STRING - 71)) | (1L << (INTEGER_VALUE - 71)) | (1L << (DECIMAL_VALUE - 71)) | (1L << (IDENTIFIER - 71)) | (1L << (DIGIT_IDENTIFIER - 71)) | (1L << (QUOTED_IDENTIFIER - 71)) | (1L << (BACKQUOTED_IDENTIFIER - 71)))) != 0)) { { - setState(497); + setState(495); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { - setState(496); + setState(494); setQuantifier(); } } - setState(499); + setState(497); expression(); - setState(504); + setState(502); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(500); + setState(498); match(T__2); - setState(501); + setState(499); expression(); } } - setState(506); + setState(504); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(509); + setState(507); match(T__1); } break; @@ -3518,11 +3512,11 @@ class SqlBaseParser extends Parser { _localctx = new SubqueryExpressionContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(511); + setState(509); match(T__0); - setState(512); + setState(510); query(); - setState(513); + 
setState(511); match(T__1); } break; @@ -3530,31 +3524,27 @@ class SqlBaseParser extends Parser { _localctx = new ColumnReferenceContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(515); - columnExpression(); + setState(513); + identifier(); } break; case 9: _localctx = new DereferenceContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(516); - ((DereferenceContext)_localctx).base = columnExpression(); - setState(517); - match(T__3); - setState(518); - ((DereferenceContext)_localctx).fieldName = identifier(); + setState(514); + qualifiedName(); } break; case 10: _localctx = new ParenthesizedExpressionContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(520); + setState(515); match(T__0); - setState(521); + setState(516); expression(); - setState(522); + setState(517); match(T__1); } break; @@ -3571,85 +3561,6 @@ class SqlBaseParser extends Parser { return _localctx; } - public static class ColumnExpressionContext extends ParserRuleContext { - public IdentifierContext alias; - public TableIdentifierContext table; - public IdentifierContext name; - public List identifier() { - return getRuleContexts(IdentifierContext.class); - } - public IdentifierContext identifier(int i) { - return getRuleContext(IdentifierContext.class,i); - } - public TableIdentifierContext tableIdentifier() { - return getRuleContext(TableIdentifierContext.class,0); - } - public ColumnExpressionContext(ParserRuleContext parent, int invokingState) { - super(parent, invokingState); - } - @Override public int getRuleIndex() { return RULE_columnExpression; } - @Override - public void enterRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterColumnExpression(this); - } - @Override - public void exitRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitColumnExpression(this); - } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor 
instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitColumnExpression(this); - else return visitor.visitChildren(this); - } - } - - public final ColumnExpressionContext columnExpression() throws RecognitionException { - ColumnExpressionContext _localctx = new ColumnExpressionContext(_ctx, getState()); - enterRule(_localctx, 52, RULE_columnExpression); - try { - enterOuterAlt(_localctx, 1); - { - setState(532); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,72,_ctx) ) { - case 1: - { - setState(528); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,71,_ctx) ) { - case 1: - { - setState(526); - ((ColumnExpressionContext)_localctx).alias = identifier(); - } - break; - case 2: - { - setState(527); - ((ColumnExpressionContext)_localctx).table = tableIdentifier(); - } - break; - } - setState(530); - match(T__3); - } - break; - } - setState(534); - ((ColumnExpressionContext)_localctx).name = identifier(); - } - } - catch (RecognitionException re) { - _localctx.exception = re; - _errHandler.reportError(this, re); - _errHandler.recover(this, re); - } - finally { - exitRule(); - } - return _localctx; - } - public static class ConstantContext extends ParserRuleContext { public ConstantContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -3759,16 +3670,16 @@ class SqlBaseParser extends Parser { public final ConstantContext constant() throws RecognitionException { ConstantContext _localctx = new ConstantContext(_ctx, getState()); - enterRule(_localctx, 54, RULE_constant); + enterRule(_localctx, 52, RULE_constant); try { int _alt; - setState(547); + setState(532); switch (_input.LA(1)) { case NULL: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(536); + setState(521); match(NULL); } break; @@ -3801,9 +3712,9 @@ class SqlBaseParser extends Parser { _localctx = new TypeConstructorContext(_localctx); enterOuterAlt(_localctx, 2); { - 
setState(537); + setState(522); identifier(); - setState(538); + setState(523); match(STRING); } break; @@ -3812,7 +3723,7 @@ class SqlBaseParser extends Parser { _localctx = new NumericLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(540); + setState(525); number(); } break; @@ -3821,7 +3732,7 @@ class SqlBaseParser extends Parser { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(541); + setState(526); booleanValue(); } break; @@ -3829,7 +3740,7 @@ class SqlBaseParser extends Parser { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(543); + setState(528); _errHandler.sync(this); _alt = 1; do { @@ -3837,7 +3748,7 @@ class SqlBaseParser extends Parser { case 1: { { - setState(542); + setState(527); match(STRING); } } @@ -3845,9 +3756,9 @@ class SqlBaseParser extends Parser { default: throw new NoViableAltException(this); } - setState(545); + setState(530); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,73,_ctx); + _alt = getInterpreter().adaptivePredict(_input,71,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); } break; @@ -3894,14 +3805,14 @@ class SqlBaseParser extends Parser { public final ComparisonOperatorContext comparisonOperator() throws RecognitionException { ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, getState()); - enterRule(_localctx, 56, RULE_comparisonOperator); + enterRule(_localctx, 54, RULE_comparisonOperator); int _la; try { enterOuterAlt(_localctx, 1); { - setState(549); + setState(534); _la = _input.LA(1); - if ( !(((((_la - 66)) & ~0x3f) == 0 && ((1L << (_la - 66)) & ((1L << (EQ - 66)) | (1L << (NEQ - 66)) | (1L << (LT - 66)) | (1L << (LTE - 66)) | (1L << (GT - 66)) | (1L << (GTE - 66)))) != 0)) ) { + if ( !(((((_la - 65)) & ~0x3f) == 0 && ((1L << (_la - 65)) & ((1L << (EQ - 65)) | (1L << (NEQ - 65)) | (1L << (LT - 65)) | (1L << (LTE - 65)) | (1L << 
(GT - 65)) | (1L << (GTE - 65)))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); @@ -3943,12 +3854,12 @@ class SqlBaseParser extends Parser { public final BooleanValueContext booleanValue() throws RecognitionException { BooleanValueContext _localctx = new BooleanValueContext(_ctx, getState()); - enterRule(_localctx, 58, RULE_booleanValue); + enterRule(_localctx, 56, RULE_booleanValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(551); + setState(536); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4001,12 +3912,12 @@ class SqlBaseParser extends Parser { public final DataTypeContext dataType() throws RecognitionException { DataTypeContext _localctx = new DataTypeContext(_ctx, getState()); - enterRule(_localctx, 60, RULE_dataType); + enterRule(_localctx, 58, RULE_dataType); try { _localctx = new PrimitiveDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(553); + setState(538); identifier(); } } @@ -4022,6 +3933,8 @@ class SqlBaseParser extends Parser { } public static class QualifiedNameContext extends ParserRuleContext { + public IdentifierContext path; + public IdentifierContext name; public List identifier() { return getRuleContexts(IdentifierContext.class); } @@ -4049,29 +3962,31 @@ class SqlBaseParser extends Parser { public final QualifiedNameContext qualifiedName() throws RecognitionException { QualifiedNameContext _localctx = new QualifiedNameContext(_ctx, getState()); - enterRule(_localctx, 62, RULE_qualifiedName); - int _la; + enterRule(_localctx, 60, RULE_qualifiedName); try { + int _alt; enterOuterAlt(_localctx, 1); { - setState(555); - identifier(); - setState(560); + setState(545); _errHandler.sync(this); - _la = _input.LA(1); - while (_la==T__3) { - { - { - setState(556); - match(T__3); - setState(557); - identifier(); + _alt = getInterpreter().adaptivePredict(_input,73,_ctx); + while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { + if 
( _alt==1 ) { + { + { + setState(540); + ((QualifiedNameContext)_localctx).path = identifier(); + setState(541); + match(T__3); + } + } } - } - setState(562); + setState(547); _errHandler.sync(this); - _la = _input.LA(1); + _alt = getInterpreter().adaptivePredict(_input,73,_ctx); } + setState(548); + ((QualifiedNameContext)_localctx).name = identifier(); } } catch (RecognitionException re) { @@ -4111,11 +4026,11 @@ class SqlBaseParser extends Parser { public final TableIdentifierContext tableIdentifier() throws RecognitionException { TableIdentifierContext _localctx = new TableIdentifierContext(_ctx, getState()); - enterRule(_localctx, 64, RULE_tableIdentifier); + enterRule(_localctx, 62, RULE_tableIdentifier); try { enterOuterAlt(_localctx, 1); { - setState(563); + setState(550); ((TableIdentifierContext)_localctx).index = identifier(); } } @@ -4158,15 +4073,15 @@ class SqlBaseParser extends Parser { public final IdentifierContext identifier() throws RecognitionException { IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); - enterRule(_localctx, 66, RULE_identifier); + enterRule(_localctx, 64, RULE_identifier); try { - setState(567); + setState(554); switch (_input.LA(1)) { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(565); + setState(552); quoteIdentifier(); } break; @@ -4196,7 +4111,7 @@ class SqlBaseParser extends Parser { case DIGIT_IDENTIFIER: enterOuterAlt(_localctx, 2); { - setState(566); + setState(553); unquoteIdentifier(); } break; @@ -4263,15 +4178,15 @@ class SqlBaseParser extends Parser { public final QuoteIdentifierContext quoteIdentifier() throws RecognitionException { QuoteIdentifierContext _localctx = new QuoteIdentifierContext(_ctx, getState()); - enterRule(_localctx, 68, RULE_quoteIdentifier); + enterRule(_localctx, 66, RULE_quoteIdentifier); try { - setState(571); + setState(558); switch (_input.LA(1)) { case QUOTED_IDENTIFIER: _localctx = new 
QuotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(569); + setState(556); match(QUOTED_IDENTIFIER); } break; @@ -4279,7 +4194,7 @@ class SqlBaseParser extends Parser { _localctx = new BackQuotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(570); + setState(557); match(BACKQUOTED_IDENTIFIER); } break; @@ -4349,15 +4264,15 @@ class SqlBaseParser extends Parser { public final UnquoteIdentifierContext unquoteIdentifier() throws RecognitionException { UnquoteIdentifierContext _localctx = new UnquoteIdentifierContext(_ctx, getState()); - enterRule(_localctx, 70, RULE_unquoteIdentifier); + enterRule(_localctx, 68, RULE_unquoteIdentifier); try { - setState(576); + setState(563); switch (_input.LA(1)) { case IDENTIFIER: _localctx = new UnquotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(573); + setState(560); match(IDENTIFIER); } break; @@ -4386,7 +4301,7 @@ class SqlBaseParser extends Parser { _localctx = new UnquotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(574); + setState(561); nonReserved(); } break; @@ -4394,7 +4309,7 @@ class SqlBaseParser extends Parser { _localctx = new DigitIdentifierContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(575); + setState(562); match(DIGIT_IDENTIFIER); } break; @@ -4461,15 +4376,15 @@ class SqlBaseParser extends Parser { public final NumberContext number() throws RecognitionException { NumberContext _localctx = new NumberContext(_ctx, getState()); - enterRule(_localctx, 72, RULE_number); + enterRule(_localctx, 70, RULE_number); try { - setState(580); + setState(567); switch (_input.LA(1)) { case DECIMAL_VALUE: _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(578); + setState(565); match(DECIMAL_VALUE); } break; @@ -4477,7 +4392,7 @@ class SqlBaseParser extends Parser { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(579); + 
setState(566); match(INTEGER_VALUE); } break; @@ -4540,12 +4455,12 @@ class SqlBaseParser extends Parser { public final NonReservedContext nonReserved() throws RecognitionException { NonReservedContext _localctx = new NonReservedContext(_ctx, getState()); - enterRule(_localctx, 74, RULE_nonReserved); + enterRule(_localctx, 72, RULE_nonReserved); int _la; try { enterOuterAlt(_localctx, 1); { - setState(582); + setState(569); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FROM) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << QUERY) | (1L << RLIKE) | (1L << SCHEMAS) | (1L << SHOW) | (1L << TABLES) | (1L << TEXT) | (1L << VERIFY))) != 0)) ) { _errHandler.recoverInline(this); @@ -4596,236 +4511,231 @@ class SqlBaseParser extends Parser { } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3[\u024b\4\2\t\2\4"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3Z\u023e\4\2\t\2\4"+ "\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t"+ "\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ "\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t \4!"+ - "\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\3\2\3\2\3\2\3\3\3\3\3\3\3"+ - "\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4^\n\4\f\4\16\4a\13\4\3\4\5\4d\n"+ - "\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4m\n\4\f\4\16\4p\13\4\3\4\5\4s\n\4\3"+ - "\4\3\4\3\4\3\4\5\4y\n\4\3\4\5\4|\n\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3"+ - "\4\5\4\u0087\n\4\3\4\5\4\u008a\n\4\3\4\3\4\5\4\u008e\n\4\3\5\3\5\3\5\3"+ - "\5\7\5\u0094\n\5\f\5\16\5\u0097\13\5\5\5\u0099\n\5\3\5\3\5\3\6\3\6\3\6"+ - 
"\3\6\3\6\3\6\7\6\u00a3\n\6\f\6\16\6\u00a6\13\6\5\6\u00a8\n\6\3\6\3\6\5"+ - "\6\u00ac\n\6\3\7\3\7\3\7\3\7\3\7\5\7\u00b3\n\7\3\b\3\b\5\b\u00b7\n\b\3"+ - "\t\3\t\5\t\u00bb\n\t\3\t\3\t\3\t\7\t\u00c0\n\t\f\t\16\t\u00c3\13\t\3\t"+ - "\5\t\u00c6\n\t\3\t\3\t\5\t\u00ca\n\t\3\t\3\t\3\t\5\t\u00cf\n\t\3\t\3\t"+ - "\5\t\u00d3\n\t\3\n\3\n\3\n\3\n\7\n\u00d9\n\n\f\n\16\n\u00dc\13\n\3\13"+ - "\5\13\u00df\n\13\3\13\3\13\3\13\7\13\u00e4\n\13\f\13\16\13\u00e7\13\13"+ - "\3\f\3\f\3\r\3\r\3\r\3\r\7\r\u00ef\n\r\f\r\16\r\u00f2\13\r\5\r\u00f4\n"+ - "\r\3\r\3\r\5\r\u00f8\n\r\3\16\3\16\3\16\3\16\3\16\3\16\3\17\3\17\3\20"+ - "\3\20\5\20\u0104\n\20\3\20\5\20\u0107\n\20\3\21\3\21\7\21\u010b\n\21\f"+ - "\21\16\21\u010e\13\21\3\22\3\22\3\22\3\22\5\22\u0114\n\22\3\22\3\22\3"+ - "\22\3\22\3\22\5\22\u011b\n\22\3\23\5\23\u011e\n\23\3\23\3\23\5\23\u0122"+ - "\n\23\3\23\3\23\5\23\u0126\n\23\3\23\3\23\5\23\u012a\n\23\5\23\u012c\n"+ - "\23\3\24\3\24\3\24\3\24\3\24\3\24\3\24\7\24\u0135\n\24\f\24\16\24\u0138"+ - "\13\24\3\24\3\24\5\24\u013c\n\24\3\25\3\25\5\25\u0140\n\25\3\25\5\25\u0143"+ - "\n\25\3\25\3\25\3\25\3\25\5\25\u0149\n\25\3\25\5\25\u014c\n\25\3\25\3"+ - "\25\3\25\3\25\5\25\u0152\n\25\3\25\5\25\u0155\n\25\5\25\u0157\n\25\3\26"+ - "\3\26\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27"+ - "\7\27\u0168\n\27\f\27\16\27\u016b\13\27\3\27\3\27\3\27\3\27\3\27\3\27"+ - "\3\27\3\27\7\27\u0175\n\27\f\27\16\27\u0178\13\27\3\27\3\27\3\27\3\27"+ - "\3\27\3\27\3\27\3\27\3\27\7\27\u0183\n\27\f\27\16\27\u0186\13\27\3\27"+ - "\3\27\5\27\u018a\n\27\3\27\3\27\3\27\3\27\3\27\3\27\7\27\u0192\n\27\f"+ - "\27\16\27\u0195\13\27\3\30\3\30\5\30\u0199\n\30\3\31\5\31\u019c\n\31\3"+ - "\31\3\31\3\31\3\31\3\31\3\31\5\31\u01a4\n\31\3\31\3\31\3\31\3\31\3\31"+ - "\7\31\u01ab\n\31\f\31\16\31\u01ae\13\31\3\31\3\31\3\31\5\31\u01b3\n\31"+ - "\3\31\3\31\3\31\3\31\3\31\3\31\5\31\u01bb\n\31\3\31\3\31\3\31\3\31\5\31"+ - "\u01c1\n\31\3\31\5\31\u01c4\n\31\3\32\3\32\3\32\3\32\5\32\u01ca\n\32\3"+ - 
"\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\7\32\u01d6\n\32\f\32"+ - "\16\32\u01d9\13\32\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3"+ - "\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\5\33\u01ee\n\33\3\33\3\33"+ - "\3\33\3\33\5\33\u01f4\n\33\3\33\3\33\3\33\7\33\u01f9\n\33\f\33\16\33\u01fc"+ - "\13\33\5\33\u01fe\n\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3"+ - "\33\3\33\3\33\3\33\3\33\3\33\5\33\u020f\n\33\3\34\3\34\5\34\u0213\n\34"+ - "\3\34\3\34\5\34\u0217\n\34\3\34\3\34\3\35\3\35\3\35\3\35\3\35\3\35\3\35"+ - "\6\35\u0222\n\35\r\35\16\35\u0223\5\35\u0226\n\35\3\36\3\36\3\37\3\37"+ - "\3 \3 \3!\3!\3!\7!\u0231\n!\f!\16!\u0234\13!\3\"\3\"\3#\3#\5#\u023a\n"+ - "#\3$\3$\5$\u023e\n$\3%\3%\3%\5%\u0243\n%\3&\3&\5&\u0247\n&\3\'\3\'\3\'"+ - "\2\4,\62(\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\66"+ - "8:<>@BDFHJL\2\20\b\2\7\7\t\t\26\26))//\63\63\4\2\37\37==\4\2\t\t//\4\2"+ - "\34\34\"\"\3\2\23\24\4\2\7\7QQ\4\2\r\r\23\23\4\2\7\7\25\25\4\2\'\'88\3"+ - "\2JK\3\2LN\3\2DI\4\2\32\32>>\16\2\b\t\21\22\26\26\30\30\33\34\36\37))"+ - "//\63\6689;=@@\u0292\2N\3\2\2\2\4Q\3\2\2\2\6\u008d\3\2\2\2\b\u0098\3\2"+ - "\2\2\n\u009c\3\2\2\2\f\u00b2\3\2\2\2\16\u00b4\3\2\2\2\20\u00b8\3\2\2\2"+ - "\22\u00d4\3\2\2\2\24\u00de\3\2\2\2\26\u00e8\3\2\2\2\30\u00f7\3\2\2\2\32"+ - "\u00f9\3\2\2\2\34\u00ff\3\2\2\2\36\u0101\3\2\2\2 \u0108\3\2\2\2\"\u011a"+ - "\3\2\2\2$\u012b\3\2\2\2&\u013b\3\2\2\2(\u0156\3\2\2\2*\u0158\3\2\2\2,"+ - "\u0189\3\2\2\2.\u0196\3\2\2\2\60\u01c3\3\2\2\2\62\u01c9\3\2\2\2\64\u020e"+ - "\3\2\2\2\66\u0216\3\2\2\28\u0225\3\2\2\2:\u0227\3\2\2\2<\u0229\3\2\2\2"+ - ">\u022b\3\2\2\2@\u022d\3\2\2\2B\u0235\3\2\2\2D\u0239\3\2\2\2F\u023d\3"+ - "\2\2\2H\u0242\3\2\2\2J\u0246\3\2\2\2L\u0248\3\2\2\2NO\5\6\4\2OP\7\2\2"+ - "\3P\3\3\2\2\2QR\5*\26\2RS\7\2\2\3S\5\3\2\2\2T\u008e\5\b\5\2Uc\7\30\2\2"+ - "V_\7\3\2\2WX\7\65\2\2X^\t\2\2\2YZ\7\33\2\2Z^\t\3\2\2[\\\7@\2\2\\^\5<\37"+ - "\2]W\3\2\2\2]Y\3\2\2\2][\3\2\2\2^a\3\2\2\2_]\3\2\2\2_`\3\2\2\2`b\3\2\2"+ - 
"\2a_\3\2\2\2bd\7\4\2\2cV\3\2\2\2cd\3\2\2\2de\3\2\2\2e\u008e\5\6\4\2fr"+ - "\7\22\2\2gn\7\3\2\2hi\7\65\2\2im\t\4\2\2jk\7\33\2\2km\t\3\2\2lh\3\2\2"+ - "\2lj\3\2\2\2mp\3\2\2\2nl\3\2\2\2no\3\2\2\2oq\3\2\2\2pn\3\2\2\2qs\7\4\2"+ - "\2rg\3\2\2\2rs\3\2\2\2st\3\2\2\2t\u008e\5\6\4\2uv\7;\2\2v{\7<\2\2wy\7"+ - "\'\2\2xw\3\2\2\2xy\3\2\2\2yz\3\2\2\2z|\7P\2\2{x\3\2\2\2{|\3\2\2\2|\u008e"+ - "\3\2\2\2}~\7;\2\2~\177\7\21\2\2\177\u0080\t\5\2\2\u0080\u008e\5B\"\2\u0081"+ - "\u0082\t\6\2\2\u0082\u008e\5B\"\2\u0083\u0084\7;\2\2\u0084\u0089\7\36"+ - "\2\2\u0085\u0087\7\'\2\2\u0086\u0085\3\2\2\2\u0086\u0087\3\2\2\2\u0087"+ - "\u0088\3\2\2\2\u0088\u008a\7P\2\2\u0089\u0086\3\2\2\2\u0089\u008a\3\2"+ - "\2\2\u008a\u008e\3\2\2\2\u008b\u008c\7;\2\2\u008c\u008e\79\2\2\u008dT"+ - "\3\2\2\2\u008dU\3\2\2\2\u008df\3\2\2\2\u008du\3\2\2\2\u008d}\3\2\2\2\u008d"+ - "\u0081\3\2\2\2\u008d\u0083\3\2\2\2\u008d\u008b\3\2\2\2\u008e\7\3\2\2\2"+ - "\u008f\u0090\7C\2\2\u0090\u0095\5\32\16\2\u0091\u0092\7\5\2\2\u0092\u0094"+ - "\5\32\16\2\u0093\u0091\3\2\2\2\u0094\u0097\3\2\2\2\u0095\u0093\3\2\2\2"+ - "\u0095\u0096\3\2\2\2\u0096\u0099\3\2\2\2\u0097\u0095\3\2\2\2\u0098\u008f"+ - "\3\2\2\2\u0098\u0099\3\2\2\2\u0099\u009a\3\2\2\2\u009a\u009b\5\n\6\2\u009b"+ - "\t\3\2\2\2\u009c\u00a7\5\f\7\2\u009d\u009e\7\61\2\2\u009e\u009f\7\17\2"+ - "\2\u009f\u00a4\5\16\b\2\u00a0\u00a1\7\5\2\2\u00a1\u00a3\5\16\b\2\u00a2"+ - "\u00a0\3\2\2\2\u00a3\u00a6\3\2\2\2\u00a4\u00a2\3\2\2\2\u00a4\u00a5\3\2"+ - "\2\2\u00a5\u00a8\3\2\2\2\u00a6\u00a4\3\2\2\2\u00a7\u009d\3\2\2\2\u00a7"+ - "\u00a8\3\2\2\2\u00a8\u00ab\3\2\2\2\u00a9\u00aa\7(\2\2\u00aa\u00ac\t\7"+ - "\2\2\u00ab\u00a9\3\2\2\2\u00ab\u00ac\3\2\2\2\u00ac\13\3\2\2\2\u00ad\u00b3"+ - "\5\20\t\2\u00ae\u00af\7\3\2\2\u00af\u00b0\5\n\6\2\u00b0\u00b1\7\4\2\2"+ - "\u00b1\u00b3\3\2\2\2\u00b2\u00ad\3\2\2\2\u00b2\u00ae\3\2\2\2\u00b3\r\3"+ - "\2\2\2\u00b4\u00b6\5*\26\2\u00b5\u00b7\t\b\2\2\u00b6\u00b5\3\2\2\2\u00b6"+ - "\u00b7\3\2\2\2\u00b7\17\3\2\2\2\u00b8\u00ba\7:\2\2\u00b9\u00bb\5\34\17"+ - 
"\2\u00ba\u00b9\3\2\2\2\u00ba\u00bb\3\2\2\2\u00bb\u00bc\3\2\2\2\u00bc\u00c1"+ - "\5\36\20\2\u00bd\u00be\7\5\2\2\u00be\u00c0\5\36\20\2\u00bf\u00bd\3\2\2"+ - "\2\u00c0\u00c3\3\2\2\2\u00c1\u00bf\3\2\2\2\u00c1\u00c2\3\2\2\2\u00c2\u00c5"+ - "\3\2\2\2\u00c3\u00c1\3\2\2\2\u00c4\u00c6\5\22\n\2\u00c5\u00c4\3\2\2\2"+ - "\u00c5\u00c6\3\2\2\2\u00c6\u00c9\3\2\2\2\u00c7\u00c8\7B\2\2\u00c8\u00ca"+ - "\5,\27\2\u00c9\u00c7\3\2\2\2\u00c9\u00ca\3\2\2\2\u00ca\u00ce\3\2\2\2\u00cb"+ - "\u00cc\7 \2\2\u00cc\u00cd\7\17\2\2\u00cd\u00cf\5\24\13\2\u00ce\u00cb\3"+ - "\2\2\2\u00ce\u00cf\3\2\2\2\u00cf\u00d2\3\2\2\2\u00d0\u00d1\7!\2\2\u00d1"+ - "\u00d3\5,\27\2\u00d2\u00d0\3\2\2\2\u00d2\u00d3\3\2\2\2\u00d3\21\3\2\2"+ - "\2\u00d4\u00d5\7\34\2\2\u00d5\u00da\5 \21\2\u00d6\u00d7\7\5\2\2\u00d7"+ - "\u00d9\5 \21\2\u00d8\u00d6\3\2\2\2\u00d9\u00dc\3\2\2\2\u00da\u00d8\3\2"+ - "\2\2\u00da\u00db\3\2\2\2\u00db\23\3\2\2\2\u00dc\u00da\3\2\2\2\u00dd\u00df"+ - "\5\34\17\2\u00de\u00dd\3\2\2\2\u00de\u00df\3\2\2\2\u00df\u00e0\3\2\2\2"+ - "\u00e0\u00e5\5\26\f\2\u00e1\u00e2\7\5\2\2\u00e2\u00e4\5\26\f\2\u00e3\u00e1"+ - "\3\2\2\2\u00e4\u00e7\3\2\2\2\u00e5\u00e3\3\2\2\2\u00e5\u00e6\3\2\2\2\u00e6"+ - "\25\3\2\2\2\u00e7\u00e5\3\2\2\2\u00e8\u00e9\5\30\r\2\u00e9\27\3\2\2\2"+ - "\u00ea\u00f3\7\3\2\2\u00eb\u00f0\5*\26\2\u00ec\u00ed\7\5\2\2\u00ed\u00ef"+ - "\5*\26\2\u00ee\u00ec\3\2\2\2\u00ef\u00f2\3\2\2\2\u00f0\u00ee\3\2\2\2\u00f0"+ - "\u00f1\3\2\2\2\u00f1\u00f4\3\2\2\2\u00f2\u00f0\3\2\2\2\u00f3\u00eb\3\2"+ - "\2\2\u00f3\u00f4\3\2\2\2\u00f4\u00f5\3\2\2\2\u00f5\u00f8\7\4\2\2\u00f6"+ - "\u00f8\5*\26\2\u00f7\u00ea\3\2\2\2\u00f7\u00f6\3\2\2\2\u00f8\31\3\2\2"+ - "\2\u00f9\u00fa\5D#\2\u00fa\u00fb\7\f\2\2\u00fb\u00fc\7\3\2\2\u00fc\u00fd"+ - "\5\n\6\2\u00fd\u00fe\7\4\2\2\u00fe\33\3\2\2\2\u00ff\u0100\t\t\2\2\u0100"+ - "\35\3\2\2\2\u0101\u0106\5*\26\2\u0102\u0104\7\f\2\2\u0103\u0102\3\2\2"+ - "\2\u0103\u0104\3\2\2\2\u0104\u0105\3\2\2\2\u0105\u0107\5D#\2\u0106\u0103"+ - 
"\3\2\2\2\u0106\u0107\3\2\2\2\u0107\37\3\2\2\2\u0108\u010c\5(\25\2\u0109"+ - "\u010b\5\"\22\2\u010a\u0109\3\2\2\2\u010b\u010e\3\2\2\2\u010c\u010a\3"+ - "\2\2\2\u010c\u010d\3\2\2\2\u010d!\3\2\2\2\u010e\u010c\3\2\2\2\u010f\u0110"+ - "\5$\23\2\u0110\u0111\7%\2\2\u0111\u0113\5(\25\2\u0112\u0114\5&\24\2\u0113"+ - "\u0112\3\2\2\2\u0113\u0114\3\2\2\2\u0114\u011b\3\2\2\2\u0115\u0116\7+"+ - "\2\2\u0116\u0117\5$\23\2\u0117\u0118\7%\2\2\u0118\u0119\5(\25\2\u0119"+ - "\u011b\3\2\2\2\u011a\u010f\3\2\2\2\u011a\u0115\3\2\2\2\u011b#\3\2\2\2"+ - "\u011c\u011e\7#\2\2\u011d\u011c\3\2\2\2\u011d\u011e\3\2\2\2\u011e\u012c"+ - "\3\2\2\2\u011f\u0121\7&\2\2\u0120\u0122\7\62\2\2\u0121\u0120\3\2\2\2\u0121"+ - "\u0122\3\2\2\2\u0122\u012c\3\2\2\2\u0123\u0125\7\67\2\2\u0124\u0126\7"+ - "\62\2\2\u0125\u0124\3\2\2\2\u0125\u0126\3\2\2\2\u0126\u012c\3\2\2\2\u0127"+ - "\u0129\7\35\2\2\u0128\u012a\7\62\2\2\u0129\u0128\3\2\2\2\u0129\u012a\3"+ - "\2\2\2\u012a\u012c\3\2\2\2\u012b\u011d\3\2\2\2\u012b\u011f\3\2\2\2\u012b"+ - "\u0123\3\2\2\2\u012b\u0127\3\2\2\2\u012c%\3\2\2\2\u012d\u012e\7.\2\2\u012e"+ - "\u013c\5,\27\2\u012f\u0130\7?\2\2\u0130\u0131\7\3\2\2\u0131\u0136\5D#"+ - "\2\u0132\u0133\7\5\2\2\u0133\u0135\5D#\2\u0134\u0132\3\2\2\2\u0135\u0138"+ - "\3\2\2\2\u0136\u0134\3\2\2\2\u0136\u0137\3\2\2\2\u0137\u0139\3\2\2\2\u0138"+ - "\u0136\3\2\2\2\u0139\u013a\7\4\2\2\u013a\u013c\3\2\2\2\u013b\u012d\3\2"+ - "\2\2\u013b\u012f\3\2\2\2\u013c\'\3\2\2\2\u013d\u0142\5B\"\2\u013e\u0140"+ - "\7\f\2\2\u013f\u013e\3\2\2\2\u013f\u0140\3\2\2\2\u0140\u0141\3\2\2\2\u0141"+ - "\u0143\5@!\2\u0142\u013f\3\2\2\2\u0142\u0143\3\2\2\2\u0143\u0157\3\2\2"+ - "\2\u0144\u0145\7\3\2\2\u0145\u0146\5\n\6\2\u0146\u014b\7\4\2\2\u0147\u0149"+ - "\7\f\2\2\u0148\u0147\3\2\2\2\u0148\u0149\3\2\2\2\u0149\u014a\3\2\2\2\u014a"+ - "\u014c\5@!\2\u014b\u0148\3\2\2\2\u014b\u014c\3\2\2\2\u014c\u0157\3\2\2"+ - "\2\u014d\u014e\7\3\2\2\u014e\u014f\5 \21\2\u014f\u0154\7\4\2\2\u0150\u0152"+ - 
"\7\f\2\2\u0151\u0150\3\2\2\2\u0151\u0152\3\2\2\2\u0152\u0153\3\2\2\2\u0153"+ - "\u0155\5@!\2\u0154\u0151\3\2\2\2\u0154\u0155\3\2\2\2\u0155\u0157\3\2\2"+ - "\2\u0156\u013d\3\2\2\2\u0156\u0144\3\2\2\2\u0156\u014d\3\2\2\2\u0157)"+ - "\3\2\2\2\u0158\u0159\5,\27\2\u0159+\3\2\2\2\u015a\u015b\b\27\1\2\u015b"+ - "\u015c\7,\2\2\u015c\u018a\5,\27\n\u015d\u015e\7\27\2\2\u015e\u015f\7\3"+ - "\2\2\u015f\u0160\5\b\5\2\u0160\u0161\7\4\2\2\u0161\u018a\3\2\2\2\u0162"+ - "\u0163\7\66\2\2\u0163\u0164\7\3\2\2\u0164\u0169\7P\2\2\u0165\u0166\7\5"+ - "\2\2\u0166\u0168\7P\2\2\u0167\u0165\3\2\2\2\u0168\u016b\3\2\2\2\u0169"+ - "\u0167\3\2\2\2\u0169\u016a\3\2\2\2\u016a\u016c\3\2\2\2\u016b\u0169\3\2"+ - "\2\2\u016c\u018a\7\4\2\2\u016d\u016e\7*\2\2\u016e\u016f\7\3\2\2\u016f"+ - "\u0170\5@!\2\u0170\u0171\7\5\2\2\u0171\u0176\7P\2\2\u0172\u0173\7\5\2"+ - "\2\u0173\u0175\7P\2\2\u0174\u0172\3\2\2\2\u0175\u0178\3\2\2\2\u0176\u0174"+ - "\3\2\2\2\u0176\u0177\3\2\2\2\u0177\u0179\3\2\2\2\u0178\u0176\3\2\2\2\u0179"+ - "\u017a\7\4\2\2\u017a\u018a\3\2\2\2\u017b\u017c\7*\2\2\u017c\u017d\7\3"+ - "\2\2\u017d\u017e\7P\2\2\u017e\u017f\7\5\2\2\u017f\u0184\7P\2\2\u0180\u0181"+ - "\7\5\2\2\u0181\u0183\7P\2\2\u0182\u0180\3\2\2\2\u0183\u0186\3\2\2\2\u0184"+ - "\u0182\3\2\2\2\u0184\u0185\3\2\2\2\u0185\u0187\3\2\2\2\u0186\u0184\3\2"+ - "\2\2\u0187\u018a\7\4\2\2\u0188\u018a\5.\30\2\u0189\u015a\3\2\2\2\u0189"+ - "\u015d\3\2\2\2\u0189\u0162\3\2\2\2\u0189\u016d\3\2\2\2\u0189\u017b\3\2"+ - "\2\2\u0189\u0188\3\2\2\2\u018a\u0193\3\2\2\2\u018b\u018c\f\4\2\2\u018c"+ - "\u018d\7\n\2\2\u018d\u0192\5,\27\5\u018e\u018f\f\3\2\2\u018f\u0190\7\60"+ - "\2\2\u0190\u0192\5,\27\4\u0191\u018b\3\2\2\2\u0191\u018e\3\2\2\2\u0192"+ - "\u0195\3\2\2\2\u0193\u0191\3\2\2\2\u0193\u0194\3\2\2\2\u0194-\3\2\2\2"+ - "\u0195\u0193\3\2\2\2\u0196\u0198\5\62\32\2\u0197\u0199\5\60\31\2\u0198"+ - "\u0197\3\2\2\2\u0198\u0199\3\2\2\2\u0199/\3\2\2\2\u019a\u019c\7,\2\2\u019b"+ - "\u019a\3\2\2\2\u019b\u019c\3\2\2\2\u019c\u019d\3\2\2\2\u019d\u019e\7\16"+ - 
"\2\2\u019e\u019f\5\62\32\2\u019f\u01a0\7\n\2\2\u01a0\u01a1\5\62\32\2\u01a1"+ - "\u01c4\3\2\2\2\u01a2\u01a4\7,\2\2\u01a3\u01a2\3\2\2\2\u01a3\u01a4\3\2"+ - "\2\2\u01a4\u01a5\3\2\2\2\u01a5\u01a6\7\"\2\2\u01a6\u01a7\7\3\2\2\u01a7"+ - "\u01ac\5*\26\2\u01a8\u01a9\7\5\2\2\u01a9\u01ab\5*\26\2\u01aa\u01a8\3\2"+ - "\2\2\u01ab\u01ae\3\2\2\2\u01ac\u01aa\3\2\2\2\u01ac\u01ad\3\2\2\2\u01ad"+ - "\u01af\3\2\2\2\u01ae\u01ac\3\2\2\2\u01af\u01b0\7\4\2\2\u01b0\u01c4\3\2"+ - "\2\2\u01b1\u01b3\7,\2\2\u01b2\u01b1\3\2\2\2\u01b2\u01b3\3\2\2\2\u01b3"+ - "\u01b4\3\2\2\2\u01b4\u01b5\7\"\2\2\u01b5\u01b6\7\3\2\2\u01b6\u01b7\5\b"+ - "\5\2\u01b7\u01b8\7\4\2\2\u01b8\u01c4\3\2\2\2\u01b9\u01bb\7,\2\2\u01ba"+ - "\u01b9\3\2\2\2\u01ba\u01bb\3\2\2\2\u01bb\u01bc\3\2\2\2\u01bc\u01bd\t\n"+ - "\2\2\u01bd\u01c4\5\62\32\2\u01be\u01c0\7$\2\2\u01bf\u01c1\7,\2\2\u01c0"+ - "\u01bf\3\2\2\2\u01c0\u01c1\3\2\2\2\u01c1\u01c2\3\2\2\2\u01c2\u01c4\7-"+ - "\2\2\u01c3\u019b\3\2\2\2\u01c3\u01a3\3\2\2\2\u01c3\u01b2\3\2\2\2\u01c3"+ - "\u01ba\3\2\2\2\u01c3\u01be\3\2\2\2\u01c4\61\3\2\2\2\u01c5\u01c6\b\32\1"+ - "\2\u01c6\u01ca\5\64\33\2\u01c7\u01c8\t\13\2\2\u01c8\u01ca\5\62\32\6\u01c9"+ - "\u01c5\3\2\2\2\u01c9\u01c7\3\2\2\2\u01ca\u01d7\3\2\2\2\u01cb\u01cc\f\5"+ - "\2\2\u01cc\u01cd\t\f\2\2\u01cd\u01d6\5\62\32\6\u01ce\u01cf\f\4\2\2\u01cf"+ - "\u01d0\t\13\2\2\u01d0\u01d6\5\62\32\5\u01d1\u01d2\f\3\2\2\u01d2\u01d3"+ - "\5:\36\2\u01d3\u01d4\5\62\32\4\u01d4\u01d6\3\2\2\2\u01d5\u01cb\3\2\2\2"+ - "\u01d5\u01ce\3\2\2\2\u01d5\u01d1\3\2\2\2\u01d6\u01d9\3\2\2\2\u01d7\u01d5"+ - "\3\2\2\2\u01d7\u01d8\3\2\2\2\u01d8\63\3\2\2\2\u01d9\u01d7\3\2\2\2\u01da"+ - "\u01db\7\20\2\2\u01db\u01dc\7\3\2\2\u01dc\u01dd\5*\26\2\u01dd\u01de\7"+ - "\f\2\2\u01de\u01df\5> \2\u01df\u01e0\7\4\2\2\u01e0\u020f\3\2\2\2\u01e1"+ - "\u01e2\7\31\2\2\u01e2\u01e3\7\3\2\2\u01e3\u01e4\5D#\2\u01e4\u01e5\7\34"+ - "\2\2\u01e5\u01e6\5\62\32\2\u01e6\u01e7\7\4\2\2\u01e7\u020f\3\2\2\2\u01e8"+ - "\u020f\58\35\2\u01e9\u020f\7L\2\2\u01ea\u01eb\5\66\34\2\u01eb\u01ec\7"+ - 
"\6\2\2\u01ec\u01ee\3\2\2\2\u01ed\u01ea\3\2\2\2\u01ed\u01ee\3\2\2\2\u01ee"+ - "\u01ef\3\2\2\2\u01ef\u020f\7L\2\2\u01f0\u01f1\5D#\2\u01f1\u01fd\7\3\2"+ - "\2\u01f2\u01f4\5\34\17\2\u01f3\u01f2\3\2\2\2\u01f3\u01f4\3\2\2\2\u01f4"+ - "\u01f5\3\2\2\2\u01f5\u01fa\5*\26\2\u01f6\u01f7\7\5\2\2\u01f7\u01f9\5*"+ - "\26\2\u01f8\u01f6\3\2\2\2\u01f9\u01fc\3\2\2\2\u01fa\u01f8\3\2\2\2\u01fa"+ - "\u01fb\3\2\2\2\u01fb\u01fe\3\2\2\2\u01fc\u01fa\3\2\2\2\u01fd\u01f3\3\2"+ - "\2\2\u01fd\u01fe\3\2\2\2\u01fe\u01ff\3\2\2\2\u01ff\u0200\7\4\2\2\u0200"+ - "\u020f\3\2\2\2\u0201\u0202\7\3\2\2\u0202\u0203\5\b\5\2\u0203\u0204\7\4"+ - "\2\2\u0204\u020f\3\2\2\2\u0205\u020f\5\66\34\2\u0206\u0207\5\66\34\2\u0207"+ - "\u0208\7\6\2\2\u0208\u0209\5D#\2\u0209\u020f\3\2\2\2\u020a\u020b\7\3\2"+ - "\2\u020b\u020c\5*\26\2\u020c\u020d\7\4\2\2\u020d\u020f\3\2\2\2\u020e\u01da"+ - "\3\2\2\2\u020e\u01e1\3\2\2\2\u020e\u01e8\3\2\2\2\u020e\u01e9\3\2\2\2\u020e"+ - "\u01ed\3\2\2\2\u020e\u01f0\3\2\2\2\u020e\u0201\3\2\2\2\u020e\u0205\3\2"+ - "\2\2\u020e\u0206\3\2\2\2\u020e\u020a\3\2\2\2\u020f\65\3\2\2\2\u0210\u0213"+ - "\5D#\2\u0211\u0213\5B\"\2\u0212\u0210\3\2\2\2\u0212\u0211\3\2\2\2\u0213"+ - "\u0214\3\2\2\2\u0214\u0215\7\6\2\2\u0215\u0217\3\2\2\2\u0216\u0212\3\2"+ - "\2\2\u0216\u0217\3\2\2\2\u0217\u0218\3\2\2\2\u0218\u0219\5D#\2\u0219\67"+ - "\3\2\2\2\u021a\u0226\7-\2\2\u021b\u021c\5D#\2\u021c\u021d\7P\2\2\u021d"+ - "\u0226\3\2\2\2\u021e\u0226\5J&\2\u021f\u0226\5<\37\2\u0220\u0222\7P\2"+ - "\2\u0221\u0220\3\2\2\2\u0222\u0223\3\2\2\2\u0223\u0221\3\2\2\2\u0223\u0224"+ - "\3\2\2\2\u0224\u0226\3\2\2\2\u0225\u021a\3\2\2\2\u0225\u021b\3\2\2\2\u0225"+ - "\u021e\3\2\2\2\u0225\u021f\3\2\2\2\u0225\u0221\3\2\2\2\u02269\3\2\2\2"+ - "\u0227\u0228\t\r\2\2\u0228;\3\2\2\2\u0229\u022a\t\16\2\2\u022a=\3\2\2"+ - "\2\u022b\u022c\5D#\2\u022c?\3\2\2\2\u022d\u0232\5D#\2\u022e\u022f\7\6"+ - "\2\2\u022f\u0231\5D#\2\u0230\u022e\3\2\2\2\u0231\u0234\3\2\2\2\u0232\u0230"+ - "\3\2\2\2\u0232\u0233\3\2\2\2\u0233A\3\2\2\2\u0234\u0232\3\2\2\2\u0235"+ 
- "\u0236\5D#\2\u0236C\3\2\2\2\u0237\u023a\5F$\2\u0238\u023a\5H%\2\u0239"+ - "\u0237\3\2\2\2\u0239\u0238\3\2\2\2\u023aE\3\2\2\2\u023b\u023e\7U\2\2\u023c"+ - "\u023e\7V\2\2\u023d\u023b\3\2\2\2\u023d\u023c\3\2\2\2\u023eG\3\2\2\2\u023f"+ - "\u0243\7S\2\2\u0240\u0243\5L\'\2\u0241\u0243\7T\2\2\u0242\u023f\3\2\2"+ - "\2\u0242\u0240\3\2\2\2\u0242\u0241\3\2\2\2\u0243I\3\2\2\2\u0244\u0247"+ - "\7R\2\2\u0245\u0247\7Q\2\2\u0246\u0244\3\2\2\2\u0246\u0245\3\2\2\2\u0247"+ - "K\3\2\2\2\u0248\u0249\t\17\2\2\u0249M\3\2\2\2R]_clnrx{\u0086\u0089\u008d"+ - "\u0095\u0098\u00a4\u00a7\u00ab\u00b2\u00b6\u00ba\u00c1\u00c5\u00c9\u00ce"+ - "\u00d2\u00da\u00de\u00e5\u00f0\u00f3\u00f7\u0103\u0106\u010c\u0113\u011a"+ - "\u011d\u0121\u0125\u0129\u012b\u0136\u013b\u013f\u0142\u0148\u014b\u0151"+ - "\u0154\u0156\u0169\u0176\u0184\u0189\u0191\u0193\u0198\u019b\u01a3\u01ac"+ - "\u01b2\u01ba\u01c0\u01c3\u01c9\u01d5\u01d7\u01ed\u01f3\u01fa\u01fd\u020e"+ - "\u0212\u0216\u0223\u0225\u0232\u0239\u023d\u0242\u0246"; + "\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\3\2\3\2\3\2\3\3\3\3\3\3\3\4\3\4\3"+ + "\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4\\\n\4\f\4\16\4_\13\4\3\4\5\4b\n\4\3\4\3"+ + "\4\3\4\3\4\3\4\3\4\3\4\7\4k\n\4\f\4\16\4n\13\4\3\4\5\4q\n\4\3\4\3\4\3"+ + "\4\3\4\5\4w\n\4\3\4\5\4z\n\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\5\4\u0085"+ + "\n\4\3\4\5\4\u0088\n\4\3\4\3\4\5\4\u008c\n\4\3\5\3\5\3\5\3\5\7\5\u0092"+ + "\n\5\f\5\16\5\u0095\13\5\5\5\u0097\n\5\3\5\3\5\3\6\3\6\3\6\3\6\3\6\3\6"+ + "\7\6\u00a1\n\6\f\6\16\6\u00a4\13\6\5\6\u00a6\n\6\3\6\3\6\5\6\u00aa\n\6"+ + "\3\7\3\7\3\7\3\7\3\7\5\7\u00b1\n\7\3\b\3\b\5\b\u00b5\n\b\3\t\3\t\5\t\u00b9"+ + "\n\t\3\t\3\t\3\t\7\t\u00be\n\t\f\t\16\t\u00c1\13\t\3\t\5\t\u00c4\n\t\3"+ + "\t\3\t\5\t\u00c8\n\t\3\t\3\t\3\t\5\t\u00cd\n\t\3\t\3\t\5\t\u00d1\n\t\3"+ + "\n\3\n\3\n\3\n\7\n\u00d7\n\n\f\n\16\n\u00da\13\n\3\13\5\13\u00dd\n\13"+ + "\3\13\3\13\3\13\7\13\u00e2\n\13\f\13\16\13\u00e5\13\13\3\f\3\f\3\r\3\r"+ + "\3\r\3\r\7\r\u00ed\n\r\f\r\16\r\u00f0\13\r\5\r\u00f2\n\r\3\r\3\r\5\r\u00f6"+ + 
"\n\r\3\16\3\16\3\16\3\16\3\16\3\16\3\17\3\17\3\20\3\20\5\20\u0102\n\20"+ + "\3\20\5\20\u0105\n\20\3\21\3\21\7\21\u0109\n\21\f\21\16\21\u010c\13\21"+ + "\3\22\3\22\3\22\3\22\5\22\u0112\n\22\3\22\3\22\3\22\3\22\3\22\5\22\u0119"+ + "\n\22\3\23\5\23\u011c\n\23\3\23\3\23\5\23\u0120\n\23\3\23\3\23\5\23\u0124"+ + "\n\23\3\23\3\23\5\23\u0128\n\23\5\23\u012a\n\23\3\24\3\24\3\24\3\24\3"+ + "\24\3\24\3\24\7\24\u0133\n\24\f\24\16\24\u0136\13\24\3\24\3\24\5\24\u013a"+ + "\n\24\3\25\3\25\5\25\u013e\n\25\3\25\5\25\u0141\n\25\3\25\3\25\3\25\3"+ + "\25\5\25\u0147\n\25\3\25\5\25\u014a\n\25\3\25\3\25\3\25\3\25\5\25\u0150"+ + "\n\25\3\25\5\25\u0153\n\25\5\25\u0155\n\25\3\26\3\26\3\27\3\27\3\27\3"+ + "\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\7\27\u0166\n\27\f\27"+ + "\16\27\u0169\13\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\7\27\u0173"+ + "\n\27\f\27\16\27\u0176\13\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3"+ + "\27\7\27\u0181\n\27\f\27\16\27\u0184\13\27\3\27\3\27\5\27\u0188\n\27\3"+ + "\27\3\27\3\27\3\27\3\27\3\27\7\27\u0190\n\27\f\27\16\27\u0193\13\27\3"+ + "\30\3\30\5\30\u0197\n\30\3\31\5\31\u019a\n\31\3\31\3\31\3\31\3\31\3\31"+ + "\3\31\5\31\u01a2\n\31\3\31\3\31\3\31\3\31\3\31\7\31\u01a9\n\31\f\31\16"+ + "\31\u01ac\13\31\3\31\3\31\3\31\5\31\u01b1\n\31\3\31\3\31\3\31\3\31\3\31"+ + "\3\31\5\31\u01b9\n\31\3\31\3\31\3\31\3\31\5\31\u01bf\n\31\3\31\5\31\u01c2"+ + "\n\31\3\32\3\32\3\32\3\32\5\32\u01c8\n\32\3\32\3\32\3\32\3\32\3\32\3\32"+ + "\3\32\3\32\3\32\3\32\7\32\u01d4\n\32\f\32\16\32\u01d7\13\32\3\33\3\33"+ + "\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33"+ + "\3\33\3\33\3\33\5\33\u01ec\n\33\3\33\3\33\3\33\3\33\5\33\u01f2\n\33\3"+ + "\33\3\33\3\33\7\33\u01f7\n\33\f\33\16\33\u01fa\13\33\5\33\u01fc\n\33\3"+ + "\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\5\33\u020a"+ + "\n\33\3\34\3\34\3\34\3\34\3\34\3\34\3\34\6\34\u0213\n\34\r\34\16\34\u0214"+ + "\5\34\u0217\n\34\3\35\3\35\3\36\3\36\3\37\3\37\3 \3 \3 \7 \u0222\n \f"+ + " \16 \u0225\13 
\3 \3 \3!\3!\3\"\3\"\5\"\u022d\n\"\3#\3#\5#\u0231\n#\3"+ + "$\3$\3$\5$\u0236\n$\3%\3%\5%\u023a\n%\3&\3&\3&\2\4,\62\'\2\4\6\b\n\f\16"+ + "\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:<>@BDFHJ\2\20\b\2\7\7\t"+ + "\t\26\26))//\63\63\4\2\37\37==\4\2\t\t//\4\2\34\34\"\"\3\2\23\24\4\2\7"+ + "\7PP\4\2\r\r\23\23\4\2\7\7\25\25\4\2\'\'88\3\2IJ\3\2KM\3\2CH\4\2\32\32"+ + ">>\16\2\b\t\21\22\26\26\30\30\33\34\36\37))//\63\6689;=@@\u0284\2L\3\2"+ + "\2\2\4O\3\2\2\2\6\u008b\3\2\2\2\b\u0096\3\2\2\2\n\u009a\3\2\2\2\f\u00b0"+ + "\3\2\2\2\16\u00b2\3\2\2\2\20\u00b6\3\2\2\2\22\u00d2\3\2\2\2\24\u00dc\3"+ + "\2\2\2\26\u00e6\3\2\2\2\30\u00f5\3\2\2\2\32\u00f7\3\2\2\2\34\u00fd\3\2"+ + "\2\2\36\u00ff\3\2\2\2 \u0106\3\2\2\2\"\u0118\3\2\2\2$\u0129\3\2\2\2&\u0139"+ + "\3\2\2\2(\u0154\3\2\2\2*\u0156\3\2\2\2,\u0187\3\2\2\2.\u0194\3\2\2\2\60"+ + "\u01c1\3\2\2\2\62\u01c7\3\2\2\2\64\u0209\3\2\2\2\66\u0216\3\2\2\28\u0218"+ + "\3\2\2\2:\u021a\3\2\2\2<\u021c\3\2\2\2>\u0223\3\2\2\2@\u0228\3\2\2\2B"+ + "\u022c\3\2\2\2D\u0230\3\2\2\2F\u0235\3\2\2\2H\u0239\3\2\2\2J\u023b\3\2"+ + "\2\2LM\5\6\4\2MN\7\2\2\3N\3\3\2\2\2OP\5*\26\2PQ\7\2\2\3Q\5\3\2\2\2R\u008c"+ + "\5\b\5\2Sa\7\30\2\2T]\7\3\2\2UV\7\65\2\2V\\\t\2\2\2WX\7\33\2\2X\\\t\3"+ + "\2\2YZ\7@\2\2Z\\\5:\36\2[U\3\2\2\2[W\3\2\2\2[Y\3\2\2\2\\_\3\2\2\2][\3"+ + "\2\2\2]^\3\2\2\2^`\3\2\2\2_]\3\2\2\2`b\7\4\2\2aT\3\2\2\2ab\3\2\2\2bc\3"+ + "\2\2\2c\u008c\5\6\4\2dp\7\22\2\2el\7\3\2\2fg\7\65\2\2gk\t\4\2\2hi\7\33"+ + "\2\2ik\t\3\2\2jf\3\2\2\2jh\3\2\2\2kn\3\2\2\2lj\3\2\2\2lm\3\2\2\2mo\3\2"+ + "\2\2nl\3\2\2\2oq\7\4\2\2pe\3\2\2\2pq\3\2\2\2qr\3\2\2\2r\u008c\5\6\4\2"+ + "st\7;\2\2ty\7<\2\2uw\7\'\2\2vu\3\2\2\2vw\3\2\2\2wx\3\2\2\2xz\7O\2\2yv"+ + "\3\2\2\2yz\3\2\2\2z\u008c\3\2\2\2{|\7;\2\2|}\7\21\2\2}~\t\5\2\2~\u008c"+ + "\5@!\2\177\u0080\t\6\2\2\u0080\u008c\5@!\2\u0081\u0082\7;\2\2\u0082\u0087"+ + "\7\36\2\2\u0083\u0085\7\'\2\2\u0084\u0083\3\2\2\2\u0084\u0085\3\2\2\2"+ + "\u0085\u0086\3\2\2\2\u0086\u0088\7O\2\2\u0087\u0084\3\2\2\2\u0087\u0088"+ + 
"\3\2\2\2\u0088\u008c\3\2\2\2\u0089\u008a\7;\2\2\u008a\u008c\79\2\2\u008b"+ + "R\3\2\2\2\u008bS\3\2\2\2\u008bd\3\2\2\2\u008bs\3\2\2\2\u008b{\3\2\2\2"+ + "\u008b\177\3\2\2\2\u008b\u0081\3\2\2\2\u008b\u0089\3\2\2\2\u008c\7\3\2"+ + "\2\2\u008d\u008e\7B\2\2\u008e\u0093\5\32\16\2\u008f\u0090\7\5\2\2\u0090"+ + "\u0092\5\32\16\2\u0091\u008f\3\2\2\2\u0092\u0095\3\2\2\2\u0093\u0091\3"+ + "\2\2\2\u0093\u0094\3\2\2\2\u0094\u0097\3\2\2\2\u0095\u0093\3\2\2\2\u0096"+ + "\u008d\3\2\2\2\u0096\u0097\3\2\2\2\u0097\u0098\3\2\2\2\u0098\u0099\5\n"+ + "\6\2\u0099\t\3\2\2\2\u009a\u00a5\5\f\7\2\u009b\u009c\7\61\2\2\u009c\u009d"+ + "\7\17\2\2\u009d\u00a2\5\16\b\2\u009e\u009f\7\5\2\2\u009f\u00a1\5\16\b"+ + "\2\u00a0\u009e\3\2\2\2\u00a1\u00a4\3\2\2\2\u00a2\u00a0\3\2\2\2\u00a2\u00a3"+ + "\3\2\2\2\u00a3\u00a6\3\2\2\2\u00a4\u00a2\3\2\2\2\u00a5\u009b\3\2\2\2\u00a5"+ + "\u00a6\3\2\2\2\u00a6\u00a9\3\2\2\2\u00a7\u00a8\7(\2\2\u00a8\u00aa\t\7"+ + "\2\2\u00a9\u00a7\3\2\2\2\u00a9\u00aa\3\2\2\2\u00aa\13\3\2\2\2\u00ab\u00b1"+ + "\5\20\t\2\u00ac\u00ad\7\3\2\2\u00ad\u00ae\5\n\6\2\u00ae\u00af\7\4\2\2"+ + "\u00af\u00b1\3\2\2\2\u00b0\u00ab\3\2\2\2\u00b0\u00ac\3\2\2\2\u00b1\r\3"+ + "\2\2\2\u00b2\u00b4\5*\26\2\u00b3\u00b5\t\b\2\2\u00b4\u00b3\3\2\2\2\u00b4"+ + "\u00b5\3\2\2\2\u00b5\17\3\2\2\2\u00b6\u00b8\7:\2\2\u00b7\u00b9\5\34\17"+ + "\2\u00b8\u00b7\3\2\2\2\u00b8\u00b9\3\2\2\2\u00b9\u00ba\3\2\2\2\u00ba\u00bf"+ + "\5\36\20\2\u00bb\u00bc\7\5\2\2\u00bc\u00be\5\36\20\2\u00bd\u00bb\3\2\2"+ + "\2\u00be\u00c1\3\2\2\2\u00bf\u00bd\3\2\2\2\u00bf\u00c0\3\2\2\2\u00c0\u00c3"+ + "\3\2\2\2\u00c1\u00bf\3\2\2\2\u00c2\u00c4\5\22\n\2\u00c3\u00c2\3\2\2\2"+ + "\u00c3\u00c4\3\2\2\2\u00c4\u00c7\3\2\2\2\u00c5\u00c6\7A\2\2\u00c6\u00c8"+ + "\5,\27\2\u00c7\u00c5\3\2\2\2\u00c7\u00c8\3\2\2\2\u00c8\u00cc\3\2\2\2\u00c9"+ + "\u00ca\7 \2\2\u00ca\u00cb\7\17\2\2\u00cb\u00cd\5\24\13\2\u00cc\u00c9\3"+ + "\2\2\2\u00cc\u00cd\3\2\2\2\u00cd\u00d0\3\2\2\2\u00ce\u00cf\7!\2\2\u00cf"+ + 
"\u00d1\5,\27\2\u00d0\u00ce\3\2\2\2\u00d0\u00d1\3\2\2\2\u00d1\21\3\2\2"+ + "\2\u00d2\u00d3\7\34\2\2\u00d3\u00d8\5 \21\2\u00d4\u00d5\7\5\2\2\u00d5"+ + "\u00d7\5 \21\2\u00d6\u00d4\3\2\2\2\u00d7\u00da\3\2\2\2\u00d8\u00d6\3\2"+ + "\2\2\u00d8\u00d9\3\2\2\2\u00d9\23\3\2\2\2\u00da\u00d8\3\2\2\2\u00db\u00dd"+ + "\5\34\17\2\u00dc\u00db\3\2\2\2\u00dc\u00dd\3\2\2\2\u00dd\u00de\3\2\2\2"+ + "\u00de\u00e3\5\26\f\2\u00df\u00e0\7\5\2\2\u00e0\u00e2\5\26\f\2\u00e1\u00df"+ + "\3\2\2\2\u00e2\u00e5\3\2\2\2\u00e3\u00e1\3\2\2\2\u00e3\u00e4\3\2\2\2\u00e4"+ + "\25\3\2\2\2\u00e5\u00e3\3\2\2\2\u00e6\u00e7\5\30\r\2\u00e7\27\3\2\2\2"+ + "\u00e8\u00f1\7\3\2\2\u00e9\u00ee\5*\26\2\u00ea\u00eb\7\5\2\2\u00eb\u00ed"+ + "\5*\26\2\u00ec\u00ea\3\2\2\2\u00ed\u00f0\3\2\2\2\u00ee\u00ec\3\2\2\2\u00ee"+ + "\u00ef\3\2\2\2\u00ef\u00f2\3\2\2\2\u00f0\u00ee\3\2\2\2\u00f1\u00e9\3\2"+ + "\2\2\u00f1\u00f2\3\2\2\2\u00f2\u00f3\3\2\2\2\u00f3\u00f6\7\4\2\2\u00f4"+ + "\u00f6\5*\26\2\u00f5\u00e8\3\2\2\2\u00f5\u00f4\3\2\2\2\u00f6\31\3\2\2"+ + "\2\u00f7\u00f8\5B\"\2\u00f8\u00f9\7\f\2\2\u00f9\u00fa\7\3\2\2\u00fa\u00fb"+ + "\5\n\6\2\u00fb\u00fc\7\4\2\2\u00fc\33\3\2\2\2\u00fd\u00fe\t\t\2\2\u00fe"+ + "\35\3\2\2\2\u00ff\u0104\5*\26\2\u0100\u0102\7\f\2\2\u0101\u0100\3\2\2"+ + "\2\u0101\u0102\3\2\2\2\u0102\u0103\3\2\2\2\u0103\u0105\5B\"\2\u0104\u0101"+ + "\3\2\2\2\u0104\u0105\3\2\2\2\u0105\37\3\2\2\2\u0106\u010a\5(\25\2\u0107"+ + "\u0109\5\"\22\2\u0108\u0107\3\2\2\2\u0109\u010c\3\2\2\2\u010a\u0108\3"+ + "\2\2\2\u010a\u010b\3\2\2\2\u010b!\3\2\2\2\u010c\u010a\3\2\2\2\u010d\u010e"+ + "\5$\23\2\u010e\u010f\7%\2\2\u010f\u0111\5(\25\2\u0110\u0112\5&\24\2\u0111"+ + "\u0110\3\2\2\2\u0111\u0112\3\2\2\2\u0112\u0119\3\2\2\2\u0113\u0114\7+"+ + "\2\2\u0114\u0115\5$\23\2\u0115\u0116\7%\2\2\u0116\u0117\5(\25\2\u0117"+ + "\u0119\3\2\2\2\u0118\u010d\3\2\2\2\u0118\u0113\3\2\2\2\u0119#\3\2\2\2"+ + "\u011a\u011c\7#\2\2\u011b\u011a\3\2\2\2\u011b\u011c\3\2\2\2\u011c\u012a"+ + 
"\3\2\2\2\u011d\u011f\7&\2\2\u011e\u0120\7\62\2\2\u011f\u011e\3\2\2\2\u011f"+ + "\u0120\3\2\2\2\u0120\u012a\3\2\2\2\u0121\u0123\7\67\2\2\u0122\u0124\7"+ + "\62\2\2\u0123\u0122\3\2\2\2\u0123\u0124\3\2\2\2\u0124\u012a\3\2\2\2\u0125"+ + "\u0127\7\35\2\2\u0126\u0128\7\62\2\2\u0127\u0126\3\2\2\2\u0127\u0128\3"+ + "\2\2\2\u0128\u012a\3\2\2\2\u0129\u011b\3\2\2\2\u0129\u011d\3\2\2\2\u0129"+ + "\u0121\3\2\2\2\u0129\u0125\3\2\2\2\u012a%\3\2\2\2\u012b\u012c\7.\2\2\u012c"+ + "\u013a\5,\27\2\u012d\u012e\7?\2\2\u012e\u012f\7\3\2\2\u012f\u0134\5B\""+ + "\2\u0130\u0131\7\5\2\2\u0131\u0133\5B\"\2\u0132\u0130\3\2\2\2\u0133\u0136"+ + "\3\2\2\2\u0134\u0132\3\2\2\2\u0134\u0135\3\2\2\2\u0135\u0137\3\2\2\2\u0136"+ + "\u0134\3\2\2\2\u0137\u0138\7\4\2\2\u0138\u013a\3\2\2\2\u0139\u012b\3\2"+ + "\2\2\u0139\u012d\3\2\2\2\u013a\'\3\2\2\2\u013b\u0140\5@!\2\u013c\u013e"+ + "\7\f\2\2\u013d\u013c\3\2\2\2\u013d\u013e\3\2\2\2\u013e\u013f\3\2\2\2\u013f"+ + "\u0141\5> \2\u0140\u013d\3\2\2\2\u0140\u0141\3\2\2\2\u0141\u0155\3\2\2"+ + "\2\u0142\u0143\7\3\2\2\u0143\u0144\5\n\6\2\u0144\u0149\7\4\2\2\u0145\u0147"+ + "\7\f\2\2\u0146\u0145\3\2\2\2\u0146\u0147\3\2\2\2\u0147\u0148\3\2\2\2\u0148"+ + "\u014a\5> \2\u0149\u0146\3\2\2\2\u0149\u014a\3\2\2\2\u014a\u0155\3\2\2"+ + "\2\u014b\u014c\7\3\2\2\u014c\u014d\5 \21\2\u014d\u0152\7\4\2\2\u014e\u0150"+ + "\7\f\2\2\u014f\u014e\3\2\2\2\u014f\u0150\3\2\2\2\u0150\u0151\3\2\2\2\u0151"+ + "\u0153\5> \2\u0152\u014f\3\2\2\2\u0152\u0153\3\2\2\2\u0153\u0155\3\2\2"+ + "\2\u0154\u013b\3\2\2\2\u0154\u0142\3\2\2\2\u0154\u014b\3\2\2\2\u0155)"+ + "\3\2\2\2\u0156\u0157\5,\27\2\u0157+\3\2\2\2\u0158\u0159\b\27\1\2\u0159"+ + "\u015a\7,\2\2\u015a\u0188\5,\27\n\u015b\u015c\7\27\2\2\u015c\u015d\7\3"+ + "\2\2\u015d\u015e\5\b\5\2\u015e\u015f\7\4\2\2\u015f\u0188\3\2\2\2\u0160"+ + "\u0161\7\66\2\2\u0161\u0162\7\3\2\2\u0162\u0167\7O\2\2\u0163\u0164\7\5"+ + "\2\2\u0164\u0166\7O\2\2\u0165\u0163\3\2\2\2\u0166\u0169\3\2\2\2\u0167"+ + 
"\u0165\3\2\2\2\u0167\u0168\3\2\2\2\u0168\u016a\3\2\2\2\u0169\u0167\3\2"+ + "\2\2\u016a\u0188\7\4\2\2\u016b\u016c\7*\2\2\u016c\u016d\7\3\2\2\u016d"+ + "\u016e\5> \2\u016e\u016f\7\5\2\2\u016f\u0174\7O\2\2\u0170\u0171\7\5\2"+ + "\2\u0171\u0173\7O\2\2\u0172\u0170\3\2\2\2\u0173\u0176\3\2\2\2\u0174\u0172"+ + "\3\2\2\2\u0174\u0175\3\2\2\2\u0175\u0177\3\2\2\2\u0176\u0174\3\2\2\2\u0177"+ + "\u0178\7\4\2\2\u0178\u0188\3\2\2\2\u0179\u017a\7*\2\2\u017a\u017b\7\3"+ + "\2\2\u017b\u017c\7O\2\2\u017c\u017d\7\5\2\2\u017d\u0182\7O\2\2\u017e\u017f"+ + "\7\5\2\2\u017f\u0181\7O\2\2\u0180\u017e\3\2\2\2\u0181\u0184\3\2\2\2\u0182"+ + "\u0180\3\2\2\2\u0182\u0183\3\2\2\2\u0183\u0185\3\2\2\2\u0184\u0182\3\2"+ + "\2\2\u0185\u0188\7\4\2\2\u0186\u0188\5.\30\2\u0187\u0158\3\2\2\2\u0187"+ + "\u015b\3\2\2\2\u0187\u0160\3\2\2\2\u0187\u016b\3\2\2\2\u0187\u0179\3\2"+ + "\2\2\u0187\u0186\3\2\2\2\u0188\u0191\3\2\2\2\u0189\u018a\f\4\2\2\u018a"+ + "\u018b\7\n\2\2\u018b\u0190\5,\27\5\u018c\u018d\f\3\2\2\u018d\u018e\7\60"+ + "\2\2\u018e\u0190\5,\27\4\u018f\u0189\3\2\2\2\u018f\u018c\3\2\2\2\u0190"+ + "\u0193\3\2\2\2\u0191\u018f\3\2\2\2\u0191\u0192\3\2\2\2\u0192-\3\2\2\2"+ + "\u0193\u0191\3\2\2\2\u0194\u0196\5\62\32\2\u0195\u0197\5\60\31\2\u0196"+ + "\u0195\3\2\2\2\u0196\u0197\3\2\2\2\u0197/\3\2\2\2\u0198\u019a\7,\2\2\u0199"+ + "\u0198\3\2\2\2\u0199\u019a\3\2\2\2\u019a\u019b\3\2\2\2\u019b\u019c\7\16"+ + "\2\2\u019c\u019d\5\62\32\2\u019d\u019e\7\n\2\2\u019e\u019f\5\62\32\2\u019f"+ + "\u01c2\3\2\2\2\u01a0\u01a2\7,\2\2\u01a1\u01a0\3\2\2\2\u01a1\u01a2\3\2"+ + "\2\2\u01a2\u01a3\3\2\2\2\u01a3\u01a4\7\"\2\2\u01a4\u01a5\7\3\2\2\u01a5"+ + "\u01aa\5*\26\2\u01a6\u01a7\7\5\2\2\u01a7\u01a9\5*\26\2\u01a8\u01a6\3\2"+ + "\2\2\u01a9\u01ac\3\2\2\2\u01aa\u01a8\3\2\2\2\u01aa\u01ab\3\2\2\2\u01ab"+ + "\u01ad\3\2\2\2\u01ac\u01aa\3\2\2\2\u01ad\u01ae\7\4\2\2\u01ae\u01c2\3\2"+ + "\2\2\u01af\u01b1\7,\2\2\u01b0\u01af\3\2\2\2\u01b0\u01b1\3\2\2\2\u01b1"+ + "\u01b2\3\2\2\2\u01b2\u01b3\7\"\2\2\u01b3\u01b4\7\3\2\2\u01b4\u01b5\5\b"+ + 
"\5\2\u01b5\u01b6\7\4\2\2\u01b6\u01c2\3\2\2\2\u01b7\u01b9\7,\2\2\u01b8"+ + "\u01b7\3\2\2\2\u01b8\u01b9\3\2\2\2\u01b9\u01ba\3\2\2\2\u01ba\u01bb\t\n"+ + "\2\2\u01bb\u01c2\5\62\32\2\u01bc\u01be\7$\2\2\u01bd\u01bf\7,\2\2\u01be"+ + "\u01bd\3\2\2\2\u01be\u01bf\3\2\2\2\u01bf\u01c0\3\2\2\2\u01c0\u01c2\7-"+ + "\2\2\u01c1\u0199\3\2\2\2\u01c1\u01a1\3\2\2\2\u01c1\u01b0\3\2\2\2\u01c1"+ + "\u01b8\3\2\2\2\u01c1\u01bc\3\2\2\2\u01c2\61\3\2\2\2\u01c3\u01c4\b\32\1"+ + "\2\u01c4\u01c8\5\64\33\2\u01c5\u01c6\t\13\2\2\u01c6\u01c8\5\62\32\6\u01c7"+ + "\u01c3\3\2\2\2\u01c7\u01c5\3\2\2\2\u01c8\u01d5\3\2\2\2\u01c9\u01ca\f\5"+ + "\2\2\u01ca\u01cb\t\f\2\2\u01cb\u01d4\5\62\32\6\u01cc\u01cd\f\4\2\2\u01cd"+ + "\u01ce\t\13\2\2\u01ce\u01d4\5\62\32\5\u01cf\u01d0\f\3\2\2\u01d0\u01d1"+ + "\58\35\2\u01d1\u01d2\5\62\32\4\u01d2\u01d4\3\2\2\2\u01d3\u01c9\3\2\2\2"+ + "\u01d3\u01cc\3\2\2\2\u01d3\u01cf\3\2\2\2\u01d4\u01d7\3\2\2\2\u01d5\u01d3"+ + "\3\2\2\2\u01d5\u01d6\3\2\2\2\u01d6\63\3\2\2\2\u01d7\u01d5\3\2\2\2\u01d8"+ + "\u01d9\7\20\2\2\u01d9\u01da\7\3\2\2\u01da\u01db\5*\26\2\u01db\u01dc\7"+ + "\f\2\2\u01dc\u01dd\5<\37\2\u01dd\u01de\7\4\2\2\u01de\u020a\3\2\2\2\u01df"+ + "\u01e0\7\31\2\2\u01e0\u01e1\7\3\2\2\u01e1\u01e2\5B\"\2\u01e2\u01e3\7\34"+ + "\2\2\u01e3\u01e4\5\62\32\2\u01e4\u01e5\7\4\2\2\u01e5\u020a\3\2\2\2\u01e6"+ + "\u020a\5\66\34\2\u01e7\u020a\7K\2\2\u01e8\u01e9\5> \2\u01e9\u01ea\7\6"+ + "\2\2\u01ea\u01ec\3\2\2\2\u01eb\u01e8\3\2\2\2\u01eb\u01ec\3\2\2\2\u01ec"+ + "\u01ed\3\2\2\2\u01ed\u020a\7K\2\2\u01ee\u01ef\5B\"\2\u01ef\u01fb\7\3\2"+ + "\2\u01f0\u01f2\5\34\17\2\u01f1\u01f0\3\2\2\2\u01f1\u01f2\3\2\2\2\u01f2"+ + "\u01f3\3\2\2\2\u01f3\u01f8\5*\26\2\u01f4\u01f5\7\5\2\2\u01f5\u01f7\5*"+ + "\26\2\u01f6\u01f4\3\2\2\2\u01f7\u01fa\3\2\2\2\u01f8\u01f6\3\2\2\2\u01f8"+ + "\u01f9\3\2\2\2\u01f9\u01fc\3\2\2\2\u01fa\u01f8\3\2\2\2\u01fb\u01f1\3\2"+ + "\2\2\u01fb\u01fc\3\2\2\2\u01fc\u01fd\3\2\2\2\u01fd\u01fe\7\4\2\2\u01fe"+ + "\u020a\3\2\2\2\u01ff\u0200\7\3\2\2\u0200\u0201\5\b\5\2\u0201\u0202\7\4"+ + 
"\2\2\u0202\u020a\3\2\2\2\u0203\u020a\5B\"\2\u0204\u020a\5> \2\u0205\u0206"+ + "\7\3\2\2\u0206\u0207\5*\26\2\u0207\u0208\7\4\2\2\u0208\u020a\3\2\2\2\u0209"+ + "\u01d8\3\2\2\2\u0209\u01df\3\2\2\2\u0209\u01e6\3\2\2\2\u0209\u01e7\3\2"+ + "\2\2\u0209\u01eb\3\2\2\2\u0209\u01ee\3\2\2\2\u0209\u01ff\3\2\2\2\u0209"+ + "\u0203\3\2\2\2\u0209\u0204\3\2\2\2\u0209\u0205\3\2\2\2\u020a\65\3\2\2"+ + "\2\u020b\u0217\7-\2\2\u020c\u020d\5B\"\2\u020d\u020e\7O\2\2\u020e\u0217"+ + "\3\2\2\2\u020f\u0217\5H%\2\u0210\u0217\5:\36\2\u0211\u0213\7O\2\2\u0212"+ + "\u0211\3\2\2\2\u0213\u0214\3\2\2\2\u0214\u0212\3\2\2\2\u0214\u0215\3\2"+ + "\2\2\u0215\u0217\3\2\2\2\u0216\u020b\3\2\2\2\u0216\u020c\3\2\2\2\u0216"+ + "\u020f\3\2\2\2\u0216\u0210\3\2\2\2\u0216\u0212\3\2\2\2\u0217\67\3\2\2"+ + "\2\u0218\u0219\t\r\2\2\u02199\3\2\2\2\u021a\u021b\t\16\2\2\u021b;\3\2"+ + "\2\2\u021c\u021d\5B\"\2\u021d=\3\2\2\2\u021e\u021f\5B\"\2\u021f\u0220"+ + "\7\6\2\2\u0220\u0222\3\2\2\2\u0221\u021e\3\2\2\2\u0222\u0225\3\2\2\2\u0223"+ + "\u0221\3\2\2\2\u0223\u0224\3\2\2\2\u0224\u0226\3\2\2\2\u0225\u0223\3\2"+ + "\2\2\u0226\u0227\5B\"\2\u0227?\3\2\2\2\u0228\u0229\5B\"\2\u0229A\3\2\2"+ + "\2\u022a\u022d\5D#\2\u022b\u022d\5F$\2\u022c\u022a\3\2\2\2\u022c\u022b"+ + "\3\2\2\2\u022dC\3\2\2\2\u022e\u0231\7T\2\2\u022f\u0231\7U\2\2\u0230\u022e"+ + "\3\2\2\2\u0230\u022f\3\2\2\2\u0231E\3\2\2\2\u0232\u0236\7R\2\2\u0233\u0236"+ + "\5J&\2\u0234\u0236\7S\2\2\u0235\u0232\3\2\2\2\u0235\u0233\3\2\2\2\u0235"+ + "\u0234\3\2\2\2\u0236G\3\2\2\2\u0237\u023a\7Q\2\2\u0238\u023a\7P\2\2\u0239"+ + "\u0237\3\2\2\2\u0239\u0238\3\2\2\2\u023aI\3\2\2\2\u023b\u023c\t\17\2\2"+ + "\u023cK\3\2\2\2P[]ajlpvy\u0084\u0087\u008b\u0093\u0096\u00a2\u00a5\u00a9"+ + "\u00b0\u00b4\u00b8\u00bf\u00c3\u00c7\u00cc\u00d0\u00d8\u00dc\u00e3\u00ee"+ + "\u00f1\u00f5\u0101\u0104\u010a\u0111\u0118\u011b\u011f\u0123\u0127\u0129"+ + "\u0134\u0139\u013d\u0140\u0146\u0149\u014f\u0152\u0154\u0167\u0174\u0182"+ + 
"\u0187\u018f\u0191\u0196\u0199\u01a1\u01aa\u01b0\u01b8\u01be\u01c1\u01c7"+ + "\u01d3\u01d5\u01eb\u01f1\u01f8\u01fb\u0209\u0214\u0216\u0223\u022c\u0230"+ + "\u0235\u0239"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java index 1539a5285dd..68a756d4562 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java @@ -361,12 +361,6 @@ interface SqlBaseVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx); - /** - * Visit a parse tree produced by {@link SqlBaseParser#columnExpression}. - * @param ctx the parse tree - * @return the visitor result - */ - T visitColumnExpression(SqlBaseParser.ColumnExpressionContext ctx); /** * Visit a parse tree produced by the {@code nullLiteral} * labeled alternative in {@link SqlBaseParser#constant}. 
diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/EsRelation.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/EsRelation.java index 2eb9eca3fa2..b8e09d131e8 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/EsRelation.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/EsRelation.java @@ -6,24 +6,18 @@ package org.elasticsearch.xpack.sql.plan.logical; import org.elasticsearch.xpack.sql.analysis.index.EsIndex; -import org.elasticsearch.xpack.sql.analysis.index.MappingException; import org.elasticsearch.xpack.sql.expression.Attribute; -import org.elasticsearch.xpack.sql.expression.NestedFieldAttribute; -import org.elasticsearch.xpack.sql.expression.RootFieldAttribute; +import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.type.CompoundDataType; import org.elasticsearch.xpack.sql.type.DataType; -import org.elasticsearch.xpack.sql.type.NestedType; -import org.elasticsearch.xpack.sql.type.StringType; import org.elasticsearch.xpack.sql.util.StringUtils; +import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Objects; -import java.util.stream.Stream; - -import static java.util.Collections.emptyList; -import static java.util.stream.Collectors.toList; -import static org.elasticsearch.xpack.sql.util.CollectionUtils.combine; public class EsRelation extends LeafPlan { @@ -33,32 +27,30 @@ public class EsRelation extends LeafPlan { public EsRelation(Location location, EsIndex index) { super(location); this.index = index; - attrs = flatten(location, index.mapping()).collect(toList()); + attrs = flatten(location, index.mapping()); } - private static Stream flatten(Location location, Map mapping) { - return flatten(location, mapping, null, emptyList()); + private static List flatten(Location location, Map mapping) { + 
return flatten(location, mapping, null); } - - private static Stream flatten(Location location, Map mapping, String parent, List nestedParents) { - return mapping.entrySet().stream() - .filter(e -> e.getValue() != null) - .flatMap(e -> { - String name = parent != null ? parent + "." + e.getKey() : e.getKey(); - DataType t = e.getValue(); - if (t.isComplex() && !(t instanceof StringType)) { - if (t instanceof NestedType) { - return Stream.concat(Stream.of(new NestedFieldAttribute(location, name, t, nestedParents)), flatten(location, ((NestedType) t).properties(), name, combine(nestedParents, name))); - } - // if (t instanceof ObjectType) { - // return flatten(location, ((ObjectType) t).properties(), name, combine(nestedParents, name)); - // } - throw new MappingException("Does not know how to handle complex type %s", t); - } - Attribute att = nestedParents.isEmpty() ? new RootFieldAttribute(location, name, t) : new NestedFieldAttribute(location, name, t, nestedParents); - return Stream.of(att); - }); + private static List flatten(Location location, Map mapping, FieldAttribute parent) { + List list = new ArrayList<>(); + + for (Entry entry : mapping.entrySet()) { + String name = entry.getKey(); + DataType t = entry.getValue(); + + if (t != null) { + FieldAttribute f = new FieldAttribute(location, parent, parent != null ? parent.name() + "." 
+ name : name, t); + list.add(f); + // object or nested + if (t instanceof CompoundDataType) { + list.addAll(flatten(location, ((CompoundDataType) t).properties(), f)); + } + } + } + return list; } public EsIndex index() { diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/SubQueryAlias.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/SubQueryAlias.java index e7180399507..97f36c34576 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/SubQueryAlias.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/SubQueryAlias.java @@ -5,13 +5,14 @@ */ package org.elasticsearch.xpack.sql.plan.logical; -import java.util.List; -import java.util.Objects; -import java.util.stream.Collectors; - import org.elasticsearch.xpack.sql.expression.Attribute; import org.elasticsearch.xpack.sql.tree.Location; +import java.util.List; +import java.util.Objects; + +import static java.util.stream.Collectors.toList; + public class SubQueryAlias extends UnaryPlan { private final String alias; @@ -30,7 +31,7 @@ public class SubQueryAlias extends UnaryPlan { return (alias == null ? 
child().output() : child().output().stream() .map(e -> e.withQualifier(alias)) - .collect(Collectors.toList()) + .collect(toList()) ); } diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/Debug.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/Debug.java index cb5bb6b67ac..a5fca9beabe 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/Debug.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/Debug.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.sql.plan.logical.command; import org.elasticsearch.action.ActionListener; import org.elasticsearch.xpack.sql.expression.Attribute; -import org.elasticsearch.xpack.sql.expression.RootFieldAttribute; +import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.rule.RuleExecutor.Batch; import org.elasticsearch.xpack.sql.rule.RuleExecutor.ExecutionInfo; @@ -65,7 +65,7 @@ public class Debug extends Command { @Override public List output() { - return singletonList(new RootFieldAttribute(location(), "plan", DataTypes.KEYWORD)); + return singletonList(new FieldAttribute(location(), "plan", DataTypes.KEYWORD)); } @Override diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/Explain.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/Explain.java index 297fc45bdf0..8b2aa78e90c 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/Explain.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/Explain.java @@ -9,7 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.Strings; import org.elasticsearch.xpack.sql.analysis.analyzer.Analyzer; import org.elasticsearch.xpack.sql.expression.Attribute; -import 
org.elasticsearch.xpack.sql.expression.RootFieldAttribute; +import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.plan.QueryPlan; import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.sql.plan.physical.PhysicalPlan; @@ -76,7 +76,7 @@ public class Explain extends Command { @Override public List output() { - return singletonList(new RootFieldAttribute(location(), "plan", DataTypes.KEYWORD)); + return singletonList(new FieldAttribute(location(), "plan", DataTypes.KEYWORD)); } @Override @@ -157,7 +157,7 @@ public class Explain extends Command { listener.onResponse(Rows.singleton(output(), printPlans(format, plan, analyzedPlan, optimizedPlan, mappedPlan, null))); }, listener::onFailure)); - // cannot continue + // cannot continue } else { if (type != Type.ALL) { listener.onResponse(Rows.singleton(output(), formatPlan(format, analyzedPlan))); diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java index ed34a74b69b..aa928990522 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowColumns.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.sql.plan.logical.command; import org.elasticsearch.action.ActionListener; import org.elasticsearch.xpack.sql.expression.Attribute; -import org.elasticsearch.xpack.sql.expression.RootFieldAttribute; +import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.session.Rows; import org.elasticsearch.xpack.sql.session.SchemaRowSet; import org.elasticsearch.xpack.sql.session.SqlSession; @@ -40,8 +40,8 @@ public class ShowColumns extends Command { @Override public List output() { - return asList(new RootFieldAttribute(location(), "column", 
DataTypes.KEYWORD), - new RootFieldAttribute(location(), "type", DataTypes.KEYWORD)); + return asList(new FieldAttribute(location(), "column", DataTypes.KEYWORD), + new FieldAttribute(location(), "type", DataTypes.KEYWORD)); } @Override @@ -56,7 +56,7 @@ public class ShowColumns extends Command { listener.onResponse(Rows.of(output(), rows)); }, listener::onFailure - )); + )); } private void fillInRows(Map mapping, String prefix, List> rows) { diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowFunctions.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowFunctions.java index 45c3f370eb3..13d14c758be 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowFunctions.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowFunctions.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.sql.plan.logical.command; import org.elasticsearch.action.ActionListener; import org.elasticsearch.xpack.sql.expression.Attribute; -import org.elasticsearch.xpack.sql.expression.RootFieldAttribute; +import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.function.FunctionDefinition; import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry; import org.elasticsearch.xpack.sql.session.Rows; @@ -38,8 +38,8 @@ public class ShowFunctions extends Command { @Override public List output() { - return asList(new RootFieldAttribute(location(), "name", DataTypes.KEYWORD), - new RootFieldAttribute(location(), "type", DataTypes.KEYWORD)); + return asList(new FieldAttribute(location(), "name", DataTypes.KEYWORD), + new FieldAttribute(location(), "type", DataTypes.KEYWORD)); } @Override diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowSchemas.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowSchemas.java index 
919448607ba..9fd77979dc2 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowSchemas.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowSchemas.java @@ -7,8 +7,7 @@ package org.elasticsearch.xpack.sql.plan.logical.command; import org.elasticsearch.action.ActionListener; import org.elasticsearch.xpack.sql.expression.Attribute; -import org.elasticsearch.xpack.sql.expression.RootFieldAttribute; -import org.elasticsearch.xpack.sql.session.RowSet; +import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.session.Rows; import org.elasticsearch.xpack.sql.session.SchemaRowSet; import org.elasticsearch.xpack.sql.session.SqlSession; @@ -27,7 +26,7 @@ public class ShowSchemas extends Command { @Override public List output() { - return singletonList(new RootFieldAttribute(location(), "schema", DataTypes.KEYWORD)); + return singletonList(new FieldAttribute(location(), "schema", DataTypes.KEYWORD)); } @Override diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowTables.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowTables.java index 62ae21aa84b..b389094a93b 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowTables.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/plan/logical/command/ShowTables.java @@ -9,7 +9,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.xpack.sql.expression.Attribute; -import org.elasticsearch.xpack.sql.expression.RootFieldAttribute; +import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.session.Rows; import org.elasticsearch.xpack.sql.session.SchemaRowSet; import org.elasticsearch.xpack.sql.session.SqlSession; @@ -40,7 +40,7 @@ public class ShowTables extends 
Command { @Override public List output() { - return Collections.singletonList(new RootFieldAttribute(location(), "table", DataTypes.KEYWORD)); + return Collections.singletonList(new FieldAttribute(location(), "table", DataTypes.KEYWORD)); } @Override diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java index c87668fd901..7a2e3058880 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java @@ -13,8 +13,6 @@ import org.elasticsearch.xpack.sql.expression.ExpressionId; import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.NamedExpression; -import org.elasticsearch.xpack.sql.expression.NestedFieldAttribute; -import org.elasticsearch.xpack.sql.expression.RootFieldAttribute; import org.elasticsearch.xpack.sql.expression.UnaryExpression; import org.elasticsearch.xpack.sql.expression.function.Function; import org.elasticsearch.xpack.sql.expression.function.Functions; @@ -261,8 +259,8 @@ abstract class QueryTranslator { // change analyzed to non non-analyzed attributes if (exp instanceof FieldAttribute) { FieldAttribute fa = (FieldAttribute) exp; - if (fa.isAnalyzed()) { - ne = fa.notAnalyzedAttribute(); + if (fa.isInexact()) { + ne = fa.exactAttribute(); } } aggId = ne.id().toString(); @@ -415,8 +413,8 @@ abstract class QueryTranslator { static String field(AggregateFunction af) { Expression arg = af.field(); - if (arg instanceof RootFieldAttribute) { - return ((RootFieldAttribute) arg).name(); + if (arg instanceof FieldAttribute) { + return ((FieldAttribute) arg).name(); } if (arg instanceof Literal) { return String.valueOf(((Literal) arg).value()); @@ -431,18 +429,18 @@ abstract class QueryTranslator { @Override 
protected QueryTranslation asQuery(BinaryExpression e, boolean onAggs) { Query q = null; - boolean analyzed = true; + boolean inexact = true; String target = null; if (e.left() instanceof FieldAttribute) { FieldAttribute fa = (FieldAttribute) e.left(); - analyzed = fa.isAnalyzed(); - target = nameOf(analyzed ? fa : fa.notAnalyzedAttribute()); + inexact = fa.isInexact(); + target = nameOf(inexact ? fa : fa.exactAttribute()); } String pattern = sqlToEsPatternMatching(stringValueOf(e.right())); if (e instanceof Like) { - if (analyzed) { + if (inexact) { q = new QueryStringQuery(e.location(), pattern, target); } else { @@ -451,7 +449,7 @@ abstract class QueryTranslator { } if (e instanceof RLike) { - if (analyzed) { + if (inexact) { q = new QueryStringQuery(e.location(), "/" + pattern + "/", target); } else { @@ -622,7 +620,7 @@ abstract class QueryTranslator { if (bc instanceof Equals) { if (bc.left() instanceof FieldAttribute) { FieldAttribute fa = (FieldAttribute) bc.left(); - if (fa.isAnalyzed()) { + if (fa.isInexact()) { return new MatchQuery(loc, name, value); } } @@ -870,9 +868,11 @@ abstract class QueryTranslator { protected abstract QueryTranslation asQuery(E e, boolean onAggs); protected static Query wrapIfNested(Query query, Expression exp) { - if (exp instanceof NestedFieldAttribute) { - NestedFieldAttribute nfa = (NestedFieldAttribute) exp; - return new NestedQuery(nfa.location(), nfa.parentPath(), query); + if (exp instanceof FieldAttribute) { + FieldAttribute fa = (FieldAttribute) exp; + if (fa.isNested()) { + return new NestedQuery(fa.location(), fa.nestedParent().name(), query); + } } return query; } diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/NestedFieldRef.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/NestedFieldRef.java deleted file mode 100644 index c521bdb0b69..00000000000 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/NestedFieldRef.java +++ 
/dev/null @@ -1,43 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.sql.querydsl.container; - -import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; - - -public class NestedFieldRef implements FieldReference { - private final String parent, name; - private final boolean docValue; - - public NestedFieldRef(String parent, String name, boolean useDocValueInsteadOfSource) { - this.parent = parent; - this.name = name; - this.docValue = useDocValueInsteadOfSource; - } - - public String parent() { - return parent; - } - - @Override - public String name() { - return name; - } - - public boolean useDocValue() { - return docValue; - } - - @Override - public void collectFields(SqlSourceBuilder sourceBuilder) { - throw new IllegalStateException("unhandled nested field while collecting source fields [" + getClass() + "]"); - } - - @Override - public String toString() { - return name; - } -} diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java index 58211b4872c..bff3e797637 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/QueryContainer.java @@ -12,9 +12,8 @@ import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.execution.search.SourceGenerator; import org.elasticsearch.xpack.sql.expression.Attribute; +import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.LiteralAttribute; -import 
org.elasticsearch.xpack.sql.expression.NestedFieldAttribute; -import org.elasticsearch.xpack.sql.expression.RootFieldAttribute; import org.elasticsearch.xpack.sql.expression.function.ScoreAttribute; import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunctionAttribute; import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.AttributeInput; @@ -197,15 +196,15 @@ public class QueryContainer { // // reference methods // - private ColumnReference fieldRef(RootFieldAttribute fieldAttr) { + private ColumnReference searchHitFieldRef(FieldAttribute fieldAttr) { return new SearchHitFieldRef(aliasName(fieldAttr), fieldAttr.dataType().hasDocValues()); } - private Tuple nestedFieldRef(NestedFieldAttribute attr) { + private Tuple nestedFieldRef(FieldAttribute attr) { // attach the field to the relevant nested query List nestedRefs = new ArrayList<>(); - String parent = attr.parentPath(); + String parent = attr.nestedParent().name(); String name = aliasName(attr); Query q = query; @@ -234,7 +233,7 @@ public class QueryContainer { } } - NestedFieldRef nestedFieldRef = new NestedFieldRef(attr.parentPath(), attr.name(), attr.dataType().hasDocValues()); + SearchHitFieldRef nestedFieldRef = new SearchHitFieldRef(attr.name(), attr.dataType().hasDocValues(), parent); nestedRefs.add(nestedFieldRef); return new Tuple<>(new QueryContainer(q, aggs, columns, aliases, pseudoFunctions, scalarFunctions, sort, limit), nestedFieldRef); @@ -277,11 +276,13 @@ public class QueryContainer { } private Tuple toReference(Attribute attr) { - if (attr instanceof RootFieldAttribute) { - return new Tuple<>(this, fieldRef((RootFieldAttribute) attr)); - } - if (attr instanceof NestedFieldAttribute) { - return nestedFieldRef((NestedFieldAttribute) attr); + if (attr instanceof FieldAttribute) { + FieldAttribute fa = (FieldAttribute) attr; + if (fa.isNested()) { + return nestedFieldRef(fa); + } else { + return new Tuple<>(this, searchHitFieldRef(fa)); + } } if (attr 
instanceof ScalarFunctionAttribute) { return computingRef((ScalarFunctionAttribute) attr); diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/SearchHitFieldRef.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/SearchHitFieldRef.java index 2e9dbb0cc06..df3f657607e 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/SearchHitFieldRef.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/querydsl/container/SearchHitFieldRef.java @@ -10,10 +10,20 @@ import org.elasticsearch.xpack.sql.execution.search.SqlSourceBuilder; public class SearchHitFieldRef implements FieldReference { private final String name; private final boolean docValue; + private final String hitName; public SearchHitFieldRef(String name, boolean useDocValueInsteadOfSource) { + this(name, useDocValueInsteadOfSource, null); + } + + public SearchHitFieldRef(String name, boolean useDocValueInsteadOfSource, String hitName) { this.name = name; this.docValue = useDocValueInsteadOfSource; + this.hitName = hitName; + } + + public String hitName() { + return hitName; } @Override diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/session/Configuration.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/session/Configuration.java index 8748841b86d..575cdc6bbff 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/session/Configuration.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/session/Configuration.java @@ -12,7 +12,7 @@ import org.elasticsearch.xpack.sql.protocol.shared.AbstractQueryInitRequest; import org.elasticsearch.xpack.sql.protocol.shared.Nullable; import org.joda.time.DateTimeZone; -// Typed object holding properties for a given +// Typed object holding properties for a given action public class Configuration { public static final Configuration DEFAULT = new Configuration(DateTimeZone.UTC, AbstractQueryInitRequest.DEFAULT_FETCH_SIZE, @@ -24,6 +24,7 
@@ public class Configuration { private int pageSize; private TimeValue requestTimeout; private TimeValue pageTimeout; + @Nullable private QueryBuilder filter; diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/CompoundDataType.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/CompoundDataType.java index 431b007d759..259fd22c97d 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/CompoundDataType.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/CompoundDataType.java @@ -7,39 +7,48 @@ package org.elasticsearch.xpack.sql.type; import java.sql.JDBCType; import java.util.Map; +import java.util.Objects; -public interface CompoundDataType extends DataType { +public abstract class CompoundDataType extends AbstractDataType { - @Override - default JDBCType sqlType() { - return JDBCType.STRUCT; + private final Map properties; + + CompoundDataType(JDBCType sqlType, boolean hasDocValues, Map properties) { + super(sqlType, hasDocValues); + this.properties = properties; + } + + public Map properties() { + return properties; } @Override - default int precision() { + public int precision() { return 0; } @Override - default boolean isInteger() { + public boolean isInteger() { return false; } @Override - default boolean isRational() { + public boolean isRational() { return false; } @Override - default boolean isPrimitive() { + public boolean isPrimitive() { return false; } @Override - default boolean hasDocValues() { - return false; + public int hashCode() { + return Objects.hash(super.hashCode(), Objects.hash(properties)); } - Map properties(); - + @Override + public boolean equals(Object obj) { + return super.equals(obj) && Objects.equals(properties, ((CompoundDataType) obj).properties); + } } diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java index 89c79133ddf..22abeaac4af 100644 --- 
a/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java @@ -55,10 +55,6 @@ public interface DataType { return isInteger() || isRational(); } - default boolean isComplex() { - return !isPrimitive(); - } - boolean isPrimitive(); default boolean same(DataType other) { diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java index 754949c6ef8..0040848e5a9 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/DataTypeConversion.java @@ -71,17 +71,17 @@ public abstract class DataTypeConversion { public static boolean canConvert(DataType from, DataType to) { // TODO it'd be cleaner and more right to fetch the conversion // only primitives are supported so far - if (from.isComplex() || to.isComplex()) { + if (!from.isPrimitive() || !to.isPrimitive()) { return false; } - + if (from.getClass() == to.getClass()) { return true; } if (from instanceof NullType) { return true; } - + // anything can be converted to String if (to instanceof StringType) { return true; diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/KeywordType.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/KeywordType.java index e692e0c104a..bc135d61b02 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/KeywordType.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/KeywordType.java @@ -13,13 +13,25 @@ import static java.util.Collections.emptyMap; public class KeywordType extends StringType { static final int DEFAULT_LENGTH = 256; - static final KeywordType DEFAULT = new KeywordType(true, DEFAULT_LENGTH, emptyMap()); + static final boolean DEFAULT_NORMALIZED = false; + static final KeywordType DEFAULT = new 
KeywordType(true, DEFAULT_LENGTH, DEFAULT_NORMALIZED, emptyMap()); private final int length; - - KeywordType(boolean docValues, int length, Map fields) { + private final boolean normalized; + + KeywordType(boolean docValues, int length, boolean normalized, Map fields) { super(docValues, fields); this.length = length; + this.normalized = normalized; + } + + @Override + public boolean isInexact() { + return normalized; + } + + public boolean isNormalized() { + return normalized; } @Override @@ -32,28 +44,19 @@ public class KeywordType extends StringType { return length; } - static DataType from(boolean docValues, int length, Map fields) { - return docValues && length == DEFAULT_LENGTH && fields.isEmpty() ? DEFAULT : new KeywordType(docValues, length, fields); - } - @Override public int hashCode() { return Objects.hash(length, hasDocValues(), fields()); } - + @Override public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - KeywordType other = (KeywordType) obj; - return Objects.equals(hasDocValues(), other.hasDocValues()) - && Objects.equals(length, other.length) - && Objects.equals(fields(), other.fields()); + return super.equals(obj) && length == ((KeywordType) obj).length; + } + + static DataType from(boolean docValues, int length, boolean normalized, Map fields) { + return docValues && length == DEFAULT_LENGTH && fields.isEmpty() && normalized == DEFAULT_NORMALIZED + ? 
DEFAULT + : new KeywordType(docValues, length, normalized, fields); } } diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/NestedType.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/NestedType.java index 8c38c25ed64..7382e62a2fe 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/NestedType.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/NestedType.java @@ -5,19 +5,13 @@ */ package org.elasticsearch.xpack.sql.type; +import java.sql.JDBCType; import java.util.Map; -import java.util.Objects; -public class NestedType implements CompoundDataType { - - private final Map properties; +public class NestedType extends CompoundDataType { public NestedType(Map properties) { - this.properties = properties; - } - - public Map properties() { - return properties; + super(JDBCType.STRUCT, false, properties); } @Override @@ -25,27 +19,8 @@ public class NestedType implements CompoundDataType { return "nested"; } - @Override - public int hashCode() { - return Objects.hash(properties); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - NestedType other = (NestedType) obj; - return Objects.equals(properties, other.properties); - } - @Override public String toString() { - return getClass().getSimpleName() + "[" + esName() + "|" + sqlName() + "]=" + properties; + return "N" + properties(); } } \ No newline at end of file diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/ObjectType.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/ObjectType.java index c7a6a3803fa..0c63ab6d334 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/ObjectType.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/ObjectType.java @@ -5,23 +5,17 @@ */ package org.elasticsearch.xpack.sql.type; +import java.sql.JDBCType; import java.util.Map; 
-import java.util.Objects; import static java.util.Collections.emptyMap; -public class ObjectType implements CompoundDataType { +public class ObjectType extends CompoundDataType { public static final ObjectType EMPTY = new ObjectType(emptyMap()); - - private final Map properties; - public ObjectType(Map properties) { - this.properties = properties; - } - - public Map properties() { - return properties; + ObjectType(Map properties) { + super(JDBCType.STRUCT, false, properties); } @Override @@ -30,21 +24,7 @@ public class ObjectType implements CompoundDataType { } @Override - public int hashCode() { - return Objects.hash(properties); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - ObjectType other = (ObjectType) obj; - return Objects.equals(properties, other.properties); + public String toString() { + return "O" + properties(); } } diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/StringType.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/StringType.java index e30506e80d1..abe9128434e 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/StringType.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/StringType.java @@ -8,56 +8,75 @@ package org.elasticsearch.xpack.sql.type; import java.sql.JDBCType; import java.util.LinkedHashMap; import java.util.Map; +import java.util.Map.Entry; +import java.util.Objects; import static java.util.Collections.emptyMap; -import static java.util.stream.Collectors.toMap; -public abstract class StringType implements DataType { +// String type is a special type of CompoundDataType +public abstract class StringType extends CompoundDataType { private final boolean docValue; private final Map fields; - private final Map docValueFields; + private final Map exactKeywords; + StringType(boolean docValue, Map fields) { + super(JDBCType.VARCHAR, 
docValue, fields); + this.docValue = docValue; this.fields = fields; if (docValue || fields.isEmpty()) { - docValueFields = emptyMap(); + exactKeywords = emptyMap(); } else { - docValueFields = fields.entrySet().stream() - .filter(e -> e.getValue().hasDocValues()) - .collect(toMap( - Map.Entry::getKey, - Map.Entry::getValue, - (k1, k2) -> { - throw new IllegalStateException("Duplicate key " + k1); - }, - LinkedHashMap::new)); + exactKeywords = new LinkedHashMap<>(); + for (Entry entry : fields.entrySet()) { + DataType t = entry.getValue(); + // consider only non-normalized keywords + if (t instanceof KeywordType) { + KeywordType kt = (KeywordType) t; + if (!kt.isNormalized()) { + exactKeywords.put(entry.getKey(), kt); + } + } + } } } - @Override - public JDBCType sqlType() { - return JDBCType.VARCHAR; - } - - @Override - public boolean hasDocValues() { - return docValue; - } + public abstract boolean isInexact(); public Map fields() { - return fields; + return properties(); } - public Map docValueFields() { - return docValueFields; + public Map exactKeywords() { + return exactKeywords; } @Override public boolean isPrimitive() { - return fields.isEmpty(); + return true; + } + + @Override + public int precision() { + return Integer.MAX_VALUE; + } + + @Override + public int hashCode() { + return Objects.hash(docValue, fields); + } + + @Override + public boolean equals(Object obj) { + if (super.equals(obj)) { + StringType other = (StringType) obj; + return Objects.equals(docValue, other.docValue) + && Objects.equals(fields(), other.fields()); + } + return false; } @Override diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/TextType.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/TextType.java index 30182eff789..47b74053f86 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/TextType.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/TextType.java @@ -6,6 +6,7 @@ package 
org.elasticsearch.xpack.sql.type; import java.util.Map; +import java.util.Objects; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; @@ -29,11 +30,26 @@ public class TextType extends StringType { return fieldData; } + @Override + public boolean isInexact() { + return true; + } + @Override public String esName() { return "text"; } + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), fieldData); + } + + @Override + public boolean equals(Object obj) { + return super.equals(obj) && fieldData == ((TextType) obj).fieldData; + } + static DataType from(boolean fieldData, Map fields) { return DEFAULT.fieldData == fieldData && DEFAULT.fields().equals(fields) ? DEFAULT : new TextType(fieldData, fields); } diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/Types.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/Types.java index bdb0ecab198..dc4f10d8285 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/Types.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/Types.java @@ -9,38 +9,71 @@ import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; import org.elasticsearch.xpack.sql.analysis.index.MappingException; +import java.util.HashSet; import java.util.LinkedHashMap; import java.util.Map; import java.util.Map.Entry; +import java.util.Set; import static java.lang.Math.floor; import static java.lang.Math.log10; import static java.lang.Math.round; import static java.util.Collections.emptyMap; +import static java.util.Collections.unmodifiableSet; public abstract class Types { - @SuppressWarnings("unchecked") - public static Map fromEs(Map asMap) { - Map props = (Map) asMap.get("properties"); - return props == null || props.isEmpty() ? 
emptyMap() : startWalking(props); + private static final Set KNOWN_TYPES; + + static { + Set types = new HashSet<>(); + types.add("text"); + types.add("keyword"); + types.add("long"); + types.add("integer"); + types.add("short"); + types.add("byte"); + types.add("double"); + types.add("float"); + types.add("half_float"); + types.add("scaled_float"); + types.add("date"); + types.add("boolean"); + types.add("binary"); + types.add("object"); + types.add("nested"); + + KNOWN_TYPES = unmodifiableSet(types); } - private static Map startWalking(Map mapping) { - Map translated = new LinkedHashMap<>(); + public static Map fromEs(Map asMap) { + return fromEs(asMap, false); + } + + @SuppressWarnings("unchecked") + public static Map fromEs(Map asMap, boolean ignoreUnsupported) { + Map props = null; + if (asMap != null && !asMap.isEmpty()) { + props = (Map) asMap.get("properties"); + } + return props == null || props.isEmpty() ? emptyMap() : startWalking(props, ignoreUnsupported); + } + + private static Map startWalking(Map mapping, boolean ignoreUnsupported) { + Map types = new LinkedHashMap<>(); if (mapping == null) { return emptyMap(); } for (Entry entry : mapping.entrySet()) { - walkMapping(entry.getKey(), entry.getValue(), translated); + walkMapping(entry.getKey(), entry.getValue(), types, ignoreUnsupported); } - return translated; + return types; } @SuppressWarnings("unchecked") - private static void walkMapping(String name, Object value, Map mapping) { + private static void walkMapping(String name, Object value, Map mapping, boolean ignoreUnsupported) { // object type - only root or nested docs supported if (value instanceof Map) { Map content = (Map) value; @@ -50,40 +83,44 @@ public abstract class Types { if (type instanceof String) { String st = type.toString(); - if (isNested(st)) { - mapping.put(name, new NestedType(fromEs(content))); - return; - } - - if (isPrimitive(st)) { - // check dates first to account for the format - mapping.put(name, createPrimitiveType(st, 
content)); - return; - } - - else { - throw new MappingException("Don't know how to parse entry %s in map %s", type, content); + if (knownType(st)) { + if (isNested(st)) { + mapping.put(name, new NestedType(fromEs(content))); + } else { + // check dates first to account for the format + mapping.put(name, createPrimitiveType(st, content, ignoreUnsupported)); + } + } else { + if (!ignoreUnsupported) { + throw new MappingException("Unsupported mapping type %s", type); + } } } - // object type ignored - } - else { - throw new MappingException("Don't know how to parse mapping %s", value); + // object type ? + else if (type == null && content.containsKey("properties")) { + mapping.put(name, new ObjectType(fromEs(content))); + } + // bail out + else { + throw new MappingException("Unsupported mapping %s", type); + } + } else { + throw new MappingException("Unrecognized mapping %s", value); } } @SuppressWarnings("unchecked") - private static DataType createPrimitiveType(String typeString, Map content) { + private static DataType createPrimitiveType(String typeString, Map content, boolean ignoreUnsupported) { // since this setting is available in most types, search for it regardless - + DataType type = null; - - boolean docValues = boolSetting(content.get("doc_values"), true); + + boolean docValues = boolSetting(content.get("doc_values"), true); switch (typeString) { case "date": Object fmt = content.get("format"); if (fmt != null) { - type = new DateType(docValues, Strings.split(fmt.toString(), "||")); + type = new DateType(docValues, Strings.delimitedListToStringArray(fmt.toString(), "||")); } else { type = docValues ? 
DateType.DEFAULT : new DateType(false); @@ -94,18 +131,19 @@ public abstract class Types { Object value = content.get("fields"); Map fields = emptyMap(); if (value instanceof Map) { - fields = startWalking((Map) value); + fields = startWalking((Map) value, ignoreUnsupported); } type = TextType.from(fieldData, fields); break; case "keyword": int length = intSetting(content.get("ignore_above"), KeywordType.DEFAULT_LENGTH); + boolean normalized = Strings.hasText(textSetting(content.get("normalizer"), null)); fields = emptyMap(); value = content.get("fields"); if (value instanceof Map) { - fields = startWalking((Map) value); + fields = startWalking((Map) value, ignoreUnsupported); } - type = KeywordType.from(docValues, length, fields); + type = KeywordType.from(docValues, length, normalized, fields); break; default: type = DataTypes.fromEsName(typeString, docValues); @@ -114,6 +152,10 @@ public abstract class Types { return type; } + private static String textSetting(Object value, String defaultValue) { + return value == null ? defaultValue : value.toString(); + } + private static boolean boolSetting(Object value, boolean defaultValue) { return value == null ? defaultValue : Booleans.parseBoolean(value.toString(), defaultValue); } @@ -122,8 +164,8 @@ public abstract class Types { return value == null ? 
defaultValue : Integer.parseInt(value.toString()); } - private static boolean isPrimitive(String string) { - return !isNested(string); + private static boolean knownType(String st) { + return KNOWN_TYPES.contains(st); } private static boolean isNested(String type) { diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/UnknownDataType.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/UnknownDataType.java index b77838cb906..f0751d67243 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/UnknownDataType.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/type/UnknownDataType.java @@ -18,14 +18,8 @@ public class UnknownDataType extends AbstractDataType { return "unknown"; } - @Override - public boolean isComplex() { - return false; - } - @Override public boolean isPrimitive() { return false; } - } diff --git a/sql/server/src/main/java/org/elasticsearch/xpack/sql/util/StringUtils.java b/sql/server/src/main/java/org/elasticsearch/xpack/sql/util/StringUtils.java index 4b70bd2f4cc..a0100eeee9a 100644 --- a/sql/server/src/main/java/org/elasticsearch/xpack/sql/util/StringUtils.java +++ b/sql/server/src/main/java/org/elasticsearch/xpack/sql/util/StringUtils.java @@ -22,7 +22,6 @@ import java.util.List; import java.util.Locale; import java.util.regex.Pattern; -import static java.util.stream.Collectors.joining; import static java.util.stream.Collectors.toList; public abstract class StringUtils { @@ -62,13 +61,6 @@ public abstract class StringUtils { } } - public static String concatWithDot(List strings) { - if (strings == null || strings.isEmpty()) { - return EMPTY; - } - return strings.stream().collect(joining(".")); - } - //CamelCase to camel_case public static String camelCaseToUnderscore(String string) { if (!Strings.hasText(string)) { @@ -195,7 +187,7 @@ public abstract class StringUtils { for (String potentialMatch : potentialMatches) { float distance = ld.getDistance(match, potentialMatch); if (distance >= 
0.5f) { - scoredMatches.add(new Tuple<>(distance, potentialMatch)); + scoredMatches.add(new Tuple<>(distance, potentialMatch)); } } CollectionUtil.timSort(scoredMatches, (a,b) -> b.v1().compareTo(a.v1())); diff --git a/sql/server/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/FieldAttributeTests.java b/sql/server/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/FieldAttributeTests.java new file mode 100644 index 00000000000..33eec7d33be --- /dev/null +++ b/sql/server/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/FieldAttributeTests.java @@ -0,0 +1,130 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.analysis.analyzer; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.analysis.index.EsIndex; +import org.elasticsearch.xpack.sql.analysis.index.GetIndexResult; +import org.elasticsearch.xpack.sql.analysis.index.MappingException; +import org.elasticsearch.xpack.sql.expression.Attribute; +import org.elasticsearch.xpack.sql.expression.FieldAttribute; +import org.elasticsearch.xpack.sql.expression.NamedExpression; +import org.elasticsearch.xpack.sql.expression.function.DefaultFunctionRegistry; +import org.elasticsearch.xpack.sql.expression.function.FunctionRegistry; +import org.elasticsearch.xpack.sql.parser.SqlParser; +import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.sql.plan.logical.Project; +import org.elasticsearch.xpack.sql.type.DataType; +import org.elasticsearch.xpack.sql.type.KeywordType; +import org.elasticsearch.xpack.sql.type.TextType; +import org.elasticsearch.xpack.sql.type.TypesTests; +import org.joda.time.DateTimeZone; + +import java.util.List; +import java.util.Map; + +import static 
org.elasticsearch.xpack.sql.type.DataTypes.BOOLEAN; +import static org.elasticsearch.xpack.sql.type.DataTypes.KEYWORD; +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; + +public class FieldAttributeTests extends ESTestCase { + + private SqlParser parser; + private GetIndexResult getIndexResult; + private FunctionRegistry functionRegistry; + private Analyzer analyzer; + + public FieldAttributeTests() { + parser = new SqlParser(DateTimeZone.UTC); + functionRegistry = new DefaultFunctionRegistry(); + + Map mapping = TypesTests.loadMapping("mapping-multi-field-variation.json"); + + EsIndex test = new EsIndex("test", mapping); + getIndexResult = GetIndexResult.valid(test); + analyzer = new Analyzer(functionRegistry, getIndexResult, DateTimeZone.UTC); + } + + private LogicalPlan plan(String sql) { + return analyzer.analyze(parser.createStatement(sql), true); + } + + private FieldAttribute attribute(String fieldName) { + LogicalPlan plan = plan("SELECT " + fieldName + " FROM test"); + assertThat(plan, instanceOf(Project.class)); + Project p = (Project) plan; + List projections = p.projections(); + assertThat(projections, hasSize(1)); + Attribute attribute = projections.get(0).toAttribute(); + assertThat(attribute, instanceOf(FieldAttribute.class)); + return (FieldAttribute) attribute; + } + + private String error(String fieldName) { + VerificationException ve = expectThrows(VerificationException.class, () -> plan("SELECT " + fieldName + " FROM test")); + return ve.getMessage(); + } + + public void testRootField() { + FieldAttribute attr = attribute("bool"); + assertThat(attr.name(), is("bool")); + assertThat(attr.dataType(), is(BOOLEAN)); + } + + public void testDottedField() { + FieldAttribute attr = attribute("some.dotted.field"); + assertThat(attr.path(), is("some.dotted")); + assertThat(attr.name(), is("some.dotted.field")); + assertThat(attr.dataType(), is(KEYWORD)); + } + + 
public void testExactKeyword() { + FieldAttribute attr = attribute("some.string"); + assertThat(attr.path(), is("some")); + assertThat(attr.name(), is("some.string")); + assertThat(attr.dataType(), instanceOf(TextType.class)); + assertThat(attr.isInexact(), is(true)); + FieldAttribute exact = attr.exactAttribute(); + assertThat(exact.isInexact(), is(false)); + assertThat(exact.name(), is("some.string.typical")); + assertThat(exact.dataType(), instanceOf(KeywordType.class)); + } + + public void testAmbiguousExactKeyword() { + FieldAttribute attr = attribute("some.ambiguous"); + assertThat(attr.path(), is("some")); + assertThat(attr.name(), is("some.ambiguous")); + assertThat(attr.dataType(), instanceOf(TextType.class)); + assertThat(attr.isInexact(), is(true)); + MappingException me = expectThrows(MappingException.class, () -> attr.exactAttribute()); + assertThat(me.getMessage(), + is("Multiple exact keyword candidates [one, two] available for some.ambiguous; specify which one to use")); + } + + public void testNormalizedKeyword() { + FieldAttribute attr = attribute("some.string.normalized"); + assertThat(attr.path(), is("some.string")); + assertThat(attr.name(), is("some.string.normalized")); + assertThat(attr.dataType(), instanceOf(KeywordType.class)); + assertThat(attr.isInexact(), is(true)); + } + + public void testDottedFieldPath() { + assertThat(error("some"), is("Found 1 problem(s)\nline 1:8: Cannot use field [some] (type object) only its subfields")); + } + + public void testDottedFieldPathDeeper() { + assertThat(error("some.dotted"), + is("Found 1 problem(s)\nline 1:8: Cannot use field [some.dotted] (type object) only its subfields")); + } + + public void testDottedFieldPathTypo() { + assertThat(error("some.dotted.fild"), + is("Found 1 problem(s)\nline 1:8: Unknown column [some.dotted.fild], did you mean [some.dotted.field]?")); + } +} \ No newline at end of file diff --git 
a/sql/server/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java b/sql/server/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java index c95c583ff6d..7a956a45390 100644 --- a/sql/server/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java +++ b/sql/server/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java @@ -6,29 +6,22 @@ package org.elasticsearch.xpack.sql.analysis.analyzer; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.xpack.sql.analysis.AnalysisException; import org.elasticsearch.xpack.sql.analysis.index.EsIndex; import org.elasticsearch.xpack.sql.analysis.index.GetIndexResult; import org.elasticsearch.xpack.sql.expression.function.DefaultFunctionRegistry; import org.elasticsearch.xpack.sql.parser.SqlParser; import org.elasticsearch.xpack.sql.type.DataType; -import org.elasticsearch.xpack.sql.type.DataTypes; +import org.elasticsearch.xpack.sql.type.TypesTests; import org.joda.time.DateTimeZone; -import java.util.LinkedHashMap; import java.util.Map; -@TestLogging("org.elasticsearch.xpack.sql:TRACE") public class VerifierErrorMessagesTests extends ESTestCase { private SqlParser parser = new SqlParser(DateTimeZone.UTC); private String verify(String sql) { - Map mapping = new LinkedHashMap<>(); - mapping.put("bool", DataTypes.BOOLEAN); - mapping.put("int", DataTypes.INTEGER); - mapping.put("text", DataTypes.TEXT); - mapping.put("keyword", DataTypes.KEYWORD); + Map mapping = TypesTests.loadMapping("mapping-multi-field-variation.json"); EsIndex test = new EsIndex("test", mapping); return verify(GetIndexResult.valid(test), sql); } diff --git a/sql/server/src/test/java/org/elasticsearch/xpack/sql/execution/search/ScrollCursorTests.java b/sql/server/src/test/java/org/elasticsearch/xpack/sql/execution/search/ScrollCursorTests.java 
index 4982e9fa953..014bdaaee6c 100644 --- a/sql/server/src/test/java/org/elasticsearch/xpack/sql/execution/search/ScrollCursorTests.java +++ b/sql/server/src/test/java/org/elasticsearch/xpack/sql/execution/search/ScrollCursorTests.java @@ -10,12 +10,10 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.sql.execution.search.extractor.ConstantExtractorTests; -import org.elasticsearch.xpack.sql.execution.search.extractor.DocValueExtractorTests; +import org.elasticsearch.xpack.sql.execution.search.extractor.FieldHitExtractorTests; import org.elasticsearch.xpack.sql.execution.search.extractor.HitExtractor; import org.elasticsearch.xpack.sql.execution.search.extractor.HitExtractors; -import org.elasticsearch.xpack.sql.execution.search.extractor.InnerHitExtractorTests; import org.elasticsearch.xpack.sql.execution.search.extractor.ProcessingHitExtractorTests; -import org.elasticsearch.xpack.sql.execution.search.extractor.SourceExtractorTests; import org.elasticsearch.xpack.sql.session.Cursor; import java.io.IOException; @@ -39,9 +37,7 @@ public class ScrollCursorTests extends AbstractWireSerializingTestCase ProcessingHitExtractorTests.randomProcessingHitExtractor(depth)); } options.add(ConstantExtractorTests::randomConstantExtractor); - options.add(DocValueExtractorTests::randomDocValueExtractor); - options.add(InnerHitExtractorTests::randomInnerHitExtractor); - options.add(SourceExtractorTests::randomSourceExtractor); + options.add(FieldHitExtractorTests::randomFieldHitExtractor); return randomFrom(options).get(); } diff --git a/sql/server/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java b/sql/server/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java index 57a34569a1a..328afbd2712 100644 --- 
a/sql/server/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java +++ b/sql/server/src/test/java/org/elasticsearch/xpack/sql/execution/search/SourceGeneratorTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilde import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.sql.expression.RootFieldAttribute; +import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.function.Score; import org.elasticsearch.xpack.sql.querydsl.agg.Aggs; import org.elasticsearch.xpack.sql.querydsl.agg.AvgAgg; @@ -28,12 +28,11 @@ import org.elasticsearch.xpack.sql.querydsl.query.MatchQuery; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.type.DataTypes; +import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; import static org.elasticsearch.search.sort.SortBuilders.fieldSort; import static org.elasticsearch.search.sort.SortBuilders.scoreSort; -import static java.util.Collections.emptyList; - public class SourceGeneratorTests extends ESTestCase { public void testNoQueryNoFilter() { @@ -81,26 +80,26 @@ public class SourceGeneratorTests extends ESTestCase { public void testSelectScoreForcesTrackingScore() { QueryContainer container = new QueryContainer() - .addColumn(new Score(new Location(1, 1)).toAttribute()); + .addColumn(new Score(new Location(1, 1)).toAttribute()); SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10)); assertTrue(sourceBuilder.trackScores()); } public void testSortScoreSpecified() { QueryContainer container = new QueryContainer() - .sort(new ScoreSort(Direction.DESC)); + .sort(new ScoreSort(Direction.DESC)); SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, 
null, randomIntBetween(1, 10)); assertEquals(singletonList(scoreSort()), sourceBuilder.sorts()); } public void testSortFieldSpecified() { QueryContainer container = new QueryContainer() - .sort(new AttributeSort(new RootFieldAttribute(new Location(1, 1), "test", DataTypes.KEYWORD), Direction.ASC)); + .sort(new AttributeSort(new FieldAttribute(new Location(1, 1), "test", DataTypes.KEYWORD), Direction.ASC)); SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10)); assertEquals(singletonList(fieldSort("test").order(SortOrder.ASC)), sourceBuilder.sorts()); container = new QueryContainer() - .sort(new AttributeSort(new RootFieldAttribute(new Location(1, 1), "test", DataTypes.KEYWORD), Direction.DESC)); + .sort(new AttributeSort(new FieldAttribute(new Location(1, 1), "test", DataTypes.KEYWORD), Direction.DESC)); sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10)); assertEquals(singletonList(fieldSort("test").order(SortOrder.DESC)), sourceBuilder.sorts()); } @@ -112,9 +111,9 @@ public class SourceGeneratorTests extends ESTestCase { public void testNoSortIfAgg() { QueryContainer container = new QueryContainer() - .addGroups(singletonList(new GroupByColumnAgg("group_id", "", "group_column"))) - .addAgg("group_id", new AvgAgg("agg_id", "", "avg_column")); + .addGroups(singletonList(new GroupByColumnAgg("group_id", "", "group_column"))) + .addAgg("group_id", new AvgAgg("agg_id", "", "avg_column")); SearchSourceBuilder sourceBuilder = SourceGenerator.sourceBuilder(container, null, randomIntBetween(1, 10)); assertNull(sourceBuilder.sorts()); } -} +} \ No newline at end of file diff --git a/sql/server/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/DocValueExtractorTests.java b/sql/server/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/DocValueExtractorTests.java deleted file mode 100644 index 0a1a3e430e9..00000000000 --- 
a/sql/server/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/DocValueExtractorTests.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.sql.execution.search.extractor; - -import org.elasticsearch.common.document.DocumentField; -import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.test.AbstractWireSerializingTestCase; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -import static java.util.Collections.singletonMap; - -public class DocValueExtractorTests extends AbstractWireSerializingTestCase { - public static DocValueExtractor randomDocValueExtractor() { - return new DocValueExtractor(randomAlphaOfLength(5)); - } - - @Override - protected DocValueExtractor createTestInstance() { - return randomDocValueExtractor(); - } - - @Override - protected Reader instanceReader() { - return DocValueExtractor::new; - } - - @Override - protected DocValueExtractor mutateInstance(DocValueExtractor instance) throws IOException { - return new DocValueExtractor(instance.toString().substring(1) + "mutated"); - } - - public void testGet() { - String fieldName = randomAlphaOfLength(5); - DocValueExtractor extractor = new DocValueExtractor(fieldName); - - int times = between(1, 1000); - for (int i = 0; i < times; i++) { - List documentFieldValues = new ArrayList<>(); - documentFieldValues.add(new Object()); - if (randomBoolean()) { - documentFieldValues.add(new Object()); - } - SearchHit hit = new SearchHit(1); - DocumentField field = new DocumentField(fieldName, documentFieldValues); - hit.fields(singletonMap(fieldName, field)); - assertEquals(documentFieldValues.get(0), extractor.get(hit)); - } 
- } - - public void testToString() { - assertEquals("%incoming_links", new DocValueExtractor("incoming_links").toString()); - } -} diff --git a/sql/server/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java b/sql/server/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java new file mode 100644 index 00000000000..cf6834bcd91 --- /dev/null +++ b/sql/server/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java @@ -0,0 +1,207 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.execution.search.extractor; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.document.DocumentField; +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.sql.execution.ExecutionException; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.function.Supplier; + +import static java.util.Arrays.asList; +import static java.util.Collections.singletonMap; +import static org.hamcrest.Matchers.is; + +public class FieldHitExtractorTests extends AbstractWireSerializingTestCase { + public static FieldHitExtractor randomFieldHitExtractor() { + return new FieldHitExtractor(randomAlphaOfLength(5), randomBoolean(), randomAlphaOfLength(5)); + } + + @Override + protected FieldHitExtractor createTestInstance() { + return randomFieldHitExtractor(); + } + + @Override + protected 
Reader instanceReader() { + return FieldHitExtractor::new; + } + + @Override + protected FieldHitExtractor mutateInstance(FieldHitExtractor instance) throws IOException { + return new FieldHitExtractor(instance.fieldName() + "mutated", true, instance.hitName()); + } + + @AwaitsFix(bugUrl = "https://github.com/elastic/x-pack-elasticsearch/issues/3082") + public void testGetNested() throws IOException { + fail("implement after we're sure of the InnerHitExtractor's implementation"); + } + + public void testGetDottedValueWithDocValues() { + String grandparent = randomAlphaOfLength(5); + String parent = randomAlphaOfLength(5); + String child = randomAlphaOfLength(5); + String fieldName = grandparent + "." + parent + "." + child; + + FieldHitExtractor extractor = new FieldHitExtractor(fieldName, true); + + int times = between(1, 1000); + for (int i = 0; i < times; i++) { + + List documentFieldValues = new ArrayList<>(); + if (randomBoolean()) { + documentFieldValues.add(new Object()); + } + + SearchHit hit = new SearchHit(1); + DocumentField field = new DocumentField(fieldName, documentFieldValues); + hit.fields(singletonMap(fieldName, field)); + Object result = documentFieldValues.isEmpty() ? null : documentFieldValues.get(0); + assertEquals(result, extractor.get(hit)); + } + } + + public void testGetDottedValueWithSource() throws Exception { + String grandparent = randomAlphaOfLength(5); + String parent = randomAlphaOfLength(5); + String child = randomAlphaOfLength(5); + String fieldName = grandparent + "." + parent + "." + child; + + FieldHitExtractor extractor = new FieldHitExtractor(fieldName, false); + + int times = between(1, 1000); + for (int i = 0; i < times; i++) { + /* We use values that are parsed from json as "equal" to make the + * test simpler. 
*/ + @SuppressWarnings("unchecked") + Supplier valueSupplier = randomFrom( + () -> randomAlphaOfLength(5), + () -> randomInt(), + () -> randomDouble()); + Object value = valueSupplier.get(); + SearchHit hit = new SearchHit(1); + XContentBuilder source = JsonXContent.contentBuilder(); + boolean hasGrandparent = randomBoolean(); + boolean hasParent = randomBoolean(); + boolean hasChild = randomBoolean(); + boolean hasSource = hasGrandparent && hasParent && hasChild; + + source.startObject(); + if (hasGrandparent) { + source.startObject(grandparent); + if (hasParent) { + source.startObject(parent); + if (hasChild) { + source.field(child, value); + if (randomBoolean()) { + source.field(fieldName + randomAlphaOfLength(3), value + randomAlphaOfLength(3)); + } + } + source.endObject(); + } + source.endObject(); + } + source.endObject(); + BytesReference sourceRef = source.bytes(); + hit.sourceRef(sourceRef); + Object extract = extractor.get(hit); + assertEquals(hasSource ? value : null, extract); + } + } + + public void testGetDocValue() { + String fieldName = randomAlphaOfLength(5); + FieldHitExtractor extractor = new FieldHitExtractor(fieldName, true); + + int times = between(1, 1000); + for (int i = 0; i < times; i++) { + List documentFieldValues = new ArrayList<>(); + if (randomBoolean()) { + documentFieldValues.add(new Object()); + } + SearchHit hit = new SearchHit(1); + DocumentField field = new DocumentField(fieldName, documentFieldValues); + hit.fields(singletonMap(fieldName, field)); + Object result = documentFieldValues.isEmpty() ? null : documentFieldValues.get(0); + assertEquals(result, extractor.get(hit)); + } + } + + public void testGetSource() throws IOException { + String fieldName = randomAlphaOfLength(5); + FieldHitExtractor extractor = new FieldHitExtractor(fieldName, false); + + int times = between(1, 1000); + for (int i = 0; i < times; i++) { + /* We use values that are parsed from json as "equal" to make the + * test simpler. 
*/ + @SuppressWarnings("unchecked") + Supplier valueSupplier = randomFrom( + () -> randomAlphaOfLength(5), + () -> randomInt(), + () -> randomDouble()); + Object value = valueSupplier.get(); + SearchHit hit = new SearchHit(1); + XContentBuilder source = JsonXContent.contentBuilder(); + source.startObject(); { + source.field(fieldName, value); + if (randomBoolean()) { + source.field(fieldName + "_random_junk", value + "_random_junk"); + } + } + source.endObject(); + BytesReference sourceRef = source.bytes(); + hit.sourceRef(sourceRef); + assertEquals(value, extractor.get(hit)); + } + } + + public void testToString() { + assertEquals("field@hit", new FieldHitExtractor("field", true, "hit").toString()); + } + + public void testMultiValuedDocValue() { + String fieldName = randomAlphaOfLength(5); + FieldHitExtractor fe = new FieldHitExtractor(fieldName, true); + SearchHit hit = new SearchHit(1); + DocumentField field = new DocumentField(fieldName, asList("a", "b")); + hit.fields(singletonMap(fieldName, field)); + ExecutionException ex = expectThrows(ExecutionException.class, () -> fe.get(hit)); + assertThat(ex.getMessage(), is("Arrays (returned by [" + fieldName + "]) are not supported")); + } + + public void testExtractSourcePath() { + FieldHitExtractor fe = new FieldHitExtractor("a.b.c", false); + Object value = new Object(); + Map map = singletonMap("a", singletonMap("b", singletonMap("c", value))); + assertThat(fe.extractFromSource(map), is(value)); + } + + public void testExtractSourceIncorrectPath() { + FieldHitExtractor fe = new FieldHitExtractor("a.b.c.d", false); + Object value = new Object(); + Map map = singletonMap("a", singletonMap("b", singletonMap("c", value))); + ExecutionException ex = expectThrows(ExecutionException.class, () -> fe.extractFromSource(map)); + assertThat(ex.getMessage(), is("Cannot extract value [a.b.c.d] from source")); + } + + public void testMultiValuedSource() { + FieldHitExtractor fe = new FieldHitExtractor("a", false); + Object 
value = new Object(); + Map map = singletonMap("a", asList(value, value)); + ExecutionException ex = expectThrows(ExecutionException.class, () -> fe.extractFromSource(map)); + assertThat(ex.getMessage(), is("Arrays (returned by [a]) are not supported")); + } +} \ No newline at end of file diff --git a/sql/server/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/InnerHitExtractorTests.java b/sql/server/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/InnerHitExtractorTests.java deleted file mode 100644 index b05cb8710fd..00000000000 --- a/sql/server/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/InnerHitExtractorTests.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.sql.execution.search.extractor; - -import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.test.AbstractWireSerializingTestCase; - -import java.io.IOException; - -public class InnerHitExtractorTests extends AbstractWireSerializingTestCase { - public static InnerHitExtractor randomInnerHitExtractor() { - return new InnerHitExtractor(randomAlphaOfLength(5), randomAlphaOfLength(5), randomBoolean()); - } - - @Override - protected InnerHitExtractor createTestInstance() { - return randomInnerHitExtractor(); - } - - @Override - protected Reader instanceReader() { - return InnerHitExtractor::new; - } - - @Override - protected InnerHitExtractor mutateInstance(InnerHitExtractor instance) throws IOException { - return new InnerHitExtractor(instance.hitName() + "mustated", instance.fieldName(), true); - } - - @AwaitsFix(bugUrl = "https://github.com/elastic/x-pack-elasticsearch/issues/3082") - public void testGet() throws IOException { - fail("implement after we're 
sure of the InnerHitExtractor's implementation"); - } - - public void testToString() { - assertEquals("field@hit", new InnerHitExtractor("hit", "field", true).toString()); - } -} diff --git a/sql/server/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ProcessingHitExtractorTests.java b/sql/server/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ProcessingHitExtractorTests.java index cda06186c62..dcbe0d78645 100644 --- a/sql/server/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ProcessingHitExtractorTests.java +++ b/sql/server/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/ProcessingHitExtractorTests.java @@ -69,7 +69,7 @@ public class ProcessingHitExtractorTests extends AbstractWireSerializingTestCase public void testGet() { String fieldName = randomAlphaOfLength(5); - ChainingProcessor extractor = new ChainingProcessor(new HitExtractorProcessor(new DocValueExtractor(fieldName)), new MathProcessor(MathOperation.LOG)); + ChainingProcessor extractor = new ChainingProcessor(new HitExtractorProcessor(new FieldHitExtractor(fieldName, true)), new MathProcessor(MathOperation.LOG)); int times = between(1, 1000); for (int i = 0; i < times; i++) { diff --git a/sql/server/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/SourceExtractorTests.java b/sql/server/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/SourceExtractorTests.java deleted file mode 100644 index 2341c86e2b9..00000000000 --- a/sql/server/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/SourceExtractorTests.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.sql.execution.search.extractor; - -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.test.AbstractWireSerializingTestCase; - -import java.io.IOException; -import java.util.function.Supplier; - -public class SourceExtractorTests extends AbstractWireSerializingTestCase { - public static SourceExtractor randomSourceExtractor() { - return new SourceExtractor(randomAlphaOfLength(5)); - } - - @Override - protected SourceExtractor createTestInstance() { - return randomSourceExtractor(); - } - - @Override - protected Reader instanceReader() { - return SourceExtractor::new; - } - - @Override - protected SourceExtractor mutateInstance(SourceExtractor instance) throws IOException { - return new SourceExtractor(instance.toString().substring(1) + "mutated"); - } - - public void testGet() throws IOException { - String fieldName = randomAlphaOfLength(5); - SourceExtractor extractor = new SourceExtractor(fieldName); - - int times = between(1, 1000); - for (int i = 0; i < times; i++) { - /* We use values that are parsed from json as "equal" to make the - * test simpler. 
*/ - @SuppressWarnings("unchecked") - Supplier valueSupplier = randomFrom( - () -> randomAlphaOfLength(5), - () -> randomInt(), - () -> randomDouble()); - Object value = valueSupplier.get(); - SearchHit hit = new SearchHit(1); - XContentBuilder source = JsonXContent.contentBuilder(); - source.startObject(); { - source.field(fieldName, value); - if (randomBoolean()) { - source.field(fieldName + "_random_junk", value + "_random_junk"); - } - } - source.endObject(); - BytesReference sourceRef = source.bytes(); - hit.sourceRef(sourceRef); - assertEquals(value, extractor.get(hit)); - } - } - - public void testToString() { - assertEquals("#name", new SourceExtractor("name").toString()); - } -} diff --git a/sql/server/src/test/java/org/elasticsearch/xpack/sql/expression/QuotingTests.java b/sql/server/src/test/java/org/elasticsearch/xpack/sql/expression/QuotingTests.java index 5189dd3be70..1c2a0017068 100644 --- a/sql/server/src/test/java/org/elasticsearch/xpack/sql/expression/QuotingTests.java +++ b/sql/server/src/test/java/org/elasticsearch/xpack/sql/expression/QuotingTests.java @@ -20,6 +20,7 @@ import java.util.Locale; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -80,9 +81,9 @@ public class QuotingTests extends ESTestCase { Expression exp = new SqlParser(DateTimeZone.UTC).createExpression(quote + qualifier + quote + "." + quote + name + quote); assertThat(exp, instanceOf(UnresolvedAttribute.class)); UnresolvedAttribute ua = (UnresolvedAttribute) exp; - assertThat(ua.name(), equalTo(name)); + assertThat(ua.name(), equalTo(qualifier + "." + name)); assertThat(ua.qualifiedName(), equalTo(qualifier + "." 
+ name)); - assertThat(ua.qualifier(), equalTo(qualifier)); + assertThat(ua.qualifier(), is(nullValue())); } diff --git a/sql/server/src/test/java/org/elasticsearch/xpack/sql/parser/IdentifierBuilderTests.java b/sql/server/src/test/java/org/elasticsearch/xpack/sql/parser/IdentifierBuilderTests.java new file mode 100644 index 00000000000..ec8b8abc51f --- /dev/null +++ b/sql/server/src/test/java/org/elasticsearch/xpack/sql/parser/IdentifierBuilderTests.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.parser; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.tree.Location; + +import static org.hamcrest.Matchers.is; + +public class IdentifierBuilderTests extends ESTestCase { + + private static Location L = new Location(1, 10); + + public void testTypicalIndex() throws Exception { + IdentifierBuilder.validateIndex("some-index", L); + } + + public void testInternalIndex() throws Exception { + IdentifierBuilder.validateIndex(".some-internal-index-2020-02-02", L); + } + + public void testIndexPattern() throws Exception { + IdentifierBuilder.validateIndex(".some-*", L); + } + + public void testInvalidIndex() throws Exception { + ParsingException pe = expectThrows(ParsingException.class, () -> IdentifierBuilder.validateIndex("some,index", L)); + assertThat(pe.getMessage(), is("line 1:12: Invalid index name (illegal character ,) some,index")); + } + + public void testUpperCasedIndex() throws Exception { + ParsingException pe = expectThrows(ParsingException.class, () -> IdentifierBuilder.validateIndex("thisIsAnIndex", L)); + assertThat(pe.getMessage(), is("line 1:12: Invalid index name (needs to be lowercase) thisIsAnIndex")); + } +} diff --git 
a/sql/server/src/test/java/org/elasticsearch/xpack/sql/planner/VerifierErrorMessagesTests.java b/sql/server/src/test/java/org/elasticsearch/xpack/sql/planner/VerifierErrorMessagesTests.java index e0fa83e63f3..62ad8171497 100644 --- a/sql/server/src/test/java/org/elasticsearch/xpack/sql/planner/VerifierErrorMessagesTests.java +++ b/sql/server/src/test/java/org/elasticsearch/xpack/sql/planner/VerifierErrorMessagesTests.java @@ -43,7 +43,6 @@ public class VerifierErrorMessagesTests extends ESTestCase { } public void testMultiGroupBy() { - // TODO: location needs to be updated after merging extend-having assertEquals("1:32: Currently, only a single expression can be used with GROUP BY; please select one of [bool, keyword]", verify("SELECT bool FROM test GROUP BY bool, keyword")); } diff --git a/sql/server/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java b/sql/server/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java new file mode 100644 index 00000000000..a0edbd8b454 --- /dev/null +++ b/sql/server/src/test/java/org/elasticsearch/xpack/sql/type/TypesTests.java @@ -0,0 +1,199 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.sql.type; + +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.analysis.index.MappingException; + +import java.io.InputStream; +import java.util.List; +import java.util.Map; + +import static java.util.Collections.emptyMap; +import static org.elasticsearch.xpack.sql.type.DataTypes.DATE; +import static org.elasticsearch.xpack.sql.type.DataTypes.INTEGER; +import static org.elasticsearch.xpack.sql.type.DataTypes.KEYWORD; +import static org.elasticsearch.xpack.sql.type.DataTypes.TEXT; +import static org.hamcrest.CoreMatchers.startsWith; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; + +public class TypesTests extends ESTestCase { + + public void testNullMap() throws Exception { + Map fromEs = Types.fromEs(null); + assertThat(fromEs.isEmpty(), is(true)); + } + + public void testEmptyMap() throws Exception { + Map fromEs = Types.fromEs(emptyMap()); + assertThat(fromEs.isEmpty(), is(true)); + } + + public void testBasicMapping() throws Exception { + Map mapping = loadMapping("mapping-basic.json"); + assertThat(mapping.size(), is(6)); + assertThat(mapping.get("emp_no"), is(INTEGER)); + assertThat(mapping.get("first_name"), instanceOf(TextType.class)); + assertThat(mapping.get("last_name"), is(TEXT)); + assertThat(mapping.get("gender"), is(KEYWORD)); + assertThat(mapping.get("salary"), is(INTEGER)); + } + + public void testDefaultStringMapping() throws Exception { + Map mapping = loadMapping("mapping-default-string.json"); + + assertThat(mapping.size(), is(1)); + assertThat(mapping.get("dep_no").same(TEXT), is(true)); + } + + public void testTextField() throws Exception { + Map mapping = loadMapping("mapping-text.json"); + + assertThat(mapping.size(), is(1)); + DataType type = mapping.get("full_name"); + 
assertThat(type, instanceOf(TextType.class)); + assertThat(type.hasDocValues(), is(false)); + TextType ttype = (TextType) type; + assertThat(type.precision(), is(Integer.MAX_VALUE)); + assertThat(ttype.hasFieldData(), is(false)); + } + + public void testKeywordField() throws Exception { + Map mapping = loadMapping("mapping-keyword.json"); + + assertThat(mapping.size(), is(1)); + DataType type = mapping.get("full_name"); + assertThat(type, instanceOf(KeywordType.class)); + assertThat(type.hasDocValues(), is(true)); + assertThat(type.precision(), is(256)); + } + + public void testDateField() throws Exception { + Map mapping = loadMapping("mapping-date.json"); + + assertThat(mapping.size(), is(1)); + DataType type = mapping.get("date"); + assertThat(type, is(DATE)); + assertThat(type.hasDocValues(), is(true)); + assertThat(type.precision(), is(19)); + + DateType dtype = (DateType) type; + List formats = dtype.formats(); + assertThat(formats, hasSize(3)); + } + + public void testDateNoFormat() throws Exception { + Map mapping = loadMapping("mapping-date-no-format.json"); + + assertThat(mapping.size(), is(1)); + DataType type = mapping.get("date"); + assertThat(type, is(DATE)); + assertThat(type.hasDocValues(), is(true)); + DateType dtype = (DateType) type; + // default types + assertThat(dtype.formats(), hasSize(2)); + } + + public void testDateMulti() throws Exception { + Map mapping = loadMapping("mapping-date-multi.json"); + + assertThat(mapping.size(), is(1)); + DataType type = mapping.get("date"); + assertThat(type, is(DATE)); + assertThat(type.hasDocValues(), is(true)); + DateType dtype = (DateType) type; + // default types + assertThat(dtype.formats(), hasSize(1)); + } + + public void testDocValueField() throws Exception { + Map mapping = loadMapping("mapping-docvalues.json"); + + assertThat(mapping.size(), is(1)); + DataType type = mapping.get("session_id"); + assertThat(type, instanceOf(KeywordType.class)); + assertThat(type.precision(), is(15)); + 
assertThat(type.hasDocValues(), is(false)); + } + + public void testDottedField() throws Exception { + Map<String, DataType> mapping = loadMapping("mapping-object.json"); + + assertThat(mapping.size(), is(2)); + DataType type = mapping.get("manager"); + assertThat(type.isPrimitive(), is(false)); + assertThat(type, instanceOf(ObjectType.class)); + ObjectType ot = (ObjectType) type; + Map<String, DataType> children = ot.properties(); + assertThat(children.size(), is(2)); + DataType names = children.get("name"); + children = ((ObjectType) names).properties(); + assertThat(children.size(), is(2)); + assertThat(children.get("first"), is(TEXT)); + } + + public void testMultiField() throws Exception { + Map<String, DataType> mapping = loadMapping("mapping-multi-field.json"); + + assertThat(mapping.size(), is(1)); + DataType type = mapping.get("text"); + assertThat(type.isPrimitive(), is(true)); + assertThat(type, instanceOf(TextType.class)); + TextType tt = (TextType) type; + Map<String, DataType> fields = tt.fields(); + assertThat(fields.size(), is(2)); + assertThat(fields.get("raw"), is(KEYWORD)); + assertThat(fields.get("english"), is(TEXT)); + } + + public void testMultiFieldTooManyOptions() throws Exception { + Map<String, DataType> mapping = loadMapping("mapping-multi-field-options.json"); + + assertThat(mapping.size(), is(1)); + DataType type = mapping.get("text"); + assertThat(type.isPrimitive(), is(true)); + assertThat(type, instanceOf(TextType.class)); + TextType tt = (TextType) type; + Map<String, DataType> fields = tt.fields(); + assertThat(fields.size(), is(2)); + assertThat(fields.get("raw"), is(KEYWORD)); + assertThat(fields.get("key"), is(KEYWORD)); + } + + public void testNestedDoc() throws Exception { + Map<String, DataType> mapping = loadMapping("mapping-nested.json"); + + assertThat(mapping.size(), is(1)); + DataType type = mapping.get("dep"); + assertThat(type.isPrimitive(), is(false)); + assertThat(type, instanceOf(NestedType.class)); + NestedType ot = (NestedType) type; + Map<String, DataType> children = ot.properties(); + assertThat(children.size(), is(4)); + assertThat(children.get("dep_name"), 
is(TEXT)); + assertThat(children.get("start_date"), is(DATE)); + } + + public void testGeoField() throws Exception { + MappingException ex = expectThrows(MappingException.class, () -> loadMapping("mapping-geo.json")); + assertThat(ex.getMessage(), is("Unsupported mapping type geo_point")); + } + + public void testUnsupportedTypes() throws Exception { + MappingException ex = expectThrows(MappingException.class, () -> loadMapping("mapping-unsupported.json")); + assertThat(ex.getMessage(), startsWith("Unsupported mapping type")); + } + + public static Map loadMapping(String name) { + InputStream stream = TypesTests.class.getResourceAsStream("/" + name); + assertNotNull("Could not find mapping resource:" + name, stream); + return Types.fromEs(XContentHelper.convertToMap(JsonXContent.jsonXContent, stream, randomBoolean())); + } +} \ No newline at end of file diff --git a/sql/server/src/test/resources/mapping-basic.json b/sql/server/src/test/resources/mapping-basic.json new file mode 100644 index 00000000000..c1747d1561c --- /dev/null +++ b/sql/server/src/test/resources/mapping-basic.json @@ -0,0 +1,22 @@ +{ + "properties" : { + "emp_no" : { + "type" : "integer" + }, + "first_name" : { + "type" : "text" + }, + "gender" : { + "type" : "keyword" + }, + "languages" : { + "type" : "byte" + }, + "last_name" : { + "type" : "text" + }, + "salary" : { + "type" : "integer" + } + } +} diff --git a/sql/server/src/test/resources/mapping-date-multi.json b/sql/server/src/test/resources/mapping-date-multi.json new file mode 100644 index 00000000000..e6cd9091f84 --- /dev/null +++ b/sql/server/src/test/resources/mapping-date-multi.json @@ -0,0 +1,8 @@ +{ + "properties": { + "date": { + "type": "date", + "format": "yyyy-MM-dd" + } + } +} \ No newline at end of file diff --git a/sql/server/src/test/resources/mapping-date-no-format.json b/sql/server/src/test/resources/mapping-date-no-format.json new file mode 100644 index 00000000000..e0e5fa852f5 --- /dev/null +++ 
b/sql/server/src/test/resources/mapping-date-no-format.json @@ -0,0 +1,7 @@ +{ + "properties": { + "date": { + "type": "date" + } + } +} \ No newline at end of file diff --git a/sql/server/src/test/resources/mapping-date.json b/sql/server/src/test/resources/mapping-date.json new file mode 100644 index 00000000000..0422d7e1026 --- /dev/null +++ b/sql/server/src/test/resources/mapping-date.json @@ -0,0 +1,8 @@ +{ + "properties": { + "date": { + "type": "date", + "format": "yyyy-MM-dd || basic_time || year" + } + } +} \ No newline at end of file diff --git a/sql/server/src/test/resources/mapping-default-string.json b/sql/server/src/test/resources/mapping-default-string.json new file mode 100644 index 00000000000..e8777a9cd68 --- /dev/null +++ b/sql/server/src/test/resources/mapping-default-string.json @@ -0,0 +1,13 @@ +{ + "properties" : { + "dep_no" : { + "type" : "text", + "fields" : { + "keyword" : { + "type" : "keyword", + "ignore_above" : 256 + } + } + } + } +} diff --git a/sql/server/src/test/resources/mapping-docvalues.json b/sql/server/src/test/resources/mapping-docvalues.json new file mode 100644 index 00000000000..5cd0ed200ce --- /dev/null +++ b/sql/server/src/test/resources/mapping-docvalues.json @@ -0,0 +1,9 @@ +{ + "properties" : { + "session_id" : { + "type" : "keyword", + "ignore_above" : 15, + "doc_values" : false + } + } +} diff --git a/sql/server/src/test/resources/mapping-geo.json b/sql/server/src/test/resources/mapping-geo.json new file mode 100644 index 00000000000..3c958ff37ed --- /dev/null +++ b/sql/server/src/test/resources/mapping-geo.json @@ -0,0 +1,7 @@ +{ + "properties" : { + "location" : { + "type" : "geo_point" + } + } +} diff --git a/sql/server/src/test/resources/mapping-keyword.json b/sql/server/src/test/resources/mapping-keyword.json new file mode 100644 index 00000000000..aa47e9e42ad --- /dev/null +++ b/sql/server/src/test/resources/mapping-keyword.json @@ -0,0 +1,8 @@ +{ + "properties" : { + "full_name" : { + "type" : 
"keyword", + "ignore_above" : 256 + } + } +} diff --git a/sql/server/src/test/resources/mapping-multi-field-options.json b/sql/server/src/test/resources/mapping-multi-field-options.json new file mode 100644 index 00000000000..f2389aed3d7 --- /dev/null +++ b/sql/server/src/test/resources/mapping-multi-field-options.json @@ -0,0 +1,15 @@ +{ + "properties" : { + "text" : { + "type" : "text", + "fields" : { + "raw" : { + "type" : "keyword" + }, + "key" : { + "type" : "keyword" + } + } + } + } +} \ No newline at end of file diff --git a/sql/server/src/test/resources/mapping-multi-field-variation.json b/sql/server/src/test/resources/mapping-multi-field-variation.json new file mode 100644 index 00000000000..bc26917fe92 --- /dev/null +++ b/sql/server/src/test/resources/mapping-multi-field-variation.json @@ -0,0 +1,46 @@ +{ + "properties" : { + "bool" : { "type" : "boolean" }, + "int" : { "type" : "integer" }, + "text" : { "type" : "text" }, + "keyword" : { "type" : "keyword" }, + "some" : { + "properties" : { + "dotted" : { + "properties" : { + "field" : { + "type" : "keyword" + } + } + }, + "string" : { + "type" : "text", + "fields" : { + "normalized" : { + "type" : "keyword", + "normalizer" : "some_normalizer" + }, + "typical" : { + "type" : "keyword" + } + } + }, + "ambiguous" : { + "type" : "text", + "fields" : { + "one" : { + "type" : "keyword" + }, + "two" : { + "type" : "keyword" + }, + "normalized" : { + "type" : "keyword", + "normalizer" : "some_normalizer" + } + } + } + } + } + } +} diff --git a/sql/server/src/test/resources/mapping-multi-field.json b/sql/server/src/test/resources/mapping-multi-field.json new file mode 100644 index 00000000000..9e293d42713 --- /dev/null +++ b/sql/server/src/test/resources/mapping-multi-field.json @@ -0,0 +1,16 @@ +{ + "properties" : { + "text" : { + "type" : "text", + "fields" : { + "raw" : { + "type" : "keyword" + }, + "english" : { + "type" : "text", + "analyzer" : "english" + } + } + } + } +} diff --git 
a/sql/server/src/test/resources/mapping-nested.json b/sql/server/src/test/resources/mapping-nested.json new file mode 100644 index 00000000000..d9b6398458f --- /dev/null +++ b/sql/server/src/test/resources/mapping-nested.json @@ -0,0 +1,27 @@ +{ + "properties" : { + "dep" : { + "type" : "nested", + "properties" : { + "dep_name" : { + "type" : "text" + }, + "dep_no" : { + "type" : "text", + "fields" : { + "keyword" : { + "type" : "keyword", + "ignore_above" : 256 + } + } + }, + "end_date" : { + "type" : "date" + }, + "start_date" : { + "type" : "date" + } + } + } + } +} \ No newline at end of file diff --git a/sql/server/src/test/resources/mapping-object.json b/sql/server/src/test/resources/mapping-object.json new file mode 100644 index 00000000000..65fd391f901 --- /dev/null +++ b/sql/server/src/test/resources/mapping-object.json @@ -0,0 +1,24 @@ +{ + "properties" : { + "region" : { + "type" : "keyword" + }, + "manager" : { + "properties" : { + "age" : { + "type" : "integer" + }, + "name" : { + "properties" : { + "first" : { + "type" : "text" + }, + "last" : { + "type" : "text" + } + } + } + } + } + } +} diff --git a/sql/server/src/test/resources/mapping-parent-child.json b/sql/server/src/test/resources/mapping-parent-child.json new file mode 100644 index 00000000000..b62e19625e2 --- /dev/null +++ b/sql/server/src/test/resources/mapping-parent-child.json @@ -0,0 +1,10 @@ +{ + "properties" : { + "parent_child" : { + "type" : "join", + "relations" : { + "question" : "answer" + } + } + } +} diff --git a/sql/server/src/test/resources/mapping-text.json b/sql/server/src/test/resources/mapping-text.json new file mode 100644 index 00000000000..ecf2f09c98a --- /dev/null +++ b/sql/server/src/test/resources/mapping-text.json @@ -0,0 +1,8 @@ +{ + "properties" : { + "full_name" : { + "type" : "text", + "fielddata" : false + } + } +} diff --git a/sql/server/src/test/resources/mapping-unsupported.json b/sql/server/src/test/resources/mapping-unsupported.json new file mode 100644 
index 00000000000..832dc9c0d74 --- /dev/null +++ b/sql/server/src/test/resources/mapping-unsupported.json @@ -0,0 +1,11 @@ +{ + "properties" : { + "range" : { + "type" : "integer_range" + }, + "time_frame" : { + "type" : "date_range", + "format" : "yyyy-MM-dd" + } + } +}