From dadf96a840bf07b11f7dce2fcb8d6ed7dec25382 Mon Sep 17 00:00:00 2001
From: Yannick Welsch
Date: Wed, 11 Jul 2018 11:12:52 +0200
Subject: [PATCH 01/17] [TEST] Mute SlackMessageTests.testTemplateRender
---
.../watcher/notification/slack/message/SlackMessageTests.java | 1 +
1 file changed, 1 insertion(+)
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/slack/message/SlackMessageTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/slack/message/SlackMessageTests.java
index b41e58b0612..740501eec4f 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/slack/message/SlackMessageTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/slack/message/SlackMessageTests.java
@@ -461,6 +461,7 @@ public class SlackMessageTests extends ESTestCase {
assertThat(parsed, equalTo(template));
}
+ @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/31948")
public void testTemplateRender() throws Exception {
Settings settings = SlackMessageDefaultsTests.randomSettings();
SlackMessageDefaults defaults = new SlackMessageDefaults(settings);
From d268b494d7e305ca65ec28b8b62380eac97dba3b Mon Sep 17 00:00:00 2001
From: David Kyle
Date: Wed, 11 Jul 2018 10:17:44 +0100
Subject: [PATCH 02/17] [ML] Mute test failing due to Java 11 date time format
parsing bug (#31899)
---
.../xpack/core/ml/job/config/DataDescription.java | 3 ++-
.../time/DateTimeFormatterTimestampConverter.java | 6 +++---
.../core/ml/job/config/DataDescriptionTests.java | 14 ++++++++++++--
3 files changed, 17 insertions(+), 6 deletions(-)
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DataDescription.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DataDescription.java
index 9ff578be50b..6e9652bdfa2 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DataDescription.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DataDescription.java
@@ -353,7 +353,8 @@ public class DataDescription implements ToXContentObject, Writeable {
try {
DateTimeFormatterTimestampConverter.ofPattern(format, ZoneOffset.UTC);
} catch (IllegalArgumentException e) {
- throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_INVALID_TIMEFORMAT, format));
+ throw ExceptionsHelper.badRequestException(
+ Messages.getMessage(Messages.JOB_CONFIG_INVALID_TIMEFORMAT, format), e.getCause());
}
}
timeFormat = format;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/time/DateTimeFormatterTimestampConverter.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/time/DateTimeFormatterTimestampConverter.java
index 556c2f37b48..0efb5feb38b 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/time/DateTimeFormatterTimestampConverter.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/time/DateTimeFormatterTimestampConverter.java
@@ -54,9 +54,9 @@ public class DateTimeFormatterTimestampConverter implements TimestampConverter {
.parseDefaulting(ChronoField.YEAR_OF_ERA, LocalDate.now(defaultTimezone).getYear())
.toFormatter();
- String now = formatter.format(ZonedDateTime.ofInstant(Instant.ofEpochSecond(0), ZoneOffset.UTC));
+ String formattedTime = formatter.format(ZonedDateTime.ofInstant(Instant.ofEpochSecond(0), ZoneOffset.UTC));
try {
- TemporalAccessor parsed = formatter.parse(now);
+ TemporalAccessor parsed = formatter.parse(formattedTime);
boolean hasTimeZone = parsed.isSupported(ChronoField.INSTANT_SECONDS);
if (hasTimeZone) {
Instant.from(parsed);
@@ -67,7 +67,7 @@ public class DateTimeFormatterTimestampConverter implements TimestampConverter {
return new DateTimeFormatterTimestampConverter(formatter, hasTimeZone, defaultTimezone);
}
catch (DateTimeException e) {
- throw new IllegalArgumentException("Timestamp cannot be derived from pattern: " + pattern);
+ throw new IllegalArgumentException("Timestamp cannot be derived from pattern: " + pattern, e);
}
}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/DataDescriptionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/DataDescriptionTests.java
index 3ca4bac47cb..bb7c329cf45 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/DataDescriptionTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/DataDescriptionTests.java
@@ -17,6 +17,8 @@ import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.ml.job.config.DataDescription.DataFormat;
import org.elasticsearch.xpack.core.ml.job.messages.Messages;
+import java.time.DateTimeException;
+
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
@@ -51,8 +53,12 @@ public class DataDescriptionTests extends AbstractSerializingTestCase description.setTimeFormat("y-M-dd"));
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_INVALID_TIMEFORMAT, "y-M-dd"), e.getMessage());
expectThrows(ElasticsearchException.class, () -> description.setTimeFormat("YYY-mm-UU hh:mm:ssY"));
+
+ Throwable cause = e.getCause();
+ assertNotNull(cause);
+ assertThat(cause, instanceOf(DateTimeException.class));
}
public void testTransform_GivenDelimitedAndEpoch() {
From aa6a1c5ca0448372953d4f117d95e9b57385182d Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Wed, 11 Jul 2018 09:18:04 -0400
Subject: [PATCH 03/17] Switch high level rest tests to new style requests
(#31937)
In #29623 we added `Request` object flavored requests to the low level
REST client and in #30315 we deprecated the old `performRequest`s. This
changes all calls in the `client/rest-high-level` project to use the new
versions.
---
.../elasticsearch/client/IndicesClientIT.java | 16 +-
.../elasticsearch/client/PingAndInfoIT.java | 2 +-
.../org/elasticsearch/client/RankEvalIT.java | 39 ++-
.../org/elasticsearch/client/SearchIT.java | 264 ++++++++++--------
.../documentation/CRUDDocumentationIT.java | 43 ++-
5 files changed, 192 insertions(+), 172 deletions(-)
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java
index 39070a07b31..88cf445d436 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java
@@ -612,7 +612,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
createIndex(index, Settings.EMPTY);
closeIndex(index);
ResponseException exception = expectThrows(ResponseException.class,
- () -> client().performRequest(HttpGet.METHOD_NAME, index + "/_search"));
+ () -> client().performRequest(new Request(HttpGet.METHOD_NAME, index + "/_search")));
assertThat(exception.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus()));
assertThat(exception.getMessage().contains(index), equalTo(true));
@@ -621,7 +621,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
highLevelClient().indices()::openAsync);
assertTrue(openIndexResponse.isAcknowledged());
- Response response = client().performRequest(HttpGet.METHOD_NAME, index + "/_search");
+ Response response = client().performRequest(new Request(HttpGet.METHOD_NAME, index + "/_search"));
assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
}
@@ -650,7 +650,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
public void testCloseExistingIndex() throws IOException {
String index = "index";
createIndex(index, Settings.EMPTY);
- Response response = client().performRequest(HttpGet.METHOD_NAME, index + "/_search");
+ Response response = client().performRequest(new Request(HttpGet.METHOD_NAME, index + "/_search"));
assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
CloseIndexRequest closeIndexRequest = new CloseIndexRequest(index);
@@ -659,7 +659,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
assertTrue(closeIndexResponse.isAcknowledged());
ResponseException exception = expectThrows(ResponseException.class,
- () -> client().performRequest(HttpGet.METHOD_NAME, index + "/_search"));
+ () -> client().performRequest(new Request(HttpGet.METHOD_NAME, index + "/_search")));
assertThat(exception.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus()));
assertThat(exception.getMessage().contains(index), equalTo(true));
}
@@ -817,7 +817,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
assertFalse(execute(getAliasesRequest, highLevelClient().indices()::existsAlias, highLevelClient().indices()::existsAliasAsync));
createIndex("index", Settings.EMPTY);
- client().performRequest(HttpPut.METHOD_NAME, "/index/_alias/alias");
+ client().performRequest(new Request(HttpPut.METHOD_NAME, "/index/_alias/alias"));
assertTrue(execute(getAliasesRequest, highLevelClient().indices()::existsAlias, highLevelClient().indices()::existsAliasAsync));
GetAliasesRequest getAliasesRequest2 = new GetAliasesRequest();
@@ -936,10 +936,10 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
public void testGetAlias() throws IOException {
{
createIndex("index1", Settings.EMPTY);
- client().performRequest(HttpPut.METHOD_NAME, "/index1/_alias/alias1");
+ client().performRequest(new Request(HttpPut.METHOD_NAME, "/index1/_alias/alias1"));
createIndex("index2", Settings.EMPTY);
- client().performRequest(HttpPut.METHOD_NAME, "/index2/_alias/alias2");
+ client().performRequest(new Request(HttpPut.METHOD_NAME, "/index2/_alias/alias2"));
createIndex("index3", Settings.EMPTY);
}
@@ -1075,7 +1075,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
assertThat(getAliasesResponse.getError(), equalTo("alias [" + alias + "] missing"));
}
createIndex(index, Settings.EMPTY);
- client().performRequest(HttpPut.METHOD_NAME, index + "/_alias/" + alias);
+ client().performRequest(new Request(HttpPut.METHOD_NAME, index + "/_alias/" + alias));
{
GetAliasesRequest getAliasesRequest = new GetAliasesRequest().indices(index, "non_existent_index");
GetAliasesResponse getAliasesResponse = execute(getAliasesRequest, highLevelClient().indices()::getAlias,
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java
index 1a50187f5df..b45f52f9e44 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java
@@ -39,7 +39,7 @@ public class PingAndInfoIT extends ESRestHighLevelClientTestCase {
public void testInfo() throws IOException {
MainResponse info = highLevelClient().info(RequestOptions.DEFAULT);
// compare with what the low level client outputs
- Map infoAsMap = entityAsMap(adminClient().performRequest(HttpGet.METHOD_NAME, "/"));
+ Map infoAsMap = entityAsMap(adminClient().performRequest(new Request(HttpGet.METHOD_NAME, "/")));
assertEquals(infoAsMap.get("cluster_name"), info.getClusterName().value());
assertEquals(infoAsMap.get("cluster_uuid"), info.getClusterUuid());
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java
index a7a452484e0..d61fccb9371 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java
@@ -19,8 +19,6 @@
package org.elasticsearch.client;
-import org.apache.http.entity.ContentType;
-import org.apache.http.entity.StringEntity;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
@@ -37,7 +35,6 @@ import org.junit.Before;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@@ -49,19 +46,17 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase {
@Before
public void indexDocuments() throws IOException {
- StringEntity doc = new StringEntity("{\"text\":\"berlin\"}", ContentType.APPLICATION_JSON);
- client().performRequest("PUT", "/index/doc/1", Collections.emptyMap(), doc);
- doc = new StringEntity("{\"text\":\"amsterdam\"}", ContentType.APPLICATION_JSON);
- client().performRequest("PUT", "/index/doc/2", Collections.emptyMap(), doc);
- client().performRequest("PUT", "/index/doc/3", Collections.emptyMap(), doc);
- client().performRequest("PUT", "/index/doc/4", Collections.emptyMap(), doc);
- client().performRequest("PUT", "/index/doc/5", Collections.emptyMap(), doc);
- client().performRequest("PUT", "/index/doc/6", Collections.emptyMap(), doc);
- client().performRequest("POST", "/index/_refresh");
-
- // add another index to test basic multi index support
- client().performRequest("PUT", "/index2/doc/7", Collections.emptyMap(), doc);
- client().performRequest("POST", "/index2/_refresh");
+ Request berlin = new Request("PUT", "/index/doc/berlin");
+ berlin.setJsonEntity("{\"text\":\"berlin\"}");
+ client().performRequest(berlin);
+ for (int i = 0; i < 6; i++) {
+ // add another index to test basic multi index support
+ String index = i == 0 ? "index2" : "index";
+ Request amsterdam = new Request("PUT", "/" + index + "/doc/amsterdam" + i);
+ amsterdam.setJsonEntity("{\"text\":\"amsterdam\"}");
+ client().performRequest(amsterdam);
+ }
+ client().performRequest(new Request("POST", "/_refresh"));
}
/**
@@ -71,10 +66,10 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase {
public void testRankEvalRequest() throws IOException {
SearchSourceBuilder testQuery = new SearchSourceBuilder();
testQuery.query(new MatchAllQueryBuilder());
- List amsterdamRatedDocs = createRelevant("index" , "2", "3", "4", "5");
- amsterdamRatedDocs.addAll(createRelevant("index2", "7"));
+ List amsterdamRatedDocs = createRelevant("index" , "amsterdam1", "amsterdam2", "amsterdam3", "amsterdam4");
+ amsterdamRatedDocs.addAll(createRelevant("index2", "amsterdam0"));
RatedRequest amsterdamRequest = new RatedRequest("amsterdam_query", amsterdamRatedDocs, testQuery);
- RatedRequest berlinRequest = new RatedRequest("berlin_query", createRelevant("index", "1"), testQuery);
+ RatedRequest berlinRequest = new RatedRequest("berlin_query", createRelevant("index", "berlin"), testQuery);
List specifications = new ArrayList<>();
specifications.add(amsterdamRequest);
specifications.add(berlinRequest);
@@ -94,7 +89,7 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase {
assertEquals(7, hitsAndRatings.size());
for (RatedSearchHit hit : hitsAndRatings) {
String id = hit.getSearchHit().getId();
- if (id.equals("1") || id.equals("6")) {
+ if (id.equals("berlin") || id.equals("amsterdam5")) {
assertFalse(hit.getRating().isPresent());
} else {
assertEquals(1, hit.getRating().get().intValue());
@@ -106,7 +101,7 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase {
assertEquals(7, hitsAndRatings.size());
for (RatedSearchHit hit : hitsAndRatings) {
String id = hit.getSearchHit().getId();
- if (id.equals("1")) {
+ if (id.equals("berlin")) {
assertEquals(1, hit.getRating().get().intValue());
} else {
assertFalse(hit.getRating().isPresent());
@@ -114,7 +109,7 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase {
}
// now try this when test2 is closed
- client().performRequest("POST", "index2/_close", Collections.emptyMap());
+ client().performRequest(new Request("POST", "index2/_close"));
rankEvalRequest.indicesOptions(IndicesOptions.fromParameters(null, "true", null, SearchRequest.DEFAULT_INDICES_OPTIONS));
response = execute(rankEvalRequest, highLevelClient()::rankEval, highLevelClient()::rankEvalAsync);
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java
index 18a43ffa8d4..ce9091a91ff 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java
@@ -19,12 +19,8 @@
package org.elasticsearch.client;
-import org.apache.http.HttpEntity;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
-import org.apache.http.entity.ContentType;
-import org.apache.http.entity.StringEntity;
-import org.apache.http.nio.entity.NStringEntity;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.explain.ExplainRequest;
@@ -101,85 +97,106 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
@Before
public void indexDocuments() throws IOException {
- StringEntity doc1 = new StringEntity("{\"type\":\"type1\", \"num\":10, \"num2\":50}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/index/type/1", Collections.emptyMap(), doc1);
- StringEntity doc2 = new StringEntity("{\"type\":\"type1\", \"num\":20, \"num2\":40}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/index/type/2", Collections.emptyMap(), doc2);
- StringEntity doc3 = new StringEntity("{\"type\":\"type1\", \"num\":50, \"num2\":35}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/index/type/3", Collections.emptyMap(), doc3);
- StringEntity doc4 = new StringEntity("{\"type\":\"type2\", \"num\":100, \"num2\":10}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/index/type/4", Collections.emptyMap(), doc4);
- StringEntity doc5 = new StringEntity("{\"type\":\"type2\", \"num\":100, \"num2\":10}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/index/type/5", Collections.emptyMap(), doc5);
- client().performRequest(HttpPost.METHOD_NAME, "/index/_refresh");
+ {
+ Request doc1 = new Request(HttpPut.METHOD_NAME, "/index/type/1");
+ doc1.setJsonEntity("{\"type\":\"type1\", \"num\":10, \"num2\":50}");
+ client().performRequest(doc1);
+ Request doc2 = new Request(HttpPut.METHOD_NAME, "/index/type/2");
+ doc2.setJsonEntity("{\"type\":\"type1\", \"num\":20, \"num2\":40}");
+ client().performRequest(doc2);
+ Request doc3 = new Request(HttpPut.METHOD_NAME, "/index/type/3");
+ doc3.setJsonEntity("{\"type\":\"type1\", \"num\":50, \"num2\":35}");
+ client().performRequest(doc3);
+ Request doc4 = new Request(HttpPut.METHOD_NAME, "/index/type/4");
+ doc4.setJsonEntity("{\"type\":\"type2\", \"num\":100, \"num2\":10}");
+ client().performRequest(doc4);
+ Request doc5 = new Request(HttpPut.METHOD_NAME, "/index/type/5");
+ doc5.setJsonEntity("{\"type\":\"type2\", \"num\":100, \"num2\":10}");
+ client().performRequest(doc5);
+ }
+ {
+ Request doc1 = new Request(HttpPut.METHOD_NAME, "/index1/doc/1");
+ doc1.setJsonEntity("{\"field\":\"value1\", \"rating\": 7}");
+ client().performRequest(doc1);
+ Request doc2 = new Request(HttpPut.METHOD_NAME, "/index1/doc/2");
+ doc2.setJsonEntity("{\"field\":\"value2\"}");
+ client().performRequest(doc2);
+ }
- StringEntity doc = new StringEntity("{\"field\":\"value1\", \"rating\": 7}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/index1/doc/1", Collections.emptyMap(), doc);
- doc = new StringEntity("{\"field\":\"value2\"}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/index1/doc/2", Collections.emptyMap(), doc);
-
- StringEntity mappings = new StringEntity(
- "{" +
- " \"mappings\": {" +
- " \"doc\": {" +
- " \"properties\": {" +
- " \"rating\": {" +
- " \"type\": \"keyword\"" +
- " }" +
- " }" +
- " }" +
- " }" +
- "}}",
- ContentType.APPLICATION_JSON);
- client().performRequest("PUT", "/index2", Collections.emptyMap(), mappings);
- doc = new StringEntity("{\"field\":\"value1\", \"rating\": \"good\"}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/index2/doc/3", Collections.emptyMap(), doc);
- doc = new StringEntity("{\"field\":\"value2\"}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/index2/doc/4", Collections.emptyMap(), doc);
-
- doc = new StringEntity("{\"field\":\"value1\"}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/index3/doc/5", Collections.emptyMap(), doc);
- doc = new StringEntity("{\"field\":\"value2\"}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/index3/doc/6", Collections.emptyMap(), doc);
-
- mappings = new StringEntity(
- "{" +
+ {
+ Request create = new Request("PUT", "/index2");
+ create.setJsonEntity(
+ "{" +
" \"mappings\": {" +
" \"doc\": {" +
" \"properties\": {" +
- " \"field1\": {" +
- " \"type\": \"keyword\"," +
- " \"store\": true" +
- " }," +
- " \"field2\": {" +
- " \"type\": \"keyword\"," +
- " \"store\": true" +
+ " \"rating\": {" +
+ " \"type\": \"keyword\"" +
" }" +
" }" +
" }" +
" }" +
- "}}",
- ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/index4", Collections.emptyMap(), mappings);
- doc = new StringEntity("{\"field1\":\"value1\", \"field2\":\"value2\"}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/index4/doc/1", Collections.emptyMap(), doc);
- StringEntity aliasFilter = new StringEntity(
- "{" +
- " \"actions\" : [" +
- " {" +
- " \"add\" : {" +
- " \"index\" : \"index4\"," +
- " \"alias\" : \"alias4\"," +
- " \"filter\" : { \"term\" : { \"field2\" : \"value1\" } }" +
- " }" +
- " }" +
- " ]" +
- "}",
- ContentType.APPLICATION_JSON);
- client().performRequest(HttpPost.METHOD_NAME, "/_aliases", Collections.emptyMap(), aliasFilter);
+ "}");
+ client().performRequest(create);
+ Request doc3 = new Request(HttpPut.METHOD_NAME, "/index2/doc/3");
+ doc3.setJsonEntity("{\"field\":\"value1\", \"rating\": \"good\"}");
+ client().performRequest(doc3);
+ Request doc4 = new Request(HttpPut.METHOD_NAME, "/index2/doc/4");
+ doc4.setJsonEntity("{\"field\":\"value2\"}");
+ client().performRequest(doc4);
+ }
- client().performRequest(HttpPost.METHOD_NAME, "/index1,index2,index3,index4/_refresh");
+ {
+ Request doc5 = new Request(HttpPut.METHOD_NAME, "/index3/doc/5");
+ doc5.setJsonEntity("{\"field\":\"value1\"}");
+ client().performRequest(doc5);
+ Request doc6 = new Request(HttpPut.METHOD_NAME, "/index3/doc/6");
+ doc6.setJsonEntity("{\"field\":\"value2\"}");
+ client().performRequest(doc6);
+ }
+
+ {
+ Request create = new Request(HttpPut.METHOD_NAME, "/index4");
+ create.setJsonEntity(
+ "{" +
+ " \"mappings\": {" +
+ " \"doc\": {" +
+ " \"properties\": {" +
+ " \"field1\": {" +
+ " \"type\": \"keyword\"," +
+ " \"store\": true" +
+ " }," +
+ " \"field2\": {" +
+ " \"type\": \"keyword\"," +
+ " \"store\": true" +
+ " }" +
+ " }" +
+ " }" +
+ " }" +
+ "}");
+ client().performRequest(create);
+ Request doc1 = new Request(HttpPut.METHOD_NAME, "/index4/doc/1");
+ doc1.setJsonEntity("{\"field1\":\"value1\", \"field2\":\"value2\"}");
+ client().performRequest(doc1);
+
+ Request createFilteredAlias = new Request(HttpPost.METHOD_NAME, "/_aliases");
+ createFilteredAlias.setJsonEntity(
+ "{" +
+ " \"actions\" : [" +
+ " {" +
+ " \"add\" : {" +
+ " \"index\" : \"index4\"," +
+ " \"alias\" : \"alias4\"," +
+ " \"filter\" : { \"term\" : { \"field2\" : \"value1\" } }" +
+ " }" +
+ " }" +
+ " ]" +
+ "}");
+ client().performRequest(createFilteredAlias);
+ }
+
+ client().performRequest(new Request(HttpPost.METHOD_NAME, "/_refresh"));
}
public void testSearchNoQuery() throws IOException {
@@ -377,7 +394,9 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
public void testSearchWithParentJoin() throws IOException {
final String indexName = "child_example";
- StringEntity parentMapping = new StringEntity("{\n" +
+ Request createIndex = new Request(HttpPut.METHOD_NAME, "/" + indexName);
+ createIndex.setJsonEntity(
+ "{\n" +
" \"mappings\": {\n" +
" \"qa\" : {\n" +
" \"properties\" : {\n" +
@@ -388,9 +407,11 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
" }\n" +
" }\n" +
" }" +
- "}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/" + indexName, Collections.emptyMap(), parentMapping);
- StringEntity questionDoc = new StringEntity("{\n" +
+ "}");
+ client().performRequest(createIndex);
+ Request questionDoc = new Request(HttpPut.METHOD_NAME, "/" + indexName + "/qa/1");
+ questionDoc.setJsonEntity(
+ "{\n" +
" \"body\": \"I have Windows 2003 server and i bought a new Windows 2008 server...\",\n" +
" \"title\": \"Whats the best way to file transfer my site from server to a newer one?\",\n" +
" \"tags\": [\n" +
@@ -399,9 +420,12 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
" \"file-transfer\"\n" +
" ],\n" +
" \"qa_join_field\" : \"question\"\n" +
- "}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/" + indexName + "/qa/1", Collections.emptyMap(), questionDoc);
- StringEntity answerDoc1 = new StringEntity("{\n" +
+ "}");
+ client().performRequest(questionDoc);
+ Request answerDoc1 = new Request(HttpPut.METHOD_NAME, "/" + indexName + "/qa/2");
+ answerDoc1.addParameter("routing", "1");
+ answerDoc1.setJsonEntity(
+ "{\n" +
" \"owner\": {\n" +
" \"location\": \"Norfolk, United Kingdom\",\n" +
" \"display_name\": \"Sam\",\n" +
@@ -413,9 +437,12 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
" \"parent\" : \"1\"\n" +
" },\n" +
" \"creation_date\": \"2009-05-04T13:45:37.030\"\n" +
- "}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/" + indexName + "/qa/2", Collections.singletonMap("routing", "1"), answerDoc1);
- StringEntity answerDoc2 = new StringEntity("{\n" +
+ "}");
+ client().performRequest(answerDoc1);
+ Request answerDoc2 = new Request(HttpPut.METHOD_NAME, "/" + indexName + "/qa/3");
+ answerDoc2.addParameter("routing", "1");
+ answerDoc2.setJsonEntity(
+ "{\n" +
" \"owner\": {\n" +
" \"location\": \"Norfolk, United Kingdom\",\n" +
" \"display_name\": \"Troll\",\n" +
@@ -427,9 +454,9 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
" \"parent\" : \"1\"\n" +
" },\n" +
" \"creation_date\": \"2009-05-05T13:45:37.030\"\n" +
- "}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/" + indexName + "/qa/3", Collections.singletonMap("routing", "1"), answerDoc2);
- client().performRequest(HttpPost.METHOD_NAME, "/_refresh");
+ "}");
+ client().performRequest(answerDoc2);
+ client().performRequest(new Request(HttpPost.METHOD_NAME, "/_refresh"));
TermsAggregationBuilder leafTermAgg = new TermsAggregationBuilder("top-names", ValueType.STRING)
.field("owner.display_name.keyword").size(10);
@@ -506,9 +533,10 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
}
public void testSearchWithWeirdScriptFields() throws Exception {
- HttpEntity entity = new NStringEntity("{ \"field\":\"value\"}", ContentType.APPLICATION_JSON);
- client().performRequest("PUT", "test/type/1", Collections.emptyMap(), entity);
- client().performRequest("POST", "/test/_refresh");
+ Request doc = new Request("PUT", "test/type/1");
+ doc.setJsonEntity("{\"field\":\"value\"}");
+ client().performRequest(doc);
+ client().performRequest(new Request("POST", "/test/_refresh"));
{
SearchRequest searchRequest = new SearchRequest("test").source(SearchSourceBuilder.searchSource()
@@ -547,13 +575,13 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
}
public void testSearchScroll() throws Exception {
-
for (int i = 0; i < 100; i++) {
XContentBuilder builder = jsonBuilder().startObject().field("field", i).endObject();
- HttpEntity entity = new NStringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "test/type1/" + Integer.toString(i), Collections.emptyMap(), entity);
+ Request doc = new Request(HttpPut.METHOD_NAME, "/test/type1/" + Integer.toString(i));
+ doc.setJsonEntity(Strings.toString(builder));
+ client().performRequest(doc);
}
- client().performRequest(HttpPost.METHOD_NAME, "/test/_refresh");
+ client().performRequest(new Request(HttpPost.METHOD_NAME, "/test/_refresh"));
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().size(35).sort("field", SortOrder.ASC);
SearchRequest searchRequest = new SearchRequest("test").scroll(TimeValue.timeValueMinutes(2)).source(searchSourceBuilder);
@@ -878,11 +906,11 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
assertToXContentEquivalent(expectedSource, actualSource, XContentType.JSON);
}
-
-
+
+
public void testMultiSearchTemplate() throws Exception {
MultiSearchTemplateRequest multiSearchTemplateRequest = new MultiSearchTemplateRequest();
-
+
SearchTemplateRequest goodRequest = new SearchTemplateRequest();
goodRequest.setRequest(new SearchRequest("index"));
goodRequest.setScriptType(ScriptType.INLINE);
@@ -900,8 +928,8 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
goodRequest.setExplain(true);
goodRequest.setProfile(true);
multiSearchTemplateRequest.add(goodRequest);
-
-
+
+
SearchTemplateRequest badRequest = new SearchTemplateRequest();
badRequest.setRequest(new SearchRequest("index"));
badRequest.setScriptType(ScriptType.INLINE);
@@ -910,17 +938,17 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
scriptParams.put("number", 10);
badRequest.setScriptParams(scriptParams);
- multiSearchTemplateRequest.add(badRequest);
-
+ multiSearchTemplateRequest.add(badRequest);
+
MultiSearchTemplateResponse multiSearchTemplateResponse =
- execute(multiSearchTemplateRequest, highLevelClient()::multiSearchTemplate,
+ execute(multiSearchTemplateRequest, highLevelClient()::multiSearchTemplate,
highLevelClient()::multiSearchTemplateAsync);
-
+
Item[] responses = multiSearchTemplateResponse.getResponses();
-
+
assertEquals(2, responses.length);
-
-
+
+
assertNull(responses[0].getResponse().getSource());
SearchResponse goodResponse =responses[0].getResponse().getResponse();
assertNotNull(goodResponse);
@@ -930,18 +958,18 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
assertThat(goodResponse.getHits().getMaxScore(), greaterThan(0f));
SearchHit hit = goodResponse.getHits().getHits()[0];
assertNotNull(hit.getExplanation());
- assertFalse(goodResponse.getProfileResults().isEmpty());
-
-
+ assertFalse(goodResponse.getProfileResults().isEmpty());
+
+
assertNull(responses[0].getResponse().getSource());
assertThat(responses[1].isFailure(), Matchers.is(true));
- assertNotNull(responses[1].getFailureMessage());
+ assertNotNull(responses[1].getFailureMessage());
assertThat(responses[1].getFailureMessage(), containsString("json_parse_exception"));
}
-
+
public void testMultiSearchTemplateAllBad() throws Exception {
MultiSearchTemplateRequest multiSearchTemplateRequest = new MultiSearchTemplateRequest();
-
+
SearchTemplateRequest badRequest1 = new SearchTemplateRequest();
badRequest1.setRequest(new SearchRequest("index"));
badRequest1.setScriptType(ScriptType.INLINE);
@@ -957,8 +985,8 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
scriptParams.put("number", "BAD NUMBER");
badRequest1.setScriptParams(scriptParams);
multiSearchTemplateRequest.add(badRequest1);
-
-
+
+
SearchTemplateRequest badRequest2 = new SearchTemplateRequest();
badRequest2.setRequest(new SearchRequest("index"));
badRequest2.setScriptType(ScriptType.INLINE);
@@ -967,13 +995,13 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
scriptParams.put("number", "BAD NUMBER");
badRequest2.setScriptParams(scriptParams);
- multiSearchTemplateRequest.add(badRequest2);
-
- // The whole HTTP request should fail if no nested search requests are valid
+ multiSearchTemplateRequest.add(badRequest2);
+
+ // The whole HTTP request should fail if no nested search requests are valid
ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class,
- () -> execute(multiSearchTemplateRequest, highLevelClient()::multiSearchTemplate,
+ () -> execute(multiSearchTemplateRequest, highLevelClient()::multiSearchTemplate,
highLevelClient()::multiSearchTemplateAsync));
-
+
assertEquals(RestStatus.BAD_REQUEST, exception.status());
assertThat(exception.getMessage(), containsString("no requests added"));
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java
index b8a6b7d2d8a..9dad115643c 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java
@@ -19,8 +19,6 @@
package org.elasticsearch.client.documentation;
-import org.apache.http.entity.ContentType;
-import org.apache.http.nio.entity.NStringEntity;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.DocWriteRequest;
@@ -66,7 +64,6 @@ import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
-import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
@@ -756,7 +753,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
public void testGet() throws Exception {
RestHighLevelClient client = highLevelClient();
{
- String mappings = "{\n" +
+ Request createIndex = new Request("PUT", "/posts");
+ createIndex.setJsonEntity(
+ "{\n" +
" \"mappings\" : {\n" +
" \"doc\" : {\n" +
" \"properties\" : {\n" +
@@ -767,10 +766,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
" }\n" +
" }\n" +
" }\n" +
- "}";
-
- NStringEntity entity = new NStringEntity(mappings, ContentType.APPLICATION_JSON);
- Response response = client().performRequest("PUT", "/posts", Collections.emptyMap(), entity);
+ "}");
+ Response response = client().performRequest(createIndex);
assertEquals(200, response.getStatusLine().getStatusCode());
IndexRequest indexRequest = new IndexRequest("posts", "doc", "1")
@@ -1071,21 +1068,21 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
RestHighLevelClient client = highLevelClient();
{
- String mappings = "{\n" +
- " \"mappings\" : {\n" +
- " \"type\" : {\n" +
- " \"properties\" : {\n" +
- " \"foo\" : {\n" +
- " \"type\": \"text\",\n" +
- " \"store\": true\n" +
- " }\n" +
- " }\n" +
- " }\n" +
- " }\n" +
- "}";
-
- NStringEntity entity = new NStringEntity(mappings, ContentType.APPLICATION_JSON);
- Response response = client().performRequest("PUT", "/index", Collections.emptyMap(), entity);
+ Request createIndex = new Request("PUT", "/index");
+ createIndex.setJsonEntity(
+ "{\n" +
+ " \"mappings\" : {\n" +
+ " \"type\" : {\n" +
+ " \"properties\" : {\n" +
+ " \"foo\" : {\n" +
+ " \"type\": \"text\",\n" +
+ " \"store\": true\n" +
+ " }\n" +
+ " }\n" +
+ " }\n" +
+ " }\n" +
+ "}");
+ Response response = client().performRequest(createIndex);
assertEquals(200, response.getStatusLine().getStatusCode());
}
From eda6d182b541f3b563d64956f60efa5eef9d1cd1 Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Wed, 11 Jul 2018 09:48:47 -0400
Subject: [PATCH 04/17] Switch low level rest tests to new style Requests
(#31938)
In #29623 we added `Request` object flavored requests to the low level
REST client and in #30315 we deprecated the old `performRequest`s. This
changes all calls in the `client/rest` project to use the new versions.
---
.../client/RestClientBuilderIntegTests.java | 4 +-
.../RestClientSingleHostIntegTests.java | 38 +++++++++++++------
.../RestClientDocumentation.java | 2 +-
3 files changed, 30 insertions(+), 14 deletions(-)
diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java
index 199b7542e62..93f8481bea6 100644
--- a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java
+++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java
@@ -76,7 +76,7 @@ public class RestClientBuilderIntegTests extends RestClientTestCase {
try {
try (RestClient client = buildRestClient()) {
try {
- client.performRequest("GET", "/");
+ client.performRequest(new Request("GET", "/"));
fail("connection should have been rejected due to SSL handshake");
} catch (Exception e) {
assertThat(e.getMessage(), containsString("General SSLEngine problem"));
@@ -85,7 +85,7 @@ public class RestClientBuilderIntegTests extends RestClientTestCase {
SSLContext.setDefault(getSslContext());
try (RestClient client = buildRestClient()) {
- Response response = client.performRequest("GET", "/");
+ Response response = client.performRequest(new Request("GET", "/"));
assertEquals(200, response.getStatusLine().getStatusCode());
}
} finally {
diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java
index 114d34c73da..6b5bb3c98ee 100644
--- a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java
+++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java
@@ -256,35 +256,51 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
public void testEncodeParams() throws IOException {
{
- Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "this/is/the/routing"));
+ Request request = new Request("PUT", "/200");
+ request.addParameter("routing", "this/is/the/routing");
+ Response response = restClient.performRequest(request);
assertEquals(pathPrefix + "/200?routing=this%2Fis%2Fthe%2Frouting", response.getRequestLine().getUri());
}
{
- Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "this|is|the|routing"));
+ Request request = new Request("PUT", "/200");
+ request.addParameter("routing", "this|is|the|routing");
+ Response response = restClient.performRequest(request);
assertEquals(pathPrefix + "/200?routing=this%7Cis%7Cthe%7Crouting", response.getRequestLine().getUri());
}
{
- Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "routing#1"));
+ Request request = new Request("PUT", "/200");
+ request.addParameter("routing", "routing#1");
+ Response response = restClient.performRequest(request);
assertEquals(pathPrefix + "/200?routing=routing%231", response.getRequestLine().getUri());
}
{
- Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "中文"));
+ Request request = new Request("PUT", "/200");
+ request.addParameter("routing", "中文");
+ Response response = restClient.performRequest(request);
assertEquals(pathPrefix + "/200?routing=%E4%B8%AD%E6%96%87", response.getRequestLine().getUri());
}
{
- Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "foo bar"));
+ Request request = new Request("PUT", "/200");
+ request.addParameter("routing", "foo bar");
+ Response response = restClient.performRequest(request);
assertEquals(pathPrefix + "/200?routing=foo+bar", response.getRequestLine().getUri());
}
{
- Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "foo+bar"));
+ Request request = new Request("PUT", "/200");
+ request.addParameter("routing", "foo+bar");
+ Response response = restClient.performRequest(request);
assertEquals(pathPrefix + "/200?routing=foo%2Bbar", response.getRequestLine().getUri());
}
{
- Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "foo/bar"));
+ Request request = new Request("PUT", "/200");
+ request.addParameter("routing", "foo/bar");
+ Response response = restClient.performRequest(request);
assertEquals(pathPrefix + "/200?routing=foo%2Fbar", response.getRequestLine().getUri());
}
{
- Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "foo^bar"));
+ Request request = new Request("PUT", "/200");
+ request.addParameter("routing", "foo^bar");
+ Response response = restClient.performRequest(request);
assertEquals(pathPrefix + "/200?routing=foo%5Ebar", response.getRequestLine().getUri());
}
}
@@ -341,14 +357,14 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
public void testUrlWithoutLeadingSlash() throws Exception {
if (pathPrefix.length() == 0) {
try {
- restClient.performRequest("GET", "200");
+ restClient.performRequest(new Request("GET", "200"));
fail("request should have failed");
} catch (ResponseException e) {
assertEquals(404, e.getResponse().getStatusLine().getStatusCode());
}
} else {
{
- Response response = restClient.performRequest("GET", "200");
+ Response response = restClient.performRequest(new Request("GET", "200"));
//a trailing slash gets automatically added if a pathPrefix is configured
assertEquals(200, response.getStatusLine().getStatusCode());
}
@@ -357,7 +373,7 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
try (RestClient restClient = RestClient.builder(
new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort()))
.setPathPrefix(pathPrefix.substring(1)).build()) {
- Response response = restClient.performRequest("GET", "200");
+ Response response = restClient.performRequest(new Request("GET", "200"));
//a trailing slash gets automatically added if a pathPrefix is configured
assertEquals(200, response.getStatusLine().getStatusCode());
}
diff --git a/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java b/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java
index d347353a1fb..ce2e0907560 100644
--- a/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java
+++ b/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java
@@ -267,7 +267,7 @@ public class RestClientDocumentation {
}
{
//tag::rest-client-response2
- Response response = restClient.performRequest("GET", "/");
+ Response response = restClient.performRequest(new Request("GET", "/"));
RequestLine requestLine = response.getRequestLine(); // <1>
HttpHost host = response.getHost(); // <2>
int statusCode = response.getStatusLine().getStatusCode(); // <3>
From 38e09a1508bbef0e67ca95e46ace32f7aa1a08dc Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Wed, 11 Jul 2018 10:04:17 -0400
Subject: [PATCH 05/17] Switch test framework to new style requests (#31939)
In #29623 we added `Request` object flavored requests to the low level
REST client and in #30315 we deprecated the old `performRequest`s. This
changes all calls in the `test/framework` project to use the new
versions.
---
.../test/rest/ESRestTestCase.java | 86 +++++++++----------
.../rest/yaml/ESClientYamlSuiteTestCase.java | 7 +-
2 files changed, 43 insertions(+), 50 deletions(-)
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
index 81a9598496b..937adddf3a4 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
@@ -21,12 +21,6 @@ package org.elasticsearch.test.rest;
import org.apache.http.Header;
import org.apache.http.HttpHost;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.methods.HttpHead;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.client.methods.HttpPut;
-import org.apache.http.entity.ContentType;
-import org.apache.http.entity.StringEntity;
import org.apache.http.message.BasicHeader;
import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy;
import org.apache.http.ssl.SSLContexts;
@@ -68,16 +62,12 @@ import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException;
import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
-import static java.util.Collections.emptyMap;
-import static java.util.Collections.singletonMap;
import static java.util.Collections.sort;
import static java.util.Collections.unmodifiableList;
import static org.hamcrest.Matchers.anyOf;
@@ -307,25 +297,25 @@ public abstract class ESRestTestCase extends ESTestCase {
* the snapshots intact in the repository.
*/
private void wipeSnapshots() throws IOException {
- for (Map.Entry repo : entityAsMap(adminClient.performRequest("GET", "_snapshot/_all")).entrySet()) {
+ for (Map.Entry repo : entityAsMap(adminClient.performRequest(new Request("GET", "/_snapshot/_all"))).entrySet()) {
String repoName = repo.getKey();
Map, ?> repoSpec = (Map, ?>) repo.getValue();
String repoType = (String) repoSpec.get("type");
if (false == preserveSnapshotsUponCompletion() && repoType.equals("fs")) {
// All other repo types we really don't have a chance of being able to iterate properly, sadly.
- String url = "_snapshot/" + repoName + "/_all";
- Map params = singletonMap("ignore_unavailable", "true");
- List> snapshots = (List>) entityAsMap(adminClient.performRequest("GET", url, params)).get("snapshots");
+ Request listRequest = new Request("GET", "/_snapshot/" + repoName + "/_all");
+ listRequest.addParameter("ignore_unavailable", "true");
+ List> snapshots = (List>) entityAsMap(adminClient.performRequest(listRequest)).get("snapshots");
for (Object snapshot : snapshots) {
Map, ?> snapshotInfo = (Map, ?>) snapshot;
String name = (String) snapshotInfo.get("snapshot");
logger.debug("wiping snapshot [{}/{}]", repoName, name);
- adminClient().performRequest("DELETE", "_snapshot/" + repoName + "/" + name);
+ adminClient().performRequest(new Request("DELETE", "/_snapshot/" + repoName + "/" + name));
}
}
if (preserveReposUponCompletion() == false) {
logger.debug("wiping snapshot repository [{}]", repoName);
- adminClient().performRequest("DELETE", "_snapshot/" + repoName);
+ adminClient().performRequest(new Request("DELETE", "_snapshot/" + repoName));
}
}
}
@@ -334,7 +324,7 @@ public abstract class ESRestTestCase extends ESTestCase {
* Remove any cluster settings.
*/
private void wipeClusterSettings() throws IOException {
- Map, ?> getResponse = entityAsMap(adminClient().performRequest("GET", "/_cluster/settings"));
+ Map, ?> getResponse = entityAsMap(adminClient().performRequest(new Request("GET", "/_cluster/settings")));
boolean mustClear = false;
XContentBuilder clearCommand = JsonXContent.contentBuilder();
@@ -355,8 +345,9 @@ public abstract class ESRestTestCase extends ESTestCase {
clearCommand.endObject();
if (mustClear) {
- adminClient().performRequest("PUT", "/_cluster/settings", emptyMap(), new StringEntity(
- Strings.toString(clearCommand), ContentType.APPLICATION_JSON));
+ Request request = new Request("PUT", "/_cluster/settings");
+ request.setJsonEntity(Strings.toString(clearCommand));
+ adminClient().performRequest(request);
}
}
@@ -365,7 +356,7 @@ public abstract class ESRestTestCase extends ESTestCase {
* other tests.
*/
private void logIfThereAreRunningTasks() throws InterruptedException, IOException {
- Set runningTasks = runningTasks(adminClient().performRequest("GET", "_tasks"));
+ Set runningTasks = runningTasks(adminClient().performRequest(new Request("GET", "/_tasks")));
// Ignore the task list API - it doesn't count against us
runningTasks.remove(ListTasksAction.NAME);
runningTasks.remove(ListTasksAction.NAME + "[n]");
@@ -389,7 +380,7 @@ public abstract class ESRestTestCase extends ESTestCase {
private void waitForClusterStateUpdatesToFinish() throws Exception {
assertBusy(() -> {
try {
- Response response = adminClient().performRequest("GET", "_cluster/pending_tasks");
+ Response response = adminClient().performRequest(new Request("GET", "/_cluster/pending_tasks"));
List> tasks = (List>) entityAsMap(response).get("tasks");
if (false == tasks.isEmpty()) {
StringBuilder message = new StringBuilder("there are still running tasks:");
@@ -514,12 +505,12 @@ public abstract class ESRestTestCase extends ESTestCase {
* @param index index to test for
**/
protected static void ensureGreen(String index) throws IOException {
- Map params = new HashMap<>();
- params.put("wait_for_status", "green");
- params.put("wait_for_no_relocating_shards", "true");
- params.put("timeout", "70s");
- params.put("level", "shards");
- assertOK(client().performRequest("GET", "_cluster/health/" + index, params));
+ Request request = new Request("GET", "/_cluster/health/" + index);
+ request.addParameter("wait_for_status", "green");
+ request.addParameter("wait_for_no_relocating_shards", "true");
+ request.addParameter("timeout", "70s");
+ request.addParameter("level", "shards");
+ client().performRequest(request);
}
/**
@@ -527,11 +518,11 @@ public abstract class ESRestTestCase extends ESTestCase {
* in the cluster and doesn't require to know how many nodes/replica there are.
*/
protected static void ensureNoInitializingShards() throws IOException {
- Map params = new HashMap<>();
- params.put("wait_for_no_initializing_shards", "true");
- params.put("timeout", "70s");
- params.put("level", "shards");
- assertOK(client().performRequest("GET", "_cluster/health/", params));
+ Request request = new Request("GET", "/_cluster/health");
+ request.addParameter("wait_for_no_initializing_shards", "true");
+ request.addParameter("timeout", "70s");
+ request.addParameter("level", "shards");
+ client().performRequest(request);
}
protected static void createIndex(String name, Settings settings) throws IOException {
@@ -539,9 +530,10 @@ public abstract class ESRestTestCase extends ESTestCase {
}
protected static void createIndex(String name, Settings settings, String mapping) throws IOException {
- assertOK(client().performRequest(HttpPut.METHOD_NAME, name, Collections.emptyMap(),
- new StringEntity("{ \"settings\": " + Strings.toString(settings)
- + ", \"mappings\" : {" + mapping + "} }", ContentType.APPLICATION_JSON)));
+ Request request = new Request("PUT", "/" + name);
+ request.setJsonEntity("{\n \"settings\": " + Strings.toString(settings)
+ + ", \"mappings\" : {" + mapping + "} }");
+ client().performRequest(request);
}
protected static void updateIndexSettings(String index, Settings.Builder settings) throws IOException {
@@ -549,42 +541,42 @@ public abstract class ESRestTestCase extends ESTestCase {
}
private static void updateIndexSettings(String index, Settings settings) throws IOException {
- assertOK(client().performRequest("PUT", index + "/_settings", Collections.emptyMap(),
- new StringEntity(Strings.toString(settings), ContentType.APPLICATION_JSON)));
+ Request request = new Request("PUT", "/" + index + "/_settings");
+ request.setJsonEntity(Strings.toString(settings));
+ client().performRequest(request);
}
protected static Map getIndexSettings(String index) throws IOException {
- Map params = new HashMap<>();
- params.put("flat_settings", "true");
- Response response = client().performRequest(HttpGet.METHOD_NAME, index + "/_settings", params);
- assertOK(response);
+ Request request = new Request("GET", "/" + index + "/_settings");
+ request.addParameter("flat_settings", "true");
+ Response response = client().performRequest(request);
try (InputStream is = response.getEntity().getContent()) {
return XContentHelper.convertToMap(XContentType.JSON.xContent(), is, true);
}
}
protected static boolean indexExists(String index) throws IOException {
- Response response = client().performRequest(HttpHead.METHOD_NAME, index);
+ Response response = client().performRequest(new Request("HEAD", "/" + index));
return RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode();
}
protected static void closeIndex(String index) throws IOException {
- Response response = client().performRequest(HttpPost.METHOD_NAME, index + "/_close");
+ Response response = client().performRequest(new Request("POST", "/" + index + "/_close"));
assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
}
protected static void openIndex(String index) throws IOException {
- Response response = client().performRequest(HttpPost.METHOD_NAME, index + "/_open");
+ Response response = client().performRequest(new Request("POST", "/" + index + "/_open"));
assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
}
protected static boolean aliasExists(String alias) throws IOException {
- Response response = client().performRequest(HttpHead.METHOD_NAME, "/_alias/" + alias);
+ Response response = client().performRequest(new Request("HEAD", "/_alias/" + alias));
return RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode();
}
protected static boolean aliasExists(String index, String alias) throws IOException {
- Response response = client().performRequest(HttpHead.METHOD_NAME, "/" + index + "/_alias/" + alias);
+ Response response = client().performRequest(new Request("HEAD", "/" + index + "/_alias/" + alias));
return RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode();
}
@@ -602,7 +594,7 @@ public abstract class ESRestTestCase extends ESTestCase {
}
protected static Map getAsMap(final String endpoint) throws IOException {
- Response response = client().performRequest(HttpGet.METHOD_NAME, endpoint);
+ Response response = client().performRequest(new Request("GET", endpoint));
XContentType entityContentType = XContentType.fromMediaTypeOrFormat(response.getEntity().getContentType().getValue());
Map responseEntity = XContentHelper.convertToMap(entityContentType.xContent(),
response.getEntity().getContent(), false);
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java
index 69f4e0666ea..b97b4e8f6da 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java
@@ -47,7 +47,6 @@ import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
-import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
@@ -282,7 +281,9 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase {
private static Tuple readVersionsFromCatNodes(RestClient restClient) throws IOException {
// we simply go to the _cat/nodes API and parse all versions in the cluster
- Response response = restClient.performRequest("GET", "/_cat/nodes", Collections.singletonMap("h", "version,master"));
+ Request request = new Request("GET", "/_cat/nodes");
+ request.addParameter("h", "version,master");
+ Response response = restClient.performRequest(request);
ClientYamlTestResponse restTestResponse = new ClientYamlTestResponse(response);
String nodesCatResponse = restTestResponse.getBodyAsString();
String[] split = nodesCatResponse.split("\n");
@@ -310,7 +311,7 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase {
Version version = null;
for (int i = 0; i < numHosts; i++) {
//we don't really use the urls here, we rely on the client doing round-robin to touch all the nodes in the cluster
- Response response = restClient.performRequest("GET", "/");
+ Response response = restClient.performRequest(new Request("GET", "/"));
ClientYamlTestResponse restTestResponse = new ClientYamlTestResponse(response);
Object latestVersion = restTestResponse.evaluate("version.number");
if (latestVersion == null) {
From d76293f99066db00eb121cb8b470d1f315a51263 Mon Sep 17 00:00:00 2001
From: Michael Basnight
Date: Wed, 11 Jul 2018 09:37:48 -0500
Subject: [PATCH 06/17] Test: fix null failure in watcher test (#31968)
A new commit was merged that does not allow both the attachment and the
text to be null. This is valid for the Slack API, which does not allow
this, but our unit tests did. This commit fixes the broken unit test.
Closes #31948
---
.../watcher/notification/slack/message/SlackMessageTests.java | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/slack/message/SlackMessageTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/slack/message/SlackMessageTests.java
index 740501eec4f..83e2e997839 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/slack/message/SlackMessageTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/slack/message/SlackMessageTests.java
@@ -461,7 +461,6 @@ public class SlackMessageTests extends ESTestCase {
assertThat(parsed, equalTo(template));
}
- @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/31948")
public void testTemplateRender() throws Exception {
Settings settings = SlackMessageDefaultsTests.randomSettings();
SlackMessageDefaults defaults = new SlackMessageDefaults(settings);
@@ -482,7 +481,7 @@ public class SlackMessageTests extends ESTestCase {
if (randomBoolean()) {
templateBuilder.setText(randomAlphaOfLength(10));
}
- if (randomBoolean()) {
+ if (templateBuilder.text == null || randomBoolean()) {
int count = randomIntBetween(0, 3);
for (int i = 0; i < count; i++) {
Attachment.Template.Builder attachmentBuilder = createRandomAttachmentTemplateBuilder();
From 51bb27a99114452c07a0b8e51d90c304f443ffb7 Mon Sep 17 00:00:00 2001
From: Jake Landis
Date: Wed, 11 Jul 2018 10:13:41 -0500
Subject: [PATCH 07/17] ingest: date_index_name processor template resolution
(#31841)
This change adds support for template snippet (e.g. {{foo}}) resolution
in the date_index_name processor. The following configuration options
will now resolve a templated value if so configured:
* index_name_prefix (e.g "index_name_prefix": "myindex-{{foo}}-")
* date_rounding (e.g. "date_rounding" : "{{bar}}")
* index_name_format (e.g."index_name_format": "{{baz}}")
---
.../ingest/common/DateIndexNameProcessor.java | 47 +++++++++++++------
.../ingest/common/IngestCommonPlugin.java | 6 +--
.../common/DateIndexNameFactoryTests.java | 19 ++++----
.../common/DateIndexNameProcessorTests.java | 45 ++++++++++++++----
4 files changed, 83 insertions(+), 34 deletions(-)
diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java
index b44eaa3bfa3..0d6253c88f9 100644
--- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java
@@ -32,6 +32,8 @@ import org.elasticsearch.ingest.AbstractProcessor;
import org.elasticsearch.ingest.ConfigurationUtils;
import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.Processor;
+import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.script.TemplateScript;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
@@ -42,21 +44,22 @@ public final class DateIndexNameProcessor extends AbstractProcessor {
public static final String TYPE = "date_index_name";
private final String field;
- private final String indexNamePrefix;
- private final String dateRounding;
- private final String indexNameFormat;
+ private final TemplateScript.Factory indexNamePrefixTemplate;
+ private final TemplateScript.Factory dateRoundingTemplate;
+ private final TemplateScript.Factory indexNameFormatTemplate;
private final DateTimeZone timezone;
private final List> dateFormats;
DateIndexNameProcessor(String tag, String field, List> dateFormats, DateTimeZone timezone,
- String indexNamePrefix, String dateRounding, String indexNameFormat) {
+ TemplateScript.Factory indexNamePrefixTemplate, TemplateScript.Factory dateRoundingTemplate,
+ TemplateScript.Factory indexNameFormatTemplate) {
super(tag);
this.field = field;
this.timezone = timezone;
this.dateFormats = dateFormats;
- this.indexNamePrefix = indexNamePrefix;
- this.dateRounding = dateRounding;
- this.indexNameFormat = indexNameFormat;
+ this.indexNamePrefixTemplate = indexNamePrefixTemplate;
+ this.dateRoundingTemplate = dateRoundingTemplate;
+ this.indexNameFormatTemplate = indexNameFormatTemplate;
}
@Override
@@ -83,6 +86,9 @@ public final class DateIndexNameProcessor extends AbstractProcessor {
if (dateTime == null) {
throw new IllegalArgumentException("unable to parse date [" + date + "]", lastException);
}
+ String indexNamePrefix = ingestDocument.renderTemplate(indexNamePrefixTemplate);
+ String indexNameFormat = ingestDocument.renderTemplate(indexNameFormatTemplate);
+ String dateRounding = ingestDocument.renderTemplate(dateRoundingTemplate);
DateTimeFormatter formatter = DateTimeFormat.forPattern(indexNameFormat);
StringBuilder builder = new StringBuilder()
@@ -106,16 +112,16 @@ public final class DateIndexNameProcessor extends AbstractProcessor {
return field;
}
- String getIndexNamePrefix() {
- return indexNamePrefix;
+ TemplateScript.Factory getIndexNamePrefixTemplate() {
+ return indexNamePrefixTemplate;
}
- String getDateRounding() {
- return dateRounding;
+ TemplateScript.Factory getDateRoundingTemplate() {
+ return dateRoundingTemplate;
}
- String getIndexNameFormat() {
- return indexNameFormat;
+ TemplateScript.Factory getIndexNameFormatTemplate() {
+ return indexNameFormatTemplate;
}
DateTimeZone getTimezone() {
@@ -128,6 +134,12 @@ public final class DateIndexNameProcessor extends AbstractProcessor {
public static final class Factory implements Processor.Factory {
+ private final ScriptService scriptService;
+
+ public Factory(ScriptService scriptService) {
+ this.scriptService = scriptService;
+ }
+
@Override
public DateIndexNameProcessor create(Map registry, String tag,
Map config) throws Exception {
@@ -154,9 +166,16 @@ public final class DateIndexNameProcessor extends AbstractProcessor {
String field = ConfigurationUtils.readStringProperty(TYPE, tag, config, "field");
String indexNamePrefix = ConfigurationUtils.readStringProperty(TYPE, tag, config, "index_name_prefix", "");
+ TemplateScript.Factory indexNamePrefixTemplate =
+ ConfigurationUtils.compileTemplate(TYPE, tag, "index_name_prefix", indexNamePrefix, scriptService);
String dateRounding = ConfigurationUtils.readStringProperty(TYPE, tag, config, "date_rounding");
+ TemplateScript.Factory dateRoundingTemplate =
+ ConfigurationUtils.compileTemplate(TYPE, tag, "date_rounding", dateRounding, scriptService);
String indexNameFormat = ConfigurationUtils.readStringProperty(TYPE, tag, config, "index_name_format", "yyyy-MM-dd");
- return new DateIndexNameProcessor(tag, field, dateFormats, timezone, indexNamePrefix, dateRounding, indexNameFormat);
+ TemplateScript.Factory indexNameFormatTemplate =
+ ConfigurationUtils.compileTemplate(TYPE, tag, "index_name_format", indexNameFormat, scriptService);
+ return new DateIndexNameProcessor(tag, field, dateFormats, timezone, indexNamePrefixTemplate,
+ dateRoundingTemplate, indexNameFormatTemplate);
}
}
diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java
index d9878cae9e2..bc475a2a005 100644
--- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java
@@ -73,7 +73,7 @@ public class IngestCommonPlugin extends Plugin implements ActionPlugin, IngestPl
processors.put(GsubProcessor.TYPE, new GsubProcessor.Factory());
processors.put(FailProcessor.TYPE, new FailProcessor.Factory(parameters.scriptService));
processors.put(ForEachProcessor.TYPE, new ForEachProcessor.Factory());
- processors.put(DateIndexNameProcessor.TYPE, new DateIndexNameProcessor.Factory());
+ processors.put(DateIndexNameProcessor.TYPE, new DateIndexNameProcessor.Factory(parameters.scriptService));
processors.put(SortProcessor.TYPE, new SortProcessor.Factory());
processors.put(GrokProcessor.TYPE, new GrokProcessor.Factory(GROK_PATTERNS, createGrokThreadWatchdog(parameters)));
processors.put(ScriptProcessor.TYPE, new ScriptProcessor.Factory(parameters.scriptService));
@@ -97,12 +97,12 @@ public class IngestCommonPlugin extends Plugin implements ActionPlugin, IngestPl
Supplier nodesInCluster) {
return Arrays.asList(new GrokProcessorGetAction.RestAction(settings, restController));
}
-
+
@Override
public List> getSettings() {
return Arrays.asList(WATCHDOG_INTERVAL, WATCHDOG_MAX_EXECUTION_TIME);
}
-
+
private static ThreadWatchdog createGrokThreadWatchdog(Processor.Parameters parameters) {
long intervalMillis = WATCHDOG_INTERVAL.get(parameters.env.settings()).getMillis();
long maxExecutionTimeMillis = WATCHDOG_MAX_EXECUTION_TIME.get(parameters.env.settings()).getMillis();
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameFactoryTests.java
index 3b9e2121c95..2735cf55776 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameFactoryTests.java
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameFactoryTests.java
@@ -20,18 +20,20 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.ingest.TestTemplateService;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matchers;
import org.joda.time.DateTimeZone;
import java.util.Arrays;
+import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
public class DateIndexNameFactoryTests extends ESTestCase {
public void testDefaults() throws Exception {
- DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory();
+ DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory(TestTemplateService.instance());
Map config = new HashMap<>();
config.put("field", "_field");
config.put("date_rounding", "y");
@@ -39,14 +41,14 @@ public class DateIndexNameFactoryTests extends ESTestCase {
DateIndexNameProcessor processor = factory.create(null, null, config);
assertThat(processor.getDateFormats().size(), Matchers.equalTo(1));
assertThat(processor.getField(), Matchers.equalTo("_field"));
- assertThat(processor.getIndexNamePrefix(), Matchers.equalTo(""));
- assertThat(processor.getDateRounding(), Matchers.equalTo("y"));
- assertThat(processor.getIndexNameFormat(), Matchers.equalTo("yyyy-MM-dd"));
+ assertThat(processor.getIndexNamePrefixTemplate().newInstance(Collections.emptyMap()).execute(), Matchers.equalTo(""));
+ assertThat(processor.getDateRoundingTemplate().newInstance(Collections.emptyMap()).execute(), Matchers.equalTo("y"));
+ assertThat(processor.getIndexNameFormatTemplate().newInstance(Collections.emptyMap()).execute(), Matchers.equalTo("yyyy-MM-dd"));
assertThat(processor.getTimezone(), Matchers.equalTo(DateTimeZone.UTC));
}
public void testSpecifyOptionalSettings() throws Exception {
- DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory();
+ DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory(TestTemplateService.instance());
Map config = new HashMap<>();
config.put("field", "_field");
config.put("index_name_prefix", "_prefix");
@@ -63,7 +65,7 @@ public class DateIndexNameFactoryTests extends ESTestCase {
config.put("index_name_format", "yyyyMMdd");
processor = factory.create(null, null, config);
- assertThat(processor.getIndexNameFormat(), Matchers.equalTo("yyyyMMdd"));
+ assertThat(processor.getIndexNameFormatTemplate().newInstance(Collections.emptyMap()).execute(), Matchers.equalTo("yyyyMMdd"));
config = new HashMap<>();
config.put("field", "_field");
@@ -80,11 +82,11 @@ public class DateIndexNameFactoryTests extends ESTestCase {
config.put("date_rounding", "y");
processor = factory.create(null, null, config);
- assertThat(processor.getIndexNamePrefix(), Matchers.equalTo("_prefix"));
+ assertThat(processor.getIndexNamePrefixTemplate().newInstance(Collections.emptyMap()).execute(), Matchers.equalTo("_prefix"));
}
public void testRequiredFields() throws Exception {
- DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory();
+ DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory(TestTemplateService.instance());
Map config = new HashMap<>();
config.put("date_rounding", "y");
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, config));
@@ -95,5 +97,4 @@ public class DateIndexNameFactoryTests extends ESTestCase {
e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, config));
assertThat(e.getMessage(), Matchers.equalTo("[date_rounding] required property is missing"));
}
-
}
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java
index eba37dc7421..c97da116e34 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java
@@ -19,11 +19,14 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.ingest.IngestDocument;
+import org.elasticsearch.ingest.TestTemplateService;
import org.elasticsearch.test.ESTestCase;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
+import org.joda.time.format.DateTimeFormat;
import java.util.Collections;
+import java.util.List;
import java.util.Locale;
import java.util.function.Function;
@@ -33,11 +36,8 @@ public class DateIndexNameProcessorTests extends ESTestCase {
public void testJodaPattern() throws Exception {
Function function = DateFormat.Joda.getFunction("yyyy-MM-dd'T'HH:mm:ss.SSSZ", DateTimeZone.UTC, Locale.ROOT);
- DateIndexNameProcessor processor = new DateIndexNameProcessor(
- "_tag", "_field", Collections.singletonList(function), DateTimeZone.UTC,
- "events-", "y", "yyyyMMdd"
- );
-
+ DateIndexNameProcessor processor = createProcessor("_field", Collections.singletonList(function),
+ DateTimeZone.UTC, "events-", "y", "yyyyMMdd");
IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null,
Collections.singletonMap("_field", "2016-04-25T12:24:20.101Z"));
processor.execute(document);
@@ -46,7 +46,7 @@ public class DateIndexNameProcessorTests extends ESTestCase {
public void testTAI64N()throws Exception {
Function function = DateFormat.Tai64n.getFunction(null, DateTimeZone.UTC, null);
- DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function),
+ DateIndexNameProcessor dateProcessor = createProcessor("_field", Collections.singletonList(function),
DateTimeZone.UTC, "events-", "m", "yyyyMMdd");
IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null,
Collections.singletonMap("_field", (randomBoolean() ? "@" : "") + "4000000050d506482dbdf024"));
@@ -56,7 +56,7 @@ public class DateIndexNameProcessorTests extends ESTestCase {
public void testUnixMs()throws Exception {
Function function = DateFormat.UnixMs.getFunction(null, DateTimeZone.UTC, null);
- DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function),
+ DateIndexNameProcessor dateProcessor = createProcessor("_field", Collections.singletonList(function),
DateTimeZone.UTC, "events-", "m", "yyyyMMdd");
IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null,
Collections.singletonMap("_field", "1000500"));
@@ -71,7 +71,7 @@ public class DateIndexNameProcessorTests extends ESTestCase {
public void testUnix()throws Exception {
Function function = DateFormat.Unix.getFunction(null, DateTimeZone.UTC, null);
- DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function),
+ DateIndexNameProcessor dateProcessor = createProcessor("_field", Collections.singletonList(function),
DateTimeZone.UTC, "events-", "m", "yyyyMMdd");
IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null,
Collections.singletonMap("_field", "1000.5"));
@@ -79,4 +79,33 @@ public class DateIndexNameProcessorTests extends ESTestCase {
assertThat(document.getSourceAndMetadata().get("_index"), equalTo(""));
}
+ public void testTemplatedFields() throws Exception {
+ String indexNamePrefix = randomAlphaOfLength(10);
+ String dateRounding = randomFrom("y", "M", "w", "d", "h", "m", "s");
+ String indexNameFormat = randomFrom("yyyy-MM-dd'T'HH:mm:ss.SSSZ", "yyyyMMdd", "MM/dd/yyyy");
+ String date = Integer.toString(randomInt());
+ Function dateTimeFunction = DateFormat.Unix.getFunction(null, DateTimeZone.UTC, null);
+
+ DateIndexNameProcessor dateProcessor = createProcessor("_field",
+ Collections.singletonList(dateTimeFunction), DateTimeZone.UTC, indexNamePrefix,
+ dateRounding, indexNameFormat);
+
+ IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null,
+ Collections.singletonMap("_field", date));
+ dateProcessor.execute(document);
+
+ assertThat(document.getSourceAndMetadata().get("_index"),
+ equalTo("<"+indexNamePrefix+"{"+DateTimeFormat.forPattern(indexNameFormat)
+ .print(dateTimeFunction.apply(date))+"||/"+dateRounding+"{"+indexNameFormat+"|UTC}}>"));
+ }
+
+ private DateIndexNameProcessor createProcessor(String field, List> dateFormats,
+ DateTimeZone timezone, String indexNamePrefix, String dateRounding,
+ String indexNameFormat) {
+ return new DateIndexNameProcessor(randomAlphaOfLength(10), field, dateFormats, timezone,
+ new TestTemplateService.MockTemplateScript.Factory(indexNamePrefix),
+ new TestTemplateService.MockTemplateScript.Factory(dateRounding),
+ new TestTemplateService.MockTemplateScript.Factory(indexNameFormat)
+ );
+ }
}
From 5bcdff73d756814f270794110fb4502d0a2a513a Mon Sep 17 00:00:00 2001
From: James Baiera
Date: Wed, 11 Jul 2018 12:07:31 -0400
Subject: [PATCH 08/17] Add Snapshots Status API to High Level Rest Client
(#31515)
This PR adds the Snapshots Status API to the Snapshot Client, as
well as additional documentation for the status api.
---
.../client/RequestConverters.java | 23 ++-
.../elasticsearch/client/SnapshotClient.java | 31 +++
.../client/RequestConvertersTests.java | 25 +++
.../org/elasticsearch/client/SnapshotIT.java | 32 ++++
.../SnapshotClientDocumentationIT.java | 87 ++++++++-
.../snapshot/snapshots_status.asciidoc | 97 ++++++++++
.../high-level/supported-apis.asciidoc | 2 +
.../status/SnapshotIndexShardStatus.java | 89 ++++++++-
.../snapshots/status/SnapshotIndexStatus.java | 78 +++++++-
.../snapshots/status/SnapshotShardsStats.java | 86 ++++++++-
.../snapshots/status/SnapshotStats.java | 181 +++++++++++++++---
.../snapshots/status/SnapshotStatus.java | 99 +++++++++-
.../status/SnapshotsStatusResponse.java | 34 ++++
.../status/SnapshotIndexShardStatusTests.java | 70 +++++++
.../status/SnapshotIndexStatusTests.java | 64 +++++++
.../status/SnapshotShardsStatsTests.java | 49 +++++
.../snapshots/status/SnapshotStatsTests.java | 52 +++++
.../snapshots/status/SnapshotStatusTests.java | 42 +++-
.../status/SnapshotsStatusResponseTests.java | 57 ++++++
19 files changed, 1153 insertions(+), 45 deletions(-)
create mode 100644 docs/java-rest/high-level/snapshot/snapshots_status.asciidoc
create mode 100644 server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatusTests.java
create mode 100644 server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatusTests.java
create mode 100644 server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStatsTests.java
create mode 100644 server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatsTests.java
create mode 100644 server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponseTests.java
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
index 60dafd03f9c..126a9c7d4b4 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
@@ -43,6 +43,7 @@ import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
@@ -963,6 +964,20 @@ final class RequestConverters {
return request;
}
+ static Request snapshotsStatus(SnapshotsStatusRequest snapshotsStatusRequest) {
+ String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot")
+ .addPathPart(snapshotsStatusRequest.repository())
+ .addCommaSeparatedPathParts(snapshotsStatusRequest.snapshots())
+ .addPathPartAsIs("_status")
+ .build();
+ Request request = new Request(HttpGet.METHOD_NAME, endpoint);
+
+ Params parameters = new Params(request);
+ parameters.withMasterTimeout(snapshotsStatusRequest.masterNodeTimeout());
+ parameters.withIgnoreUnavailable(snapshotsStatusRequest.ignoreUnavailable());
+ return request;
+ }
+
static Request deleteSnapshot(DeleteSnapshotRequest deleteSnapshotRequest) {
String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot")
.addPathPart(deleteSnapshotRequest.repository())
@@ -1262,7 +1277,7 @@ final class RequestConverters {
}
Params withIndicesOptions(IndicesOptions indicesOptions) {
- putParam("ignore_unavailable", Boolean.toString(indicesOptions.ignoreUnavailable()));
+ withIgnoreUnavailable(indicesOptions.ignoreUnavailable());
putParam("allow_no_indices", Boolean.toString(indicesOptions.allowNoIndices()));
String expandWildcards;
if (indicesOptions.expandWildcardsOpen() == false && indicesOptions.expandWildcardsClosed() == false) {
@@ -1281,6 +1296,12 @@ final class RequestConverters {
return this;
}
+ Params withIgnoreUnavailable(boolean ignoreUnavailable) {
+ // Always explicitly place the ignore_unavailable value.
+ putParam("ignore_unavailable", Boolean.toString(ignoreUnavailable));
+ return this;
+ }
+
Params withHuman(boolean human) {
if (human) {
putParam("human", Boolean.toString(human));
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java
index fa147a338de..bc0bbe95488 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java
@@ -30,6 +30,8 @@ import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyReposito
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
@@ -221,6 +223,35 @@ public final class SnapshotClient {
GetSnapshotsResponse::fromXContent, listener, emptySet());
}
+ /**
+ * Gets the status of requested snapshots.
+ * See Snapshot and Restore
+ * API on elastic.co
+ * @param snapshotsStatusRequest the request
+ * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @return the response
+ * @throws IOException in case there is a problem sending the request or parsing back the response
+ */
+ public SnapshotsStatusResponse status(SnapshotsStatusRequest snapshotsStatusRequest, RequestOptions options)
+ throws IOException {
+ return restHighLevelClient.performRequestAndParseEntity(snapshotsStatusRequest, RequestConverters::snapshotsStatus, options,
+ SnapshotsStatusResponse::fromXContent, emptySet());
+ }
+
+ /**
+ * Asynchronously gets the status of requested snapshots.
+ * See Snapshot and Restore
+ * API on elastic.co
+ * @param snapshotsStatusRequest the request
+ * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @param listener the listener to be notified upon request completion
+ */
+ public void statusAsync(SnapshotsStatusRequest snapshotsStatusRequest, RequestOptions options,
+ ActionListener listener) {
+ restHighLevelClient.performRequestAsyncAndParseEntity(snapshotsStatusRequest, RequestConverters::snapshotsStatus, options,
+ SnapshotsStatusResponse::fromXContent, listener, emptySet());
+ }
+
/**
* Deletes a snapshot.
* See Snapshot and Restore
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
index 255554be676..fb4e3b22712 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
@@ -43,6 +43,7 @@ import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotReq
import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
@@ -175,6 +176,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXC
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.hasKey;
+import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
@@ -2171,6 +2173,29 @@ public class RequestConvertersTests extends ESTestCase {
assertNull(request.getEntity());
}
+ public void testSnapshotsStatus() {
+ Map expectedParams = new HashMap<>();
+ String repository = randomIndicesNames(1, 1)[0];
+ String[] snapshots = randomIndicesNames(1, 5);
+ StringBuilder snapshotNames = new StringBuilder(snapshots[0]);
+ for (int idx = 1; idx < snapshots.length; idx++) {
+ snapshotNames.append(",").append(snapshots[idx]);
+ }
+ boolean ignoreUnavailable = randomBoolean();
+ String endpoint = "/_snapshot/" + repository + "/" + snapshotNames.toString() + "/_status";
+
+ SnapshotsStatusRequest snapshotsStatusRequest = new SnapshotsStatusRequest(repository, snapshots);
+ setRandomMasterTimeout(snapshotsStatusRequest, expectedParams);
+ snapshotsStatusRequest.ignoreUnavailable(ignoreUnavailable);
+ expectedParams.put("ignore_unavailable", Boolean.toString(ignoreUnavailable));
+
+ Request request = RequestConverters.snapshotsStatus(snapshotsStatusRequest);
+ assertThat(request.getEndpoint(), equalTo(endpoint));
+ assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME));
+ assertThat(request.getParameters(), equalTo(expectedParams));
+ assertThat(request.getEntity(), is(nullValue()));
+ }
+
public void testDeleteSnapshot() {
Map expectedParams = new HashMap<>();
String repository = randomIndicesNames(1, 1)[0];
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java
index 7ec2ee80f04..45f9b5bbb0b 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java
@@ -28,6 +28,9 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequ
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse;
+import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
@@ -43,6 +46,7 @@ import java.util.stream.Collectors;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
public class SnapshotIT extends ESRestHighLevelClientTestCase {
@@ -173,6 +177,34 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase {
contains("test_snapshot1", "test_snapshot2"));
}
+ public void testSnapshotsStatus() throws IOException {
+ String testRepository = "test";
+ String testSnapshot = "snapshot";
+ String testIndex = "test_index";
+
+ PutRepositoryResponse putRepositoryResponse = createTestRepository(testRepository, FsRepository.TYPE, "{\"location\": \".\"}");
+ assertTrue(putRepositoryResponse.isAcknowledged());
+
+ createIndex(testIndex, Settings.EMPTY);
+
+ CreateSnapshotRequest createSnapshotRequest = new CreateSnapshotRequest(testRepository, testSnapshot);
+ createSnapshotRequest.indices(testIndex);
+ createSnapshotRequest.waitForCompletion(true);
+ CreateSnapshotResponse createSnapshotResponse = createTestSnapshot(createSnapshotRequest);
+ // check that the request went ok without parsing JSON here. When using the high level client, check acknowledgement instead.
+ assertEquals(RestStatus.OK, createSnapshotResponse.status());
+
+ SnapshotsStatusRequest request = new SnapshotsStatusRequest();
+ request.repository(testRepository);
+ request.snapshots(new String[]{testSnapshot});
+ SnapshotsStatusResponse response = execute(request, highLevelClient().snapshot()::status,
+ highLevelClient().snapshot()::statusAsync);
+ assertThat(response.getSnapshots().size(), equalTo(1));
+ assertThat(response.getSnapshots().get(0).getSnapshot().getRepository(), equalTo(testRepository));
+ assertThat(response.getSnapshots().get(0).getSnapshot().getSnapshotId().getName(), equalTo(testSnapshot));
+ assertThat(response.getSnapshots().get(0).getIndices().containsKey(testIndex), is(true));
+ }
+
public void testDeleteSnapshot() throws IOException {
String repository = "test_repository";
String snapshot = "test_snapshot";
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java
index 48d01963e23..403ebc7d774 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java
@@ -37,11 +37,16 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStats;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStatus;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestHighLevelClient;
+import org.elasticsearch.cluster.SnapshotsInProgress;
import org.elasticsearch.cluster.metadata.RepositoryMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
@@ -84,8 +89,8 @@ import static org.hamcrest.Matchers.equalTo;
public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase {
private static final String repositoryName = "test_repository";
-
private static final String snapshotName = "test_snapshot";
+ private static final String indexName = "test_index";
public void testSnapshotCreateRepository() throws IOException {
RestHighLevelClient client = highLevelClient();
@@ -466,6 +471,7 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase
RestHighLevelClient client = highLevelClient();
createTestRepositories();
+ createTestIndex();
createTestSnapshots();
// tag::get-snapshots-request
@@ -543,10 +549,84 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase
}
}
+ public void testSnapshotSnapshotsStatus() throws IOException {
+ RestHighLevelClient client = highLevelClient();
+ createTestRepositories();
+ createTestIndex();
+ createTestSnapshots();
+
+ // tag::snapshots-status-request
+ SnapshotsStatusRequest request = new SnapshotsStatusRequest();
+ // end::snapshots-status-request
+
+ // tag::snapshots-status-request-repository
+ request.repository(repositoryName); // <1>
+ // end::snapshots-status-request-repository
+ // tag::snapshots-status-request-snapshots
+ String [] snapshots = new String[] {snapshotName};
+ request.snapshots(snapshots); // <1>
+ // end::snapshots-status-request-snapshots
+ // tag::snapshots-status-request-ignoreUnavailable
+ request.ignoreUnavailable(true); // <1>
+ // end::snapshots-status-request-ignoreUnavailable
+ // tag::snapshots-status-request-masterTimeout
+ request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1>
+ request.masterNodeTimeout("1m"); // <2>
+ // end::snapshots-status-request-masterTimeout
+
+ // tag::snapshots-status-execute
+ SnapshotsStatusResponse response = client.snapshot().status(request, RequestOptions.DEFAULT);
+ // end::snapshots-status-execute
+
+ // tag::snapshots-status-response
+ List snapshotStatusesResponse = response.getSnapshots();
+ SnapshotStatus snapshotStatus = snapshotStatusesResponse.get(0); // <1>
+ SnapshotsInProgress.State snapshotState = snapshotStatus.getState(); // <2>
+ SnapshotStats shardStats = snapshotStatus.getIndices().get(indexName).getShards().get(0).getStats(); // <3>
+ // end::snapshots-status-response
+ assertThat(snapshotStatusesResponse.size(), equalTo(1));
+ assertThat(snapshotStatusesResponse.get(0).getSnapshot().getRepository(), equalTo(SnapshotClientDocumentationIT.repositoryName));
+ assertThat(snapshotStatusesResponse.get(0).getSnapshot().getSnapshotId().getName(), equalTo(snapshotName));
+ assertThat(snapshotState.completed(), equalTo(true));
+ }
+
+ public void testSnapshotSnapshotsStatusAsync() throws InterruptedException {
+ RestHighLevelClient client = highLevelClient();
+ {
+ SnapshotsStatusRequest request = new SnapshotsStatusRequest();
+
+ // tag::snapshots-status-execute-listener
+ ActionListener listener =
+ new ActionListener() {
+ @Override
+ public void onResponse(SnapshotsStatusResponse snapshotsStatusResponse) {
+ // <1>
+ }
+
+ @Override
+ public void onFailure(Exception e) {
+ // <2>
+ }
+ };
+ // end::snapshots-status-execute-listener
+
+ // Replace the empty listener with a blocking listener in test
+ final CountDownLatch latch = new CountDownLatch(1);
+ listener = new LatchedActionListener<>(listener, latch);
+
+ // tag::snapshots-status-execute-async
+ client.snapshot().statusAsync(request, RequestOptions.DEFAULT, listener); // <1>
+ // end::snapshots-status-execute-async
+
+ assertTrue(latch.await(30L, TimeUnit.SECONDS));
+ }
+ }
+
public void testSnapshotDeleteSnapshot() throws IOException {
RestHighLevelClient client = highLevelClient();
createTestRepositories();
+ createTestIndex();
createTestSnapshots();
// tag::delete-snapshot-request
@@ -608,9 +688,14 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase
assertTrue(highLevelClient().snapshot().createRepository(request, RequestOptions.DEFAULT).isAcknowledged());
}
+ private void createTestIndex() throws IOException {
+ createIndex(indexName, Settings.EMPTY);
+ }
+
private void createTestSnapshots() throws IOException {
Request createSnapshot = new Request("put", String.format(Locale.ROOT, "_snapshot/%s/%s", repositoryName, snapshotName));
createSnapshot.addParameter("wait_for_completion", "true");
+ createSnapshot.setJsonEntity("{\"indices\":\"" + indexName + "\"}");
Response response = highLevelClient().getLowLevelClient().performRequest(createSnapshot);
// check that the request went ok without parsing JSON here. When using the high level client, check acknowledgement instead.
assertEquals(200, response.getStatusLine().getStatusCode());
diff --git a/docs/java-rest/high-level/snapshot/snapshots_status.asciidoc b/docs/java-rest/high-level/snapshot/snapshots_status.asciidoc
new file mode 100644
index 00000000000..8f91d774f4e
--- /dev/null
+++ b/docs/java-rest/high-level/snapshot/snapshots_status.asciidoc
@@ -0,0 +1,97 @@
+[[java-rest-high-snapshot-snapshots-status]]
+=== Snapshots Status API
+
+The Snapshots Status API allows retrieving detailed information about snapshots in progress.
+
+[[java-rest-high-snapshot-snapshots-status-request]]
+==== Snapshots Status Request
+
+A `SnapshotsStatusRequest`:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-request]
+--------------------------------------------------
+
+==== Required Arguments
+The following arguments must be provided:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-request-repository]
+--------------------------------------------------
+<1> Sets the repository to check for snapshot statuses
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-request-snapshots]
+--------------------------------------------------
+<1> The list of snapshot names to check the status of
+
+==== Optional Arguments
+The following arguments can optionally be provided:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-request-ignoreUnavailable]
+--------------------------------------------------
+<1> By default the command fails if some of the requested snapshots are unavailable. Setting the
+`ignore_unavailable` flag to `true` will instead return the status of all snapshots that are currently available.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-request-masterTimeout]
+--------------------------------------------------
+<1> Timeout to connect to the master node as a `TimeValue`
+<2> Timeout to connect to the master node as a `String`
+
+[[java-rest-high-snapshot-snapshots-status-sync]]
+==== Synchronous Execution
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-execute]
+--------------------------------------------------
+
+[[java-rest-high-snapshot-snapshots-status-async]]
+==== Asynchronous Execution
+
+The asynchronous execution of retrieving snapshot statuses requires both the
+`SnapshotsStatusRequest` instance and an `ActionListener` instance to be
+passed to the asynchronous method:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-execute-async]
+--------------------------------------------------
+<1> The `SnapshotsStatusRequest` to execute and the `ActionListener`
+to use when the execution completes
+
+The asynchronous method does not block and returns immediately. Once it is
+completed the `ActionListener` is called back using the `onResponse` method
+if the execution successfully completed or using the `onFailure` method if
+it failed.
+
+A typical listener for `SnapshotsStatusResponse` looks like:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-execute-listener]
+--------------------------------------------------
+<1> Called when the execution is successfully completed. The response is
+provided as an argument
+<2> Called in case of a failure. The raised exception is provided as an argument
+
+[[java-rest-high-snapshot-snapshots-status-response]]
+==== Snapshots Status Response
+
+The returned `SnapshotsStatusResponse` allows retrieving information about the
+executed operation as follows:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-response]
+--------------------------------------------------
+<1> The response contains a list of snapshot statuses
+<2> Each status contains information about the snapshot
+<3> Example of reading snapshot statistics about a specific index and shard
diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc
index e69f53eb4ba..cf38040e865 100644
--- a/docs/java-rest/high-level/supported-apis.asciidoc
+++ b/docs/java-rest/high-level/supported-apis.asciidoc
@@ -154,6 +154,7 @@ The Java High Level REST Client supports the following Snapshot APIs:
* <>
* <>
* <>
+* <>
* <>
include::snapshot/get_repository.asciidoc[]
@@ -162,6 +163,7 @@ include::snapshot/delete_repository.asciidoc[]
include::snapshot/verify_repository.asciidoc[]
include::snapshot/create_snapshot.asciidoc[]
include::snapshot/get_snapshots.asciidoc[]
+include::snapshot/snapshots_status.asciidoc[]
include::snapshot/delete_snapshot.asciidoc[]
== Tasks APIs
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatus.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatus.java
index 39abd8613ca..834e238e4a0 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatus.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatus.java
@@ -19,16 +19,27 @@
package org.elasticsearch.action.admin.cluster.snapshots.status;
+import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.support.broadcast.BroadcastShardResponse;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentParserUtils;
+import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus;
import java.io.IOException;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
+
public class SnapshotIndexShardStatus extends BroadcastShardResponse implements ToXContentFragment {
private SnapshotIndexShardStage stage = SnapshotIndexShardStage.INIT;
@@ -80,6 +91,14 @@ public class SnapshotIndexShardStatus extends BroadcastShardResponse implements
this.nodeId = nodeId;
}
+ SnapshotIndexShardStatus(ShardId shardId, SnapshotIndexShardStage stage, SnapshotStats stats, String nodeId, String failure) {
+ super(shardId);
+ this.stage = stage;
+ this.stats = stats;
+ this.nodeId = nodeId;
+ this.failure = failure;
+ }
+
/**
* Returns snapshot stage
*/
@@ -143,7 +162,7 @@ public class SnapshotIndexShardStatus extends BroadcastShardResponse implements
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(Integer.toString(getShardId().getId()));
builder.field(Fields.STAGE, getStage());
- stats.toXContent(builder, params);
+ builder.field(SnapshotStats.Fields.STATS, stats, params);
if (getNodeId() != null) {
builder.field(Fields.NODE, getNodeId());
}
@@ -153,4 +172,72 @@ public class SnapshotIndexShardStatus extends BroadcastShardResponse implements
builder.endObject();
return builder;
}
+
+ static final ObjectParser.NamedObjectParser PARSER;
+ static {
+ ConstructingObjectParser innerParser = new ConstructingObjectParser<>(
+ "snapshot_index_shard_status", true,
+ (Object[] parsedObjects, ShardId shard) -> {
+ int i = 0;
+ String rawStage = (String) parsedObjects[i++];
+ String nodeId = (String) parsedObjects[i++];
+ String failure = (String) parsedObjects[i++];
+ SnapshotStats stats = (SnapshotStats) parsedObjects[i];
+
+ SnapshotIndexShardStage stage;
+ try {
+ stage = SnapshotIndexShardStage.valueOf(rawStage);
+ } catch (IllegalArgumentException iae) {
+ throw new ElasticsearchParseException(
+ "failed to parse snapshot index shard status [{}][{}], unknonwn stage [{}]",
+ shard.getIndex().getName(), shard.getId(), rawStage);
+ }
+ return new SnapshotIndexShardStatus(shard, stage, stats, nodeId, failure);
+ }
+ );
+ innerParser.declareString(constructorArg(), new ParseField(Fields.STAGE));
+ innerParser.declareString(optionalConstructorArg(), new ParseField(Fields.NODE));
+ innerParser.declareString(optionalConstructorArg(), new ParseField(Fields.REASON));
+ innerParser.declareObject(constructorArg(), (p, c) -> SnapshotStats.fromXContent(p), new ParseField(SnapshotStats.Fields.STATS));
+ PARSER = (p, indexId, shardName) -> {
+ // Combine the index name in the context with the shard name passed in for the named object parser
+ // into a ShardId to pass as context for the inner parser.
+ int shard;
+ try {
+ shard = Integer.parseInt(shardName);
+ } catch (NumberFormatException nfe) {
+ throw new ElasticsearchParseException(
+ "failed to parse snapshot index shard status [{}], expected numeric shard id but got [{}]", indexId, shardName);
+ }
+ ShardId shardId = new ShardId(new Index(indexId, IndexMetaData.INDEX_UUID_NA_VALUE), shard);
+ return innerParser.parse(p, shardId);
+ };
+ }
+
+ public static SnapshotIndexShardStatus fromXContent(XContentParser parser, String indexId) throws IOException {
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
+ return PARSER.parse(parser, indexId, parser.currentName());
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ SnapshotIndexShardStatus that = (SnapshotIndexShardStatus) o;
+
+ if (stage != that.stage) return false;
+ if (stats != null ? !stats.equals(that.stats) : that.stats != null) return false;
+ if (nodeId != null ? !nodeId.equals(that.nodeId) : that.nodeId != null) return false;
+ return failure != null ? failure.equals(that.failure) : that.failure == null;
+ }
+
+ @Override
+ public int hashCode() {
+ int result = stage != null ? stage.hashCode() : 0;
+ result = 31 * result + (stats != null ? stats.hashCode() : 0);
+ result = 31 * result + (nodeId != null ? nodeId.hashCode() : 0);
+ result = 31 * result + (failure != null ? failure.hashCode() : 0);
+ return result;
+ }
}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatus.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatus.java
index 1605e41dc61..ba858495980 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatus.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatus.java
@@ -19,17 +19,24 @@
package org.elasticsearch.action.admin.cluster.snapshots.status;
-import org.elasticsearch.common.xcontent.ToXContent.Params;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentParserUtils;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
+import java.util.List;
import java.util.Map;
+import static java.util.Collections.emptyMap;
import static java.util.Collections.unmodifiableMap;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
/**
* Represents snapshot status of all shards in the index
@@ -57,6 +64,14 @@ public class SnapshotIndexStatus implements Iterable,
this.indexShards = unmodifiableMap(indexShards);
}
+ public SnapshotIndexStatus(String index, Map indexShards, SnapshotShardsStats shardsStats,
+ SnapshotStats stats) {
+ this.index = index;
+ this.indexShards = indexShards;
+ this.shardsStats = shardsStats;
+ this.stats = stats;
+ }
+
/**
* Returns the index name
*/
@@ -97,8 +112,8 @@ public class SnapshotIndexStatus implements Iterable,
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(getIndex());
- shardsStats.toXContent(builder, params);
- stats.toXContent(builder, params);
+ builder.field(SnapshotShardsStats.Fields.SHARDS_STATS, shardsStats, params);
+ builder.field(SnapshotStats.Fields.STATS, stats, params);
builder.startObject(Fields.SHARDS);
for (SnapshotIndexShardStatus shard : indexShards.values()) {
shard.toXContent(builder, params);
@@ -107,4 +122,61 @@ public class SnapshotIndexStatus implements Iterable,
builder.endObject();
return builder;
}
+
+ static final ObjectParser.NamedObjectParser PARSER;
+ static {
+ ConstructingObjectParser innerParser = new ConstructingObjectParser<>(
+ "snapshot_index_status", true,
+ (Object[] parsedObjects, String index) -> {
+ int i = 0;
+ SnapshotShardsStats shardsStats = ((SnapshotShardsStats) parsedObjects[i++]);
+ SnapshotStats stats = ((SnapshotStats) parsedObjects[i++]);
+ @SuppressWarnings("unchecked") List shardStatuses =
+ (List) parsedObjects[i];
+
+ final Map indexShards;
+ if (shardStatuses == null || shardStatuses.isEmpty()) {
+ indexShards = emptyMap();
+ } else {
+ indexShards = new HashMap<>(shardStatuses.size());
+ for (SnapshotIndexShardStatus shardStatus : shardStatuses) {
+ indexShards.put(shardStatus.getShardId().getId(), shardStatus);
+ }
+ }
+ return new SnapshotIndexStatus(index, indexShards, shardsStats, stats);
+ });
+ innerParser.declareObject(constructorArg(), (p, c) -> SnapshotShardsStats.PARSER.apply(p, null),
+ new ParseField(SnapshotShardsStats.Fields.SHARDS_STATS));
+ innerParser.declareObject(constructorArg(), (p, c) -> SnapshotStats.fromXContent(p),
+ new ParseField(SnapshotStats.Fields.STATS));
+ innerParser.declareNamedObjects(constructorArg(), SnapshotIndexShardStatus.PARSER, new ParseField(Fields.SHARDS));
+ PARSER = ((p, c, name) -> innerParser.apply(p, name));
+ }
+
+ public static SnapshotIndexStatus fromXContent(XContentParser parser) throws IOException {
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
+ return PARSER.parse(parser, null, parser.currentName());
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ SnapshotIndexStatus that = (SnapshotIndexStatus) o;
+
+ if (index != null ? !index.equals(that.index) : that.index != null) return false;
+ if (indexShards != null ? !indexShards.equals(that.indexShards) : that.indexShards != null) return false;
+ if (shardsStats != null ? !shardsStats.equals(that.shardsStats) : that.shardsStats != null) return false;
+ return stats != null ? stats.equals(that.stats) : that.stats == null;
+ }
+
+ @Override
+ public int hashCode() {
+ int result = index != null ? index.hashCode() : 0;
+ result = 31 * result + (indexShards != null ? indexShards.hashCode() : 0);
+ result = 31 * result + (shardsStats != null ? shardsStats.hashCode() : 0);
+ result = 31 * result + (stats != null ? stats.hashCode() : 0);
+ return result;
+ }
}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStats.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStats.java
index c74dd5af1ee..c0ac432292d 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStats.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStats.java
@@ -19,17 +19,22 @@
package org.elasticsearch.action.admin.cluster.snapshots.status;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.ToXContentFragment;
+import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Collection;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+
/**
* Status of a snapshot shards
*/
-public class SnapshotShardsStats implements ToXContentFragment {
+public class SnapshotShardsStats implements ToXContentObject {
private int initializingShards;
private int startedShards;
@@ -63,6 +68,16 @@ public class SnapshotShardsStats implements ToXContentFragment {
}
}
+ public SnapshotShardsStats(int initializingShards, int startedShards, int finalizingShards, int doneShards, int failedShards,
+ int totalShards) {
+ this.initializingShards = initializingShards;
+ this.startedShards = startedShards;
+ this.finalizingShards = finalizingShards;
+ this.doneShards = doneShards;
+ this.failedShards = failedShards;
+ this.totalShards = totalShards;
+ }
+
/**
* Number of shards with the snapshot in the initializing stage
*/
@@ -117,15 +132,68 @@ public class SnapshotShardsStats implements ToXContentFragment {
@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
- builder.startObject(Fields.SHARDS_STATS);
- builder.field(Fields.INITIALIZING, getInitializingShards());
- builder.field(Fields.STARTED, getStartedShards());
- builder.field(Fields.FINALIZING, getFinalizingShards());
- builder.field(Fields.DONE, getDoneShards());
- builder.field(Fields.FAILED, getFailedShards());
- builder.field(Fields.TOTAL, getTotalShards());
+ builder.startObject();
+ {
+ builder.field(Fields.INITIALIZING, getInitializingShards());
+ builder.field(Fields.STARTED, getStartedShards());
+ builder.field(Fields.FINALIZING, getFinalizingShards());
+ builder.field(Fields.DONE, getDoneShards());
+ builder.field(Fields.FAILED, getFailedShards());
+ builder.field(Fields.TOTAL, getTotalShards());
+ }
builder.endObject();
return builder;
}
+ static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(
+ Fields.SHARDS_STATS, true,
+ (Object[] parsedObjects) -> {
+ int i = 0;
+ int initializingShards = (int) parsedObjects[i++];
+ int startedShards = (int) parsedObjects[i++];
+ int finalizingShards = (int) parsedObjects[i++];
+ int doneShards = (int) parsedObjects[i++];
+ int failedShards = (int) parsedObjects[i++];
+ int totalShards = (int) parsedObjects[i];
+ return new SnapshotShardsStats(initializingShards, startedShards, finalizingShards, doneShards, failedShards, totalShards);
+ }
+ );
+ static {
+ PARSER.declareInt(constructorArg(), new ParseField(Fields.INITIALIZING));
+ PARSER.declareInt(constructorArg(), new ParseField(Fields.STARTED));
+ PARSER.declareInt(constructorArg(), new ParseField(Fields.FINALIZING));
+ PARSER.declareInt(constructorArg(), new ParseField(Fields.DONE));
+ PARSER.declareInt(constructorArg(), new ParseField(Fields.FAILED));
+ PARSER.declareInt(constructorArg(), new ParseField(Fields.TOTAL));
+ }
+
+ public static SnapshotShardsStats fromXContent(XContentParser parser) throws IOException {
+ return PARSER.apply(parser, null);
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ SnapshotShardsStats that = (SnapshotShardsStats) o;
+
+ if (initializingShards != that.initializingShards) return false;
+ if (startedShards != that.startedShards) return false;
+ if (finalizingShards != that.finalizingShards) return false;
+ if (doneShards != that.doneShards) return false;
+ if (failedShards != that.failedShards) return false;
+ return totalShards == that.totalShards;
+ }
+
+ @Override
+ public int hashCode() {
+ int result = initializingShards;
+ result = 31 * result + startedShards;
+ result = 31 * result + finalizingShards;
+ result = 31 * result + doneShards;
+ result = 31 * result + failedShards;
+ result = 31 * result + totalShards;
+ return result;
+ }
}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStats.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStats.java
index 76f6b219184..6cb56bd88dc 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStats.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStats.java
@@ -26,12 +26,14 @@ import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.ToXContentFragment;
+import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentParserUtils;
import java.io.IOException;
-public class SnapshotStats implements Streamable, ToXContentFragment {
+public class SnapshotStats implements Streamable, ToXContentObject {
private long startTime;
private long time;
@@ -176,35 +178,132 @@ public class SnapshotStats implements Streamable, ToXContentFragment {
@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
- builder.startObject(Fields.STATS)
- // incremental starts
- .startObject(Fields.INCREMENTAL)
- .field(Fields.FILE_COUNT, getIncrementalFileCount())
- .humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getIncrementalSize()))
- // incremental ends
- .endObject();
+ builder.startObject();
+ {
+ builder.startObject(Fields.INCREMENTAL);
+ {
+ builder.field(Fields.FILE_COUNT, getIncrementalFileCount());
+ builder.humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getIncrementalSize()));
+ }
+ builder.endObject();
- if (getProcessedFileCount() != getIncrementalFileCount()) {
- // processed starts
- builder.startObject(Fields.PROCESSED)
- .field(Fields.FILE_COUNT, getProcessedFileCount())
- .humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getProcessedSize()))
- // processed ends
- .endObject();
+ if (getProcessedFileCount() != getIncrementalFileCount()) {
+ builder.startObject(Fields.PROCESSED);
+ {
+ builder.field(Fields.FILE_COUNT, getProcessedFileCount());
+ builder.humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getProcessedSize()));
+ }
+ builder.endObject();
+ }
+
+ builder.startObject(Fields.TOTAL);
+ {
+ builder.field(Fields.FILE_COUNT, getTotalFileCount());
+ builder.humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getTotalSize()));
+ }
+ builder.endObject();
+
+ // timings stats
+ builder.field(Fields.START_TIME_IN_MILLIS, getStartTime());
+ builder.humanReadableField(Fields.TIME_IN_MILLIS, Fields.TIME, new TimeValue(getTime()));
}
- // total starts
- builder.startObject(Fields.TOTAL)
- .field(Fields.FILE_COUNT, getTotalFileCount())
- .humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getTotalSize()))
- // total ends
- .endObject();
- // timings stats
- builder.field(Fields.START_TIME_IN_MILLIS, getStartTime())
- .humanReadableField(Fields.TIME_IN_MILLIS, Fields.TIME, new TimeValue(getTime()));
-
return builder.endObject();
}
+ public static SnapshotStats fromXContent(XContentParser parser) throws IOException {
+ // Parse this old school style instead of using the ObjectParser since there's an impedance mismatch between how the
+ // object has historically been written as JSON versus how it is structured in Java.
+ XContentParser.Token token = parser.currentToken();
+ if (token == null) {
+ token = parser.nextToken();
+ }
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation);
+ long startTime = 0;
+ long time = 0;
+ int incrementalFileCount = 0;
+ int totalFileCount = 0;
+ int processedFileCount = 0;
+ long incrementalSize = 0;
+ long totalSize = 0;
+ long processedSize = 0;
+ while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
+ String currentName = parser.currentName();
+ token = parser.nextToken();
+ if (currentName.equals(Fields.INCREMENTAL)) {
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation);
+ while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
+ String innerName = parser.currentName();
+ token = parser.nextToken();
+ if (innerName.equals(Fields.FILE_COUNT)) {
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation);
+ incrementalFileCount = parser.intValue();
+ } else if (innerName.equals(Fields.SIZE_IN_BYTES)) {
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation);
+ incrementalSize = parser.longValue();
+ } else {
+ // Unknown sub field, skip
+ if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.START_ARRAY) {
+ parser.skipChildren();
+ }
+ }
+ }
+ } else if (currentName.equals(Fields.PROCESSED)) {
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation);
+ while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
+ String innerName = parser.currentName();
+ token = parser.nextToken();
+ if (innerName.equals(Fields.FILE_COUNT)) {
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation);
+ processedFileCount = parser.intValue();
+ } else if (innerName.equals(Fields.SIZE_IN_BYTES)) {
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation);
+ processedSize = parser.longValue();
+ } else {
+ // Unknown sub field, skip
+ if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.START_ARRAY) {
+ parser.skipChildren();
+ }
+ }
+ }
+ } else if (currentName.equals(Fields.TOTAL)) {
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation);
+ while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
+ String innerName = parser.currentName();
+ token = parser.nextToken();
+ if (innerName.equals(Fields.FILE_COUNT)) {
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation);
+ totalFileCount = parser.intValue();
+ } else if (innerName.equals(Fields.SIZE_IN_BYTES)) {
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation);
+ totalSize = parser.longValue();
+ } else {
+ // Unknown sub field, skip
+ if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.START_ARRAY) {
+ parser.skipChildren();
+ }
+ }
+ }
+ } else if (currentName.equals(Fields.START_TIME_IN_MILLIS)) {
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation);
+ startTime = parser.longValue();
+ } else if (currentName.equals(Fields.TIME_IN_MILLIS)) {
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation);
+ time = parser.longValue();
+ } else {
+ // Unknown field, skip
+ if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.START_ARRAY) {
+ parser.skipChildren();
+ }
+ }
+ }
+ return new SnapshotStats(startTime, time, incrementalFileCount, totalFileCount, processedFileCount, incrementalSize, totalSize,
+ processedSize);
+ }
+
void add(SnapshotStats stats) {
incrementalFileCount += stats.incrementalFileCount;
totalFileCount += stats.totalFileCount;
@@ -229,4 +328,34 @@ public class SnapshotStats implements Streamable, ToXContentFragment {
time = endTime - startTime;
}
}
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ SnapshotStats that = (SnapshotStats) o;
+
+ if (startTime != that.startTime) return false;
+ if (time != that.time) return false;
+ if (incrementalFileCount != that.incrementalFileCount) return false;
+ if (totalFileCount != that.totalFileCount) return false;
+ if (processedFileCount != that.processedFileCount) return false;
+ if (incrementalSize != that.incrementalSize) return false;
+ if (totalSize != that.totalSize) return false;
+ return processedSize == that.processedSize;
+ }
+
+ @Override
+ public int hashCode() {
+ int result = (int) (startTime ^ (startTime >>> 32));
+ result = 31 * result + (int) (time ^ (time >>> 32));
+ result = 31 * result + incrementalFileCount;
+ result = 31 * result + totalFileCount;
+ result = 31 * result + processedFileCount;
+ result = 31 * result + (int) (incrementalSize ^ (incrementalSize >>> 32));
+ result = 31 * result + (int) (totalSize ^ (totalSize >>> 32));
+ result = 31 * result + (int) (processedSize ^ (processedSize >>> 32));
+ return result;
+ }
}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java
index f7545ea0236..618bb54c901 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java
@@ -20,15 +20,21 @@
package org.elasticsearch.action.admin.cluster.snapshots.status;
import org.elasticsearch.Version;
+import org.elasticsearch.cluster.SnapshotsInProgress;
import org.elasticsearch.cluster.SnapshotsInProgress.State;
+import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.snapshots.Snapshot;
+import org.elasticsearch.snapshots.SnapshotId;
import java.io.IOException;
import java.util.ArrayList;
@@ -40,7 +46,11 @@ import java.util.Map;
import java.util.Objects;
import java.util.Set;
+import static java.util.Collections.emptyList;
+import static java.util.Collections.emptyMap;
import static java.util.Collections.unmodifiableMap;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
* Status of a snapshot
@@ -72,6 +82,18 @@ public class SnapshotStatus implements ToXContentObject, Streamable {
updateShardStats();
}
+ private SnapshotStatus(Snapshot snapshot, State state, List<SnapshotIndexShardStatus> shards,
+ Map<String, SnapshotIndexStatus> indicesStatus, SnapshotShardsStats shardsStats,
+ SnapshotStats stats, Boolean includeGlobalState) {
+ this.snapshot = snapshot;
+ this.state = state;
+ this.shards = shards;
+ this.indicesStatus = indicesStatus;
+ this.shardsStats = shardsStats;
+ this.stats = stats;
+ this.includeGlobalState = includeGlobalState;
+ }
+
SnapshotStatus() {
}
@@ -207,8 +229,8 @@ public class SnapshotStatus implements ToXContentObject, Streamable {
if (includeGlobalState != null) {
builder.field(INCLUDE_GLOBAL_STATE, includeGlobalState);
}
- shardsStats.toXContent(builder, params);
- stats.toXContent(builder, params);
+ builder.field(SnapshotShardsStats.Fields.SHARDS_STATS, shardsStats, params);
+ builder.field(SnapshotStats.Fields.STATS, stats, params);
builder.startObject(INDICES);
for (SnapshotIndexStatus indexStatus : getIndices().values()) {
indexStatus.toXContent(builder, params);
@@ -218,6 +240,52 @@ public class SnapshotStatus implements ToXContentObject, Streamable {
return builder;
}
+ static final ConstructingObjectParser<SnapshotStatus, Void> PARSER = new ConstructingObjectParser<>(
+ "snapshot_status", true,
+ (Object[] parsedObjects) -> {
+ int i = 0;
+ String name = (String) parsedObjects[i++];
+ String repository = (String) parsedObjects[i++];
+ String uuid = (String) parsedObjects[i++];
+ String rawState = (String) parsedObjects[i++];
+ Boolean includeGlobalState = (Boolean) parsedObjects[i++];
+ SnapshotStats stats = ((SnapshotStats) parsedObjects[i++]);
+ SnapshotShardsStats shardsStats = ((SnapshotShardsStats) parsedObjects[i++]);
+ @SuppressWarnings("unchecked") List<SnapshotIndexStatus> indices = ((List<SnapshotIndexStatus>) parsedObjects[i]);
+
+ Snapshot snapshot = new Snapshot(repository, new SnapshotId(name, uuid));
+ SnapshotsInProgress.State state = SnapshotsInProgress.State.valueOf(rawState);
+ Map<String, SnapshotIndexStatus> indicesStatus;
+ List<SnapshotIndexShardStatus> shards;
+ if (indices == null || indices.isEmpty()) {
+ indicesStatus = emptyMap();
+ shards = emptyList();
+ } else {
+ indicesStatus = new HashMap<>(indices.size());
+ shards = new ArrayList<>();
+ for (SnapshotIndexStatus index : indices) {
+ indicesStatus.put(index.getIndex(), index);
+ shards.addAll(index.getShards().values());
+ }
+ }
+ return new SnapshotStatus(snapshot, state, shards, indicesStatus, shardsStats, stats, includeGlobalState);
+ });
+ static {
+ PARSER.declareString(constructorArg(), new ParseField(SNAPSHOT));
+ PARSER.declareString(constructorArg(), new ParseField(REPOSITORY));
+ PARSER.declareString(constructorArg(), new ParseField(UUID));
+ PARSER.declareString(constructorArg(), new ParseField(STATE));
+ PARSER.declareBoolean(optionalConstructorArg(), new ParseField(INCLUDE_GLOBAL_STATE));
+ PARSER.declareField(constructorArg(), SnapshotStats::fromXContent, new ParseField(SnapshotStats.Fields.STATS),
+ ObjectParser.ValueType.OBJECT);
+ PARSER.declareObject(constructorArg(), SnapshotShardsStats.PARSER, new ParseField(SnapshotShardsStats.Fields.SHARDS_STATS));
+ PARSER.declareNamedObjects(constructorArg(), SnapshotIndexStatus.PARSER, new ParseField(INDICES));
+ }
+
+ public static SnapshotStatus fromXContent(XContentParser parser) throws IOException {
+ return PARSER.parse(parser, null);
+ }
+
private void updateShardStats() {
stats = new SnapshotStats();
shardsStats = new SnapshotShardsStats(shards);
@@ -225,4 +293,31 @@ public class SnapshotStatus implements ToXContentObject, Streamable {
stats.add(shard.getStats());
}
}
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ SnapshotStatus that = (SnapshotStatus) o;
+
+ if (snapshot != null ? !snapshot.equals(that.snapshot) : that.snapshot != null) return false;
+ if (state != that.state) return false;
+ if (indicesStatus != null ? !indicesStatus.equals(that.indicesStatus) : that.indicesStatus != null)
+ return false;
+ if (shardsStats != null ? !shardsStats.equals(that.shardsStats) : that.shardsStats != null) return false;
+ if (stats != null ? !stats.equals(that.stats) : that.stats != null) return false;
+ return includeGlobalState != null ? includeGlobalState.equals(that.includeGlobalState) : that.includeGlobalState == null;
+ }
+
+ @Override
+ public int hashCode() {
+ int result = snapshot != null ? snapshot.hashCode() : 0;
+ result = 31 * result + (state != null ? state.hashCode() : 0);
+ result = 31 * result + (indicesStatus != null ? indicesStatus.hashCode() : 0);
+ result = 31 * result + (shardsStats != null ? shardsStats.hashCode() : 0);
+ result = 31 * result + (stats != null ? stats.hashCode() : 0);
+ result = 31 * result + (includeGlobalState != null ? includeGlobalState.hashCode() : 0);
+ return result;
+ }
}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java
index d44a490680c..ef1435e4108 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java
@@ -20,16 +20,21 @@
package org.elasticsearch.action.admin.cluster.snapshots.status;
import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+
/**
* Snapshot status response
*/
@@ -85,4 +90,33 @@ public class SnapshotsStatusResponse extends ActionResponse implements ToXConten
return builder;
}
+ private static final ConstructingObjectParser<SnapshotsStatusResponse, Void> PARSER = new ConstructingObjectParser<>(
+ "snapshots_status_response", true,
+ (Object[] parsedObjects) -> {
+ @SuppressWarnings("unchecked") List<SnapshotStatus> snapshots = (List<SnapshotStatus>) parsedObjects[0];
+ return new SnapshotsStatusResponse(snapshots);
+ }
+ );
+ static {
+ PARSER.declareObjectArray(constructorArg(), SnapshotStatus.PARSER, new ParseField("snapshots"));
+ }
+
+ public static SnapshotsStatusResponse fromXContent(XContentParser parser) throws IOException {
+ return PARSER.parse(parser, null);
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ SnapshotsStatusResponse response = (SnapshotsStatusResponse) o;
+
+ return snapshots != null ? snapshots.equals(response.snapshots) : response.snapshots == null;
+ }
+
+ @Override
+ public int hashCode() {
+ return snapshots != null ? snapshots.hashCode() : 0;
+ }
}
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatusTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatusTests.java
new file mode 100644
index 00000000000..490319ef840
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatusTests.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.admin.cluster.snapshots.status;
+
+import java.io.IOException;
+import java.util.function.Predicate;
+
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentParserUtils;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+public class SnapshotIndexShardStatusTests extends AbstractXContentTestCase<SnapshotIndexShardStatus> {
+
+ @Override
+ protected SnapshotIndexShardStatus createTestInstance() {
+ return createForIndex(randomAlphaOfLength(10));
+ }
+
+ protected SnapshotIndexShardStatus createForIndex(String indexName) {
+ ShardId shardId = new ShardId(new Index(indexName, IndexMetaData.INDEX_UUID_NA_VALUE), randomIntBetween(0, 500));
+ SnapshotIndexShardStage stage = randomFrom(SnapshotIndexShardStage.values());
+ SnapshotStats stats = new SnapshotStatsTests().createTestInstance();
+ String nodeId = randomAlphaOfLength(20);
+ String failure = null;
+ if (rarely()) {
+ failure = randomAlphaOfLength(200);
+ }
+ return new SnapshotIndexShardStatus(shardId, stage, stats, nodeId, failure);
+ }
+
+ @Override
+ protected Predicate<String> getRandomFieldsExcludeFilter() {
+ // Do not place random fields in the root object since its fields correspond to shard names.
+ return String::isEmpty;
+ }
+
+ @Override
+ protected SnapshotIndexShardStatus doParseInstance(XContentParser parser) throws IOException {
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser::getTokenLocation);
+ SnapshotIndexShardStatus status = SnapshotIndexShardStatus.fromXContent(parser, parser.currentName());
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser::getTokenLocation);
+ return status;
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+}
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatusTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatusTests.java
new file mode 100644
index 00000000000..92eb355f3a6
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatusTests.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.admin.cluster.snapshots.status;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.function.Predicate;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentParserUtils;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+
+public class SnapshotIndexStatusTests extends AbstractXContentTestCase<SnapshotIndexStatus> {
+
+ @Override
+ protected SnapshotIndexStatus createTestInstance() {
+ String index = randomAlphaOfLength(10);
+ List<SnapshotIndexShardStatus> shardStatuses = new ArrayList<>();
+ SnapshotIndexShardStatusTests builder = new SnapshotIndexShardStatusTests();
+ for (int idx = 0; idx < randomIntBetween(0, 10); idx++) {
+ shardStatuses.add(builder.createForIndex(index));
+ }
+ return new SnapshotIndexStatus(index, shardStatuses);
+ }
+
+ @Override
+ protected Predicate<String> getRandomFieldsExcludeFilter() {
+ // Do not place random fields in the root object or the shards field since their fields correspond to names.
+ return (s) -> s.isEmpty() || s.endsWith("shards");
+ }
+
+ @Override
+ protected SnapshotIndexStatus doParseInstance(XContentParser parser) throws IOException {
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser::getTokenLocation);
+ SnapshotIndexStatus status = SnapshotIndexStatus.fromXContent(parser);
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser::getTokenLocation);
+ return status;
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+}
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStatsTests.java
new file mode 100644
index 00000000000..ac00896983d
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStatsTests.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.admin.cluster.snapshots.status;
+
+import java.io.IOException;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+public class SnapshotShardsStatsTests extends AbstractXContentTestCase<SnapshotShardsStats> {
+
+ @Override
+ protected SnapshotShardsStats createTestInstance() {
+ int initializingShards = randomInt();
+ int startedShards = randomInt();
+ int finalizingShards = randomInt();
+ int doneShards = randomInt();
+ int failedShards = randomInt();
+ int totalShards = randomInt();
+ return new SnapshotShardsStats(initializingShards, startedShards, finalizingShards, doneShards, failedShards, totalShards);
+ }
+
+ @Override
+ protected SnapshotShardsStats doParseInstance(XContentParser parser) throws IOException {
+ return SnapshotShardsStats.fromXContent(parser);
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+}
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatsTests.java
new file mode 100644
index 00000000000..2822a9661fd
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatsTests.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.admin.cluster.snapshots.status;
+
+import java.io.IOException;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+public class SnapshotStatsTests extends AbstractXContentTestCase<SnapshotStats> {
+
+ @Override
+ protected SnapshotStats createTestInstance() {
+ long startTime = randomNonNegativeLong();
+ long time = randomNonNegativeLong();
+ int incrementalFileCount = randomIntBetween(0, Integer.MAX_VALUE);
+ int totalFileCount = randomIntBetween(0, Integer.MAX_VALUE);
+ int processedFileCount = randomIntBetween(0, Integer.MAX_VALUE);
+ long incrementalSize = ((long)randomIntBetween(0, Integer.MAX_VALUE)) * 2;
+ long totalSize = ((long)randomIntBetween(0, Integer.MAX_VALUE)) * 2;
+ long processedSize = ((long)randomIntBetween(0, Integer.MAX_VALUE)) * 2;
+ return new SnapshotStats(startTime, time, incrementalFileCount, totalFileCount,
+ processedFileCount, incrementalSize, totalSize, processedSize);
+ }
+
+ @Override
+ protected SnapshotStats doParseInstance(XContentParser parser) throws IOException {
+ return SnapshotStats.fromXContent(parser);
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+}
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatusTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatusTests.java
index 3ece0f9f107..dbd45640c7b 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatusTests.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatusTests.java
@@ -21,16 +21,19 @@ package org.elasticsearch.action.admin.cluster.snapshots.status;
import org.elasticsearch.cluster.SnapshotsInProgress;
import org.elasticsearch.common.UUIDs;
+import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.snapshots.Snapshot;
import org.elasticsearch.snapshots.SnapshotId;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.AbstractXContentTestCase;
+import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
+import java.util.function.Predicate;
-public class SnapshotStatusTests extends ESTestCase {
+public class SnapshotStatusTests extends AbstractXContentTestCase<SnapshotStatus> {
public void testToString() throws Exception {
@@ -146,4 +149,39 @@ public class SnapshotStatusTests extends ESTestCase {
"}";
assertEquals(expected, status.toString());
}
+
+ @Override
+ protected SnapshotStatus createTestInstance() {
+ SnapshotsInProgress.State state = randomFrom(SnapshotsInProgress.State.values());
+ String uuid = UUIDs.randomBase64UUID();
+ SnapshotId id = new SnapshotId("test-snap", uuid);
+ Snapshot snapshot = new Snapshot("test-repo", id);
+
+ SnapshotIndexShardStatusTests builder = new SnapshotIndexShardStatusTests();
+ builder.createTestInstance();
+
+ List<SnapshotIndexShardStatus> snapshotIndexShardStatuses = new ArrayList<>();
+ for (int idx = 0; idx < randomIntBetween(0, 10); idx++) {
+ SnapshotIndexShardStatus snapshotIndexShardStatus = builder.createTestInstance();
+ snapshotIndexShardStatuses.add(snapshotIndexShardStatus);
+ }
+ boolean includeGlobalState = randomBoolean();
+ return new SnapshotStatus(snapshot, state, snapshotIndexShardStatuses, includeGlobalState);
+ }
+
+ @Override
+ protected Predicate<String> getRandomFieldsExcludeFilter() {
+ // Do not place random fields in the indices field or shards field since their fields correspond to names.
+ return (s) -> s.endsWith("shards") || s.endsWith("indices");
+ }
+
+ @Override
+ protected SnapshotStatus doParseInstance(XContentParser parser) throws IOException {
+ return SnapshotStatus.fromXContent(parser);
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
}
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponseTests.java
new file mode 100644
index 00000000000..d1ad028296d
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponseTests.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.admin.cluster.snapshots.status;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.function.Predicate;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+public class SnapshotsStatusResponseTests extends AbstractXContentTestCase<SnapshotsStatusResponse> {
+
+ @Override
+ protected SnapshotsStatusResponse doParseInstance(XContentParser parser) throws IOException {
+ return SnapshotsStatusResponse.fromXContent(parser);
+ }
+
+ @Override
+ protected Predicate<String> getRandomFieldsExcludeFilter() {
+ // Do not place random fields in the indices field or shards field since their fields correspond to names.
+ return (s) -> s.endsWith("shards") || s.endsWith("indices");
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+
+ @Override
+ protected SnapshotsStatusResponse createTestInstance() {
+ SnapshotStatusTests statusBuilder = new SnapshotStatusTests();
+ List<SnapshotStatus> snapshotStatuses = new ArrayList<>();
+ for (int idx = 0; idx < randomIntBetween(0, 5); idx++) {
+ snapshotStatuses.add(statusBuilder.createTestInstance());
+ }
+ return new SnapshotsStatusResponse(snapshotStatuses);
+ }
+}
From efcfd0d8275c68b9410c771aacd6781f866fb473 Mon Sep 17 00:00:00 2001
From: Lisa Cawley
Date: Wed, 11 Jul 2018 09:08:32 -0700
Subject: [PATCH 09/17] [DOCS] Removes alternative docker pull example (#31934)
---
docs/reference/setup/install/docker.asciidoc | 12 +++++-------
1 file changed, 5 insertions(+), 7 deletions(-)
diff --git a/docs/reference/setup/install/docker.asciidoc b/docs/reference/setup/install/docker.asciidoc
index b18f7c57a16..523217b921a 100644
--- a/docs/reference/setup/install/docker.asciidoc
+++ b/docs/reference/setup/install/docker.asciidoc
@@ -8,8 +8,6 @@ A list of all published Docker images and tags can be found in
https://www.docker.elastic.co[www.docker.elastic.co]. The source code can be found
on https://github.com/elastic/elasticsearch-docker/tree/{branch}[GitHub].
-==== Image types
-
These images are free to use under the Elastic license. They contain open source
and free commercial features and access to paid commercial features.
{xpack-ref}/license-management.html[Start a 30-day trial] to try out all of the
@@ -17,9 +15,6 @@ paid commercial features. See the
https://www.elastic.co/subscriptions[Subscriptions] page for information about
Elastic license levels.
-Alternatively, you can download `-oss` images, which contain only features that
-are available under the Apache 2.0 license.
-
==== Pulling the image
Obtaining {es} for Docker is as simple as issuing a +docker pull+ command
@@ -34,14 +29,17 @@ endif::[]
ifeval::["{release-state}"!="unreleased"]
-Docker images can be retrieved with the following commands:
+For example, the Docker image can be retrieved with the following command:
["source","sh",subs="attributes"]
--------------------------------------------
docker pull {docker-repo}:{version}
-docker pull {docker-repo}-oss:{version}
--------------------------------------------
+Alternatively, you can download other Docker images that contain only features
+that are available under the Apache 2.0 license from
+https://www.docker.elastic.co[www.docker.elastic.co].
+
endif::[]
[[docker-cli-run]]
From aedbfc63cdb37f8f5735b06364d3983fbcb7d92e Mon Sep 17 00:00:00 2001
From: Clinton Gormley
Date: Wed, 11 Jul 2018 20:17:05 +0200
Subject: [PATCH 10/17] Docs: Added note about cloud service to installation
and getting started
---
docs/reference/getting-started.asciidoc | 5 +++++
docs/reference/setup/install.asciidoc | 7 ++++++-
2 files changed, 11 insertions(+), 1 deletion(-)
diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc
index 39006d1ab53..a29a743fed8 100755
--- a/docs/reference/getting-started.asciidoc
+++ b/docs/reference/getting-started.asciidoc
@@ -104,6 +104,11 @@ With that out of the way, let's get started with the fun part...
== Installation
+You can skip installation completely by using our hosted
+Elasticsearch Service on https://www.elastic.co/cloud[Elastic Cloud], which is
+available on AWS and GCP. You can
+https://www.elastic.co/cloud/elasticsearch-service/signup[try out the hosted service] for free.
+
Elasticsearch requires at least Java 8. Specifically as of this writing, it is recommended that you use the Oracle JDK version {jdk}. Java installation varies from platform to platform so we won't go into those details here. Oracle's recommended installation documentation can be found on http://docs.oracle.com/javase/8/docs/technotes/guides/install/install_overview.html[Oracle's website]. Suffice to say, before you install Elasticsearch, please check your Java version first by running (and then install/upgrade accordingly if needed):
[source,sh]
diff --git a/docs/reference/setup/install.asciidoc b/docs/reference/setup/install.asciidoc
index 783cb804e7a..7675e5ad146 100644
--- a/docs/reference/setup/install.asciidoc
+++ b/docs/reference/setup/install.asciidoc
@@ -1,6 +1,11 @@
[[install-elasticsearch]]
== Installing Elasticsearch
+Elasticsearch can be run on your own hardware or using our hosted
+Elasticsearch Service on https://www.elastic.co/cloud[Elastic Cloud], which is
+available on AWS and GCP. You can
+https://www.elastic.co/cloud/elasticsearch-service/signup[try out the hosted service] for free.
+
Elasticsearch is provided in the following package formats:
[horizontal]
@@ -38,7 +43,7 @@ Elasticsearch on Windows. MSIs may be downloaded from the Elasticsearch website.
`docker`::
Images are available for running Elasticsearch as Docker containers. They may be
-downloaded from the Elastic Docker Registry.
+downloaded from the Elastic Docker Registry.
+
{ref}/docker.html[Install {es} with Docker]
From 939983d783d7f17042c6cdc14bbf3e18220e3196 Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Wed, 11 Jul 2018 14:42:55 -0400
Subject: [PATCH 11/17] Switch reindex tests to new style requests (#31941)
In #29623 we added `Request` object flavored requests to the low level
REST client and in #30315 we deprecated the old `performRequest`s. This
changes all calls in the `modules/reindex` project to use the new
versions.
---
.../index/reindex/ManyDocumentsIT.java | 65 ++++++++++-------
.../remote/ReindexFromOldRemoteIT.java | 70 ++++++++-----------
2 files changed, 69 insertions(+), 66 deletions(-)
diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ManyDocumentsIT.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ManyDocumentsIT.java
index e9082c96fd1..6aa1046492c 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ManyDocumentsIT.java
+++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ManyDocumentsIT.java
@@ -19,19 +19,13 @@
package org.elasticsearch.index.reindex;
-import org.apache.http.entity.ContentType;
-import org.apache.http.entity.StringEntity;
-import org.elasticsearch.client.Response;
-import org.elasticsearch.common.xcontent.XContentHelper;
-import org.elasticsearch.common.xcontent.json.JsonXContent;
+import org.elasticsearch.client.Request;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.junit.Before;
import java.io.IOException;
import java.util.Map;
-import static java.util.Collections.emptyMap;
-import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.hasEntry;
/**
@@ -50,48 +44,69 @@ public class ManyDocumentsIT extends ESRestTestCase {
bulk.append("{\"index\":{}}\n");
bulk.append("{\"test\":\"test\"}\n");
}
- client().performRequest("POST", "/test/test/_bulk", singletonMap("refresh", "true"),
- new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON));
+ Request request = new Request("POST", "/test/test/_bulk");
+ request.addParameter("refresh", "true");
+ request.setJsonEntity(bulk.toString());
+ client().performRequest(request);
}
public void testReindex() throws IOException {
- Map<String, Object> response = toMap(client().performRequest("POST", "/_reindex", emptyMap(), new StringEntity(
- "{\"source\":{\"index\":\"test\"}, \"dest\":{\"index\":\"des\"}}",
- ContentType.APPLICATION_JSON)));
+ Request request = new Request("POST", "/_reindex");
+ request.setJsonEntity(
+ "{\n" +
+ " \"source\":{\n" +
+ " \"index\":\"test\"\n" +
+ " },\n" +
+ " \"dest\":{\n" +
+ " \"index\":\"des\"\n" +
+ " }\n" +
+ "}");
+ Map<String, Object> response = entityAsMap(client().performRequest(request));
assertThat(response, hasEntry("total", count));
assertThat(response, hasEntry("created", count));
}
public void testReindexFromRemote() throws IOException {
- Map<?, ?> nodesInfo = toMap(client().performRequest("GET", "/_nodes/http"));
+ Map<?, ?> nodesInfo = entityAsMap(client().performRequest(new Request("GET", "/_nodes/http")));
nodesInfo = (Map<?, ?>) nodesInfo.get("nodes");
Map<?, ?> nodeInfo = (Map<?, ?>) nodesInfo.values().iterator().next();
Map<?, ?> http = (Map<?, ?>) nodeInfo.get("http");
String remote = "http://"+ http.get("publish_address");
- Map<String, Object> response = toMap(client().performRequest("POST", "/_reindex", emptyMap(), new StringEntity(
- "{\"source\":{\"index\":\"test\",\"remote\":{\"host\":\"" + remote + "\"}}, \"dest\":{\"index\":\"des\"}}",
- ContentType.APPLICATION_JSON)));
+ Request request = new Request("POST", "/_reindex");
+ request.setJsonEntity(
+ "{\n" +
+ " \"source\":{\n" +
+ " \"index\":\"test\",\n" +
+ " \"remote\":{\n" +
+ " \"host\":\"" + remote + "\"\n" +
+ " }\n" +
+ " }\n," +
+ " \"dest\":{\n" +
+ " \"index\":\"des\"\n" +
+ " }\n" +
+ "}");
+ Map<String, Object> response = entityAsMap(client().performRequest(request));
assertThat(response, hasEntry("total", count));
assertThat(response, hasEntry("created", count));
}
public void testUpdateByQuery() throws IOException {
-        Map<String, Object> response = toMap(client().performRequest("POST", "/test/_update_by_query"));
+        Map<String, Object> response = entityAsMap(client().performRequest(new Request("POST", "/test/_update_by_query")));
assertThat(response, hasEntry("total", count));
assertThat(response, hasEntry("updated", count));
}
public void testDeleteByQuery() throws IOException {
-        Map<String, Object> response = toMap(client().performRequest("POST", "/test/_delete_by_query", emptyMap(), new StringEntity(
- "{\"query\":{\"match_all\":{}}}",
- ContentType.APPLICATION_JSON)));
+ Request request = new Request("POST", "/test/_delete_by_query");
+ request.setJsonEntity(
+ "{\n" +
+ " \"query\":{\n" +
+ " \"match_all\": {}\n" +
+ " }\n" +
+ "}");
+        Map<String, Object> response = entityAsMap(client().performRequest(request));
assertThat(response, hasEntry("total", count));
assertThat(response, hasEntry("deleted", count));
}
-
-    static Map<String, Object> toMap(Response response) throws IOException {
- return XContentHelper.convertToMap(JsonXContent.jsonXContent, response.getEntity().getContent(), false);
- }
-
}
diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/ReindexFromOldRemoteIT.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/ReindexFromOldRemoteIT.java
index 5d359053a66..9feed83595f 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/ReindexFromOldRemoteIT.java
+++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/ReindexFromOldRemoteIT.java
@@ -19,25 +19,24 @@
package org.elasticsearch.index.reindex.remote;
-import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
-import org.apache.http.entity.ContentType;
-import org.apache.http.entity.StringEntity;
import org.apache.http.util.EntityUtils;
+import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
-import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.test.rest.ESRestTestCase;
import java.io.IOException;
-import java.util.Map;
-import java.util.TreeMap;
-import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.containsString;
public class ReindexFromOldRemoteIT extends ESRestTestCase {
+ /**
+ * Number of documents to test when reindexing from an old version.
+ */
+ private static final int DOCS = 5;
+
private void oldEsTestCase(String portPropertyName, String requestsPerSecond) throws IOException {
boolean enabled = Booleans.parseBoolean(System.getProperty("tests.fromOld"));
assumeTrue("test is disabled, probably because this is windows", enabled);
@@ -45,17 +44,19 @@ public class ReindexFromOldRemoteIT extends ESRestTestCase {
int oldEsPort = Integer.parseInt(System.getProperty(portPropertyName));
try (RestClient oldEs = RestClient.builder(new HttpHost("127.0.0.1", oldEsPort)).build()) {
try {
- HttpEntity entity = new StringEntity("{\"settings\":{\"number_of_shards\": 1}}", ContentType.APPLICATION_JSON);
- oldEs.performRequest("PUT", "/test", singletonMap("refresh", "true"), entity);
+ Request createIndex = new Request("PUT", "/test");
+ createIndex.setJsonEntity("{\"settings\":{\"number_of_shards\": 1}}");
+ oldEs.performRequest(createIndex);
- entity = new StringEntity("{\"test\":\"test\"}", ContentType.APPLICATION_JSON);
- oldEs.performRequest("PUT", "/test/doc/testdoc1", singletonMap("refresh", "true"), entity);
- oldEs.performRequest("PUT", "/test/doc/testdoc2", singletonMap("refresh", "true"), entity);
- oldEs.performRequest("PUT", "/test/doc/testdoc3", singletonMap("refresh", "true"), entity);
- oldEs.performRequest("PUT", "/test/doc/testdoc4", singletonMap("refresh", "true"), entity);
- oldEs.performRequest("PUT", "/test/doc/testdoc5", singletonMap("refresh", "true"), entity);
+ for (int i = 0; i < DOCS; i++) {
+ Request doc = new Request("PUT", "/test/doc/testdoc" + i);
+ doc.addParameter("refresh", "true");
+ doc.setJsonEntity("{\"test\":\"test\"}");
+ oldEs.performRequest(doc);
+ }
- entity = new StringEntity(
+ Request reindex = new Request("POST", "/_reindex");
+ reindex.setJsonEntity(
"{\n"
+ " \"source\":{\n"
+ " \"index\": \"test\",\n"
@@ -67,36 +68,23 @@ public class ReindexFromOldRemoteIT extends ESRestTestCase {
+ " \"dest\": {\n"
+ " \"index\": \"test\"\n"
+ " }\n"
- + "}",
- ContentType.APPLICATION_JSON);
-                Map<String, String> params = new TreeMap<>();
- params.put("refresh", "true");
- params.put("pretty", "true");
+ + "}");
+ reindex.addParameter("refresh", "true");
+ reindex.addParameter("pretty", "true");
if (requestsPerSecond != null) {
- params.put("requests_per_second", requestsPerSecond);
+ reindex.addParameter("requests_per_second", requestsPerSecond);
}
- client().performRequest("POST", "/_reindex", params, entity);
+ client().performRequest(reindex);
- Response response = client().performRequest("POST", "test/_search", singletonMap("pretty", "true"));
+ Request search = new Request("POST", "/test/_search");
+ search.addParameter("pretty", "true");
+ Response response = client().performRequest(search);
String result = EntityUtils.toString(response.getEntity());
- assertThat(result, containsString("\"_id\" : \"testdoc1\""));
- } finally {
- try {
- oldEs.performRequest("DELETE", "/test");
- } catch (ResponseException e) {
- /* Try not to throw ResponseException for as it'll eat the
- * real exception. This is because the rest client throws
- * exceptions in a "funny" way that isn't compatible with
- * `suppressed`. In the case of 404s we'll just log something
- * and move on because that just means that a previous
- * failure caused the index not to be created. */
- if (e.getResponse().getStatusLine().getStatusCode() == 404) {
- logger.warn("old index not deleted because it doesn't exist");
- } else {
- logger.error("failed to remove old index", e);
- fail("failed to remove old index, see log");
- }
+ for (int i = 0; i < DOCS; i++) {
+ assertThat(result, containsString("\"_id\" : \"testdoc" + i + "\""));
}
+ } finally {
+ oldEs.performRequest(new Request("DELETE", "/test"));
}
}
}
From b83e99a824cf5924eaf87beb9193c14588b9f84f Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Wed, 11 Jul 2018 14:52:45 -0400
Subject: [PATCH 12/17] Switch url repository rest tests to new style requests
(#31944)
In #29623 we added `Request` object flavored requests to the low level
REST client and in #30315 we deprecated the old `performRequest`s. This
changes all calls in the `module/repository-url` project to use the new
versions.
---
.../RepositoryURLClientYamlTestSuiteIT.java | 27 ++++++++++++-------
1 file changed, 17 insertions(+), 10 deletions(-)
diff --git a/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java b/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java
index f33fa98f0e3..65d9b87b07d 100644
--- a/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java
+++ b/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java
@@ -24,6 +24,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.apache.http.HttpEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.nio.entity.NStringEntity;
+import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.PathUtils;
@@ -44,7 +45,6 @@ import java.net.URL;
import java.util.List;
import java.util.Map;
-import static java.util.Collections.emptyMap;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
@@ -70,8 +70,10 @@ public class RepositoryURLClientYamlTestSuiteIT extends ESClientYamlSuiteTestCas
**/
@Before
public void registerRepositories() throws IOException {
- Response clusterSettingsResponse = client().performRequest("GET", "/_cluster/settings?include_defaults=true" +
- "&filter_path=defaults.path.repo,defaults.repositories.url.allowed_urls");
+ Request clusterSettingsRequest = new Request("GET", "/_cluster/settings");
+ clusterSettingsRequest.addParameter("include_defaults", "true");
+ clusterSettingsRequest.addParameter("filter_path", "defaults.path.repo,defaults.repositories.url.allowed_urls");
+ Response clusterSettingsResponse = client().performRequest(clusterSettingsRequest);
        Map<String, Object> clusterSettings = entityAsMap(clusterSettingsResponse);
@SuppressWarnings("unchecked")
@@ -83,13 +85,17 @@ public class RepositoryURLClientYamlTestSuiteIT extends ESClientYamlSuiteTestCas
final URI pathRepoUri = PathUtils.get(pathRepo).toUri().normalize();
// Create a FS repository using the path.repo location
- Response createFsRepositoryResponse = client().performRequest("PUT", "_snapshot/repository-fs", emptyMap(),
- buildRepositorySettings(FsRepository.TYPE, Settings.builder().put("location", pathRepo).build()));
+ Request createFsRepositoryRequest = new Request("PUT", "/_snapshot/repository-fs");
+ createFsRepositoryRequest.setEntity(buildRepositorySettings(FsRepository.TYPE,
+ Settings.builder().put("location", pathRepo).build()));
+ Response createFsRepositoryResponse = client().performRequest(createFsRepositoryRequest);
assertThat(createFsRepositoryResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
// Create a URL repository using the file://{path.repo} URL
- Response createFileRepositoryResponse = client().performRequest("PUT", "_snapshot/repository-file", emptyMap(),
- buildRepositorySettings(URLRepository.TYPE, Settings.builder().put("url", pathRepoUri.toString()).build()));
+ Request createFileRepositoryRequest = new Request("PUT", "/_snapshot/repository-file");
+ createFileRepositoryRequest.setEntity(buildRepositorySettings(URLRepository.TYPE,
+ Settings.builder().put("url", pathRepoUri.toString()).build()));
+ Response createFileRepositoryResponse = client().performRequest(createFileRepositoryRequest);
assertThat(createFileRepositoryResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
// Create a URL repository using the http://{fixture} URL
@@ -99,8 +105,10 @@ public class RepositoryURLClientYamlTestSuiteIT extends ESClientYamlSuiteTestCas
try {
InetAddress inetAddress = InetAddress.getByName(new URL(allowedUrl).getHost());
if (inetAddress.isAnyLocalAddress() || inetAddress.isLoopbackAddress()) {
- Response createUrlRepositoryResponse = client().performRequest("PUT", "_snapshot/repository-url", emptyMap(),
- buildRepositorySettings(URLRepository.TYPE, Settings.builder().put("url", allowedUrl).build()));
+ Request createUrlRepositoryRequest = new Request("PUT", "/_snapshot/repository-url");
+ createUrlRepositoryRequest.setEntity(buildRepositorySettings(URLRepository.TYPE,
+ Settings.builder().put("url", allowedUrl).build()));
+ Response createUrlRepositoryResponse = client().performRequest(createUrlRepositoryRequest);
assertThat(createUrlRepositoryResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
break;
}
@@ -126,4 +134,3 @@ public class RepositoryURLClientYamlTestSuiteIT extends ESClientYamlSuiteTestCas
}
}
}
-
From e955ffc38deea100ed535b6fefd9c8993fc6e039 Mon Sep 17 00:00:00 2001
From: Jimi Ford
Date: Wed, 11 Jul 2018 15:01:49 -0400
Subject: [PATCH 13/17] Docs: fix typo in datehistogram (#31972)
---
.../aggregations/bucket/datehistogram-aggregation.asciidoc | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc
index c2d1614ad6e..efbd8ef7389 100644
--- a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc
+++ b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc
@@ -33,7 +33,7 @@ Available expressions for interval: `year` (`1y`), `quarter` (`1q`), `month` (`1
Time values can also be specified via abbreviations supported by <<time-units,time units>> parsing.
Note that fractional time values are not supported, but you can address this by shifting to another
time unit (e.g., `1.5h` could instead be specified as `90m`). Also note that time intervals larger than
-than days do not support arbitrary values but can only be one unit large (e.g. `1y` is valid, `2y` is not).
+days do not support arbitrary values but can only be one unit large (e.g. `1y` is valid, `2y` is not).
[source,js]
--------------------------------------------------
From 6136e49a05f370376341d55d84ab0cd895f38a34 Mon Sep 17 00:00:00 2001
From: Costin Leau
Date: Wed, 11 Jul 2018 23:31:46 +0300
Subject: [PATCH 14/17] SQL: HAVING clause should accept only aggregates
(#31872)
Improve Verifier to allow HAVING clauses only on aggregates
Close #31726
---
.../xpack/sql/analysis/analyzer/Verifier.java | 65 +++++++++++++++++--
.../analyzer/VerifierErrorMessagesTests.java | 10 +++
2 files changed, 69 insertions(+), 6 deletions(-)
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java
index 6f8be61b463..4915a25a55b 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java
@@ -213,10 +213,11 @@ abstract class Verifier {
* Check validity of Aggregate/GroupBy.
* This rule is needed for multiple reasons:
* 1. a user might specify an invalid aggregate (SELECT foo GROUP BY bar)
- * 2. the order/having might contain a non-grouped attribute. This is typically
+ * 2. the ORDER BY/HAVING might contain a non-grouped attribute. This is typically
* caught by the Analyzer however if wrapped in a function (ABS()) it gets resolved
* (because the expression gets resolved little by little without being pushed down,
* without the Analyzer modifying anything.
+ * 2a. HAVING also requires an Aggregate function
* 3. composite agg (used for GROUP BY) allows ordering only on the group keys
*/
    private static boolean checkGroupBy(LogicalPlan p, Set<Failure> localFailures,
@@ -244,7 +245,7 @@ abstract class Verifier {
}
// make sure to compare attributes directly
- if (Expressions.anyMatch(a.groupings(),
+ if (Expressions.anyMatch(a.groupings(),
g -> e.semanticEquals(e instanceof Attribute ? Expressions.attribute(g) : g))) {
return;
}
@@ -278,13 +279,14 @@ abstract class Verifier {
            Map<Expression, Node<?>> missing = new LinkedHashMap<>();
Expression condition = f.condition();
- condition.collectFirstChildren(c -> checkGroupMatch(c, condition, a.groupings(), missing, functions));
+ // variation of checkGroupMatch customized for HAVING, which requires just aggregations
+ condition.collectFirstChildren(c -> checkGroupByHavingHasOnlyAggs(c, condition, missing, functions));
if (!missing.isEmpty()) {
String plural = missing.size() > 1 ? "s" : StringUtils.EMPTY;
- localFailures.add(fail(condition, "Cannot filter by non-grouped column" + plural + " %s, expected %s",
- Expressions.names(missing.keySet()),
- Expressions.names(a.groupings())));
+ localFailures.add(
+ fail(condition, "Cannot filter HAVING on non-aggregate" + plural + " %s; consider using WHERE instead",
+ Expressions.names(missing.keySet())));
groupingFailures.add(a);
return false;
}
@@ -294,6 +296,57 @@ abstract class Verifier {
}
+    private static boolean checkGroupByHavingHasOnlyAggs(Expression e, Node<?> source,
+            Map<Expression, Node<?>> missing, Map<String, Function> functions) {
+
+ // resolve FunctionAttribute to backing functions
+ if (e instanceof FunctionAttribute) {
+ FunctionAttribute fa = (FunctionAttribute) e;
+ Function function = functions.get(fa.functionId());
+ // TODO: this should be handled by a different rule
+ if (function == null) {
+ return false;
+ }
+ e = function;
+ }
+
+ // scalar functions can be a binary tree
+ // first test the function against the grouping
+ // and if that fails, start unpacking hoping to find matches
+ if (e instanceof ScalarFunction) {
+ ScalarFunction sf = (ScalarFunction) e;
+
+ // unwrap function to find the base
+ for (Expression arg : sf.arguments()) {
+ arg.collectFirstChildren(c -> checkGroupByHavingHasOnlyAggs(c, source, missing, functions));
+ }
+ return true;
+
+ } else if (e instanceof Score) {
+ // Score can't be used for having
+ missing.put(e, source);
+ return true;
+ }
+
+ // skip literals / foldable
+ if (e.foldable()) {
+ return true;
+ }
+ // skip aggs (allowed to refer to non-group columns)
+ if (Functions.isAggregate(e)) {
+ return true;
+ }
+
+ // left without leaves which have to match; that's a failure since everything should be based on an agg
+ if (e instanceof Attribute) {
+ missing.put(e, source);
+ return true;
+ }
+
+ return false;
+ }
+
+
// check whether plain columns specified in an agg are mentioned in the group-by
    private static boolean checkGroupByAgg(LogicalPlan p, Set<Failure> localFailures,
            Set<LogicalPlan> groupingFailures, Map<String, Function> functions) {
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java
index 60875e0194a..dce665a97e9 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java
@@ -159,4 +159,14 @@ public class VerifierErrorMessagesTests extends ESTestCase {
assertEquals("1:44: Cannot order by non-grouped column [SCORE()], expected [int]",
verify("SELECT int FROM test GROUP BY int ORDER BY SCORE()"));
}
+
+ public void testHavingOnColumn() {
+ assertEquals("1:42: Cannot filter HAVING on non-aggregate [int]; consider using WHERE instead",
+ verify("SELECT int FROM test GROUP BY int HAVING int > 2"));
+ }
+
+ public void testHavingOnScalar() {
+ assertEquals("1:42: Cannot filter HAVING on non-aggregate [int]; consider using WHERE instead",
+ verify("SELECT int FROM test GROUP BY int HAVING 2 < ABS(int)"));
+ }
}
\ No newline at end of file
From dc633e0000679da7ff650c9a11f53b8e17c0df96 Mon Sep 17 00:00:00 2001
From: Costin Leau
Date: Wed, 11 Jul 2018 23:36:39 +0300
Subject: [PATCH 15/17] SQL: Support for escape sequences (#31884)
Enhance grammar to allow JDBC/ODBC escape sequences, namely
- date, time and timestamp {d ''}, {t ''} and {ts ''}
- guid {guid ''}
- LIKE escape {escape ''}
- scalar function {fn }
Fix #31883
---
x-pack/plugin/sql/src/main/antlr/SqlBase.g4 | 62 +-
.../plugin/sql/src/main/antlr/SqlBase.tokens | 96 +-
.../sql/src/main/antlr/SqlBaseLexer.tokens | 94 +-
.../xpack/sql/parser/ExpressionBuilder.java | 167 +-
.../xpack/sql/parser/LogicalPlanBuilder.java | 11 +-
.../xpack/sql/parser/SqlBaseBaseListener.java | 153 +-
.../xpack/sql/parser/SqlBaseBaseVisitor.java | 91 +-
.../xpack/sql/parser/SqlBaseLexer.java | 598 +++--
.../xpack/sql/parser/SqlBaseListener.java | 141 +-
.../xpack/sql/parser/SqlBaseParser.java | 2358 +++++++++++------
.../xpack/sql/parser/SqlBaseVisitor.java | 85 +-
.../xpack/sql/parser/SqlParser.java | 19 +-
.../sql/parser/EscapedFunctionsTests.java | 237 ++
13 files changed, 2829 insertions(+), 1283 deletions(-)
create mode 100644 x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java
diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 b/x-pack/plugin/sql/src/main/antlr/SqlBase.g4
index ea0b7da161c..2c3288babd6 100644
--- a/x-pack/plugin/sql/src/main/antlr/SqlBase.g4
+++ b/x-pack/plugin/sql/src/main/antlr/SqlBase.g4
@@ -74,9 +74,14 @@ queryNoWith
: queryTerm
/** we could add sort by - sort per partition */
(ORDER BY orderBy (',' orderBy)*)?
- (LIMIT limit=(INTEGER_VALUE | ALL))?
+ limitClause?
;
+limitClause
+ : LIMIT limit=(INTEGER_VALUE | ALL)
+ | LIMIT_ESC limit=(INTEGER_VALUE | ALL) ESC_END
+ ;
+
queryTerm
: querySpecification #queryPrimaryDefault
| '(' queryNoWith ')' #subquery
@@ -185,7 +190,12 @@ predicate
;
pattern
- : value=string (ESCAPE escape=string)?
+ : value=string patternEscape?
+ ;
+
+patternEscape
+ : ESCAPE escape=string
+ | ESCAPE_ESC escape=string '}'
;
valueExpression
@@ -197,18 +207,44 @@ valueExpression
;
primaryExpression
- : CAST '(' expression AS dataType ')' #cast
- | EXTRACT '(' field=identifier FROM valueExpression ')' #extract
+ : castExpression #cast
+ | extractExpression #extract
| constant #constantDefault
| ASTERISK #star
| (qualifiedName DOT)? ASTERISK #star
- | identifier '(' (setQuantifier? expression (',' expression)*)? ')' #functionCall
+ | functionExpression #function
| '(' query ')' #subqueryExpression
| identifier #columnReference
| qualifiedName #dereference
| '(' expression ')' #parenthesizedExpression
;
+castExpression
+ : castTemplate
+ | FUNCTION_ESC castTemplate ESC_END
+ ;
+
+castTemplate
+ : CAST '(' expression AS dataType ')'
+ ;
+
+extractExpression
+ : extractTemplate
+ | FUNCTION_ESC extractTemplate ESC_END
+ ;
+
+extractTemplate
+ : EXTRACT '(' field=identifier FROM valueExpression ')'
+ ;
+
+functionExpression
+ : functionTemplate
+ | FUNCTION_ESC functionTemplate '}'
+ ;
+
+functionTemplate
+ : identifier '(' (setQuantifier? expression (',' expression)*)? ')'
+ ;
constant
: NULL #nullLiteral
@@ -216,6 +252,10 @@ constant
| booleanValue #booleanLiteral
| STRING+ #stringLiteral
| PARAM #paramLiteral
+ | DATE_ESC string ESC_END #dateEscapedLiteral
+ | TIME_ESC string ESC_END #timeEscapedLiteral
+ | TIMESTAMP_ESC string ESC_END #timestampEscapedLiteral
+ | GUID_ESC string ESC_END #guidEscapedLiteral
;
comparisonOperator
@@ -351,6 +391,18 @@ VERIFY: 'VERIFY';
WHERE: 'WHERE';
WITH: 'WITH';
+// Escaped Sequence
+ESCAPE_ESC: '{ESCAPE';
+FUNCTION_ESC: '{FN';
+LIMIT_ESC:'{LIMIT';
+DATE_ESC: '{D';
+TIME_ESC: '{T';
+TIMESTAMP_ESC: '{TS';
+// mapped to string literal
+GUID_ESC: '{GUID';
+
+ESC_END: '}';
+
EQ : '=';
NEQ : '<>' | '!=' | '<=>';
LT : '<';
diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens b/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens
index 87cf9a4809d..527cc676e1d 100644
--- a/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens
+++ b/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens
@@ -69,33 +69,41 @@ USING=68
VERIFY=69
WHERE=70
WITH=71
-EQ=72
-NEQ=73
-LT=74
-LTE=75
-GT=76
-GTE=77
-PLUS=78
-MINUS=79
-ASTERISK=80
-SLASH=81
-PERCENT=82
-CONCAT=83
-DOT=84
-PARAM=85
-STRING=86
-INTEGER_VALUE=87
-DECIMAL_VALUE=88
-IDENTIFIER=89
-DIGIT_IDENTIFIER=90
-TABLE_IDENTIFIER=91
-QUOTED_IDENTIFIER=92
-BACKQUOTED_IDENTIFIER=93
-SIMPLE_COMMENT=94
-BRACKETED_COMMENT=95
-WS=96
-UNRECOGNIZED=97
-DELIMITER=98
+ESCAPE_ESC=72
+FUNCTION_ESC=73
+LIMIT_ESC=74
+DATE_ESC=75
+TIME_ESC=76
+TIMESTAMP_ESC=77
+GUID_ESC=78
+ESC_END=79
+EQ=80
+NEQ=81
+LT=82
+LTE=83
+GT=84
+GTE=85
+PLUS=86
+MINUS=87
+ASTERISK=88
+SLASH=89
+PERCENT=90
+CONCAT=91
+DOT=92
+PARAM=93
+STRING=94
+INTEGER_VALUE=95
+DECIMAL_VALUE=96
+IDENTIFIER=97
+DIGIT_IDENTIFIER=98
+TABLE_IDENTIFIER=99
+QUOTED_IDENTIFIER=100
+BACKQUOTED_IDENTIFIER=101
+SIMPLE_COMMENT=102
+BRACKETED_COMMENT=103
+WS=104
+UNRECOGNIZED=105
+DELIMITER=106
'('=1
')'=2
','=3
@@ -167,16 +175,24 @@ DELIMITER=98
'VERIFY'=69
'WHERE'=70
'WITH'=71
-'='=72
-'<'=74
-'<='=75
-'>'=76
-'>='=77
-'+'=78
-'-'=79
-'*'=80
-'/'=81
-'%'=82
-'||'=83
-'.'=84
-'?'=85
+'{ESCAPE'=72
+'{FN'=73
+'{LIMIT'=74
+'{D'=75
+'{T'=76
+'{TS'=77
+'{GUID'=78
+'}'=79
+'='=80
+'<'=82
+'<='=83
+'>'=84
+'>='=85
+'+'=86
+'-'=87
+'*'=88
+'/'=89
+'%'=90
+'||'=91
+'.'=92
+'?'=93
diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens b/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens
index a687a9215ec..155d4860e0e 100644
--- a/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens
+++ b/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens
@@ -69,32 +69,40 @@ USING=68
VERIFY=69
WHERE=70
WITH=71
-EQ=72
-NEQ=73
-LT=74
-LTE=75
-GT=76
-GTE=77
-PLUS=78
-MINUS=79
-ASTERISK=80
-SLASH=81
-PERCENT=82
-CONCAT=83
-DOT=84
-PARAM=85
-STRING=86
-INTEGER_VALUE=87
-DECIMAL_VALUE=88
-IDENTIFIER=89
-DIGIT_IDENTIFIER=90
-TABLE_IDENTIFIER=91
-QUOTED_IDENTIFIER=92
-BACKQUOTED_IDENTIFIER=93
-SIMPLE_COMMENT=94
-BRACKETED_COMMENT=95
-WS=96
-UNRECOGNIZED=97
+ESCAPE_ESC=72
+FUNCTION_ESC=73
+LIMIT_ESC=74
+DATE_ESC=75
+TIME_ESC=76
+TIMESTAMP_ESC=77
+GUID_ESC=78
+ESC_END=79
+EQ=80
+NEQ=81
+LT=82
+LTE=83
+GT=84
+GTE=85
+PLUS=86
+MINUS=87
+ASTERISK=88
+SLASH=89
+PERCENT=90
+CONCAT=91
+DOT=92
+PARAM=93
+STRING=94
+INTEGER_VALUE=95
+DECIMAL_VALUE=96
+IDENTIFIER=97
+DIGIT_IDENTIFIER=98
+TABLE_IDENTIFIER=99
+QUOTED_IDENTIFIER=100
+BACKQUOTED_IDENTIFIER=101
+SIMPLE_COMMENT=102
+BRACKETED_COMMENT=103
+WS=104
+UNRECOGNIZED=105
'('=1
')'=2
','=3
@@ -166,16 +174,24 @@ UNRECOGNIZED=97
'VERIFY'=69
'WHERE'=70
'WITH'=71
-'='=72
-'<'=74
-'<='=75
-'>'=76
-'>='=77
-'+'=78
-'-'=79
-'*'=80
-'/'=81
-'%'=82
-'||'=83
-'.'=84
-'?'=85
+'{ESCAPE'=72
+'{FN'=73
+'{LIMIT'=74
+'{D'=75
+'{T'=76
+'{TS'=77
+'{GUID'=78
+'}'=79
+'='=80
+'<'=82
+'<='=83
+'>'=84
+'>='=85
+'+'=86
+'-'=87
+'*'=88
+'/'=89
+'%'=90
+'||'=91
+'.'=92
+'?'=93
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java
index 35eb76af67c..66ec98ea53c 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java
@@ -20,6 +20,7 @@ import org.elasticsearch.xpack.sql.expression.Order;
import org.elasticsearch.xpack.sql.expression.ScalarSubquery;
import org.elasticsearch.xpack.sql.expression.UnresolvedAttribute;
import org.elasticsearch.xpack.sql.expression.UnresolvedStar;
+import org.elasticsearch.xpack.sql.expression.function.Function;
import org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.Cast;
import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Add;
@@ -48,14 +49,19 @@ import org.elasticsearch.xpack.sql.expression.regex.RLike;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ArithmeticBinaryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ArithmeticUnaryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.BooleanLiteralContext;
-import org.elasticsearch.xpack.sql.parser.SqlBaseParser.CastContext;
+import org.elasticsearch.xpack.sql.parser.SqlBaseParser.CastExpressionContext;
+import org.elasticsearch.xpack.sql.parser.SqlBaseParser.CastTemplateContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ColumnReferenceContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ComparisonContext;
+import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DateEscapedLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DecimalLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DereferenceContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ExistsContext;
-import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ExtractContext;
-import org.elasticsearch.xpack.sql.parser.SqlBaseParser.FunctionCallContext;
+import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ExtractExpressionContext;
+import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ExtractTemplateContext;
+import org.elasticsearch.xpack.sql.parser.SqlBaseParser.FunctionExpressionContext;
+import org.elasticsearch.xpack.sql.parser.SqlBaseParser.FunctionTemplateContext;
+import org.elasticsearch.xpack.sql.parser.SqlBaseParser.GuidEscapedLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.IntegerLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.LogicalBinaryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.LogicalNotContext;
@@ -66,6 +72,7 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.OrderByContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ParamLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ParenthesizedExpressionContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PatternContext;
+import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PatternEscapeContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PredicateContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PredicatedContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PrimitiveDataTypeContext;
@@ -76,10 +83,16 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StringContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StringLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StringQueryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.SubqueryExpressionContext;
+import org.elasticsearch.xpack.sql.parser.SqlBaseParser.TimeEscapedLiteralContext;
+import org.elasticsearch.xpack.sql.parser.SqlBaseParser.TimestampEscapedLiteralContext;
import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.DataTypes;
+import org.joda.time.DateTime;
+import org.joda.time.format.DateTimeFormatter;
+import org.joda.time.format.DateTimeFormatterBuilder;
+import org.joda.time.format.ISODateTimeFormat;
import java.math.BigDecimal;
import java.util.List;
@@ -222,17 +235,18 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
}
char escape = 0;
- String escapeString = string(ctx.escape);
+ PatternEscapeContext escapeCtx = ctx.patternEscape();
+ String escapeString = escapeCtx == null ? null : string(escapeCtx.escape);
if (Strings.hasText(escapeString)) {
// shouldn't happen but adding validation in case the string parsing gets wonky
if (escapeString.length() > 1) {
- throw new ParsingException(source(ctx.escape), "A character not a string required for escaping; found [{}]", escapeString);
+ throw new ParsingException(source(escapeCtx), "A character not a string required for escaping; found [{}]", escapeString);
} else if (escapeString.length() == 1) {
escape = escapeString.charAt(0);
// these chars already have a meaning
if (escape == '*' || escape == '%' || escape == '_') {
- throw new ParsingException(source(ctx.escape), "Char [{}] cannot be used for escaping", escape);
+ throw new ParsingException(source(escapeCtx.escape), "Char [{}] cannot be used for escaping", escape);
}
// lastly validate that escape chars (if present) are followed by special chars
for (int i = 0; i < pattern.length(); i++) {
@@ -324,11 +338,6 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
ctx.DESC() != null ? Order.OrderDirection.DESC : Order.OrderDirection.ASC);
}
- @Override
- public Object visitCast(CastContext ctx) {
- return new Cast(source(ctx), expression(ctx.expression()), typedParsing(ctx.dataType(), DataType.class));
- }
-
@Override
public DataType visitPrimitiveDataType(PrimitiveDataTypeContext ctx) {
String type = visitIdentifier(ctx.identifier()).toLowerCase(Locale.ROOT);
@@ -367,20 +376,32 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
}
}
+ //
+ // Functions template
+ //
@Override
- public Object visitFunctionCall(FunctionCallContext ctx) {
- String name = visitIdentifier(ctx.identifier());
- boolean isDistinct = ctx.setQuantifier() != null && ctx.setQuantifier().DISTINCT() != null;
- UnresolvedFunction.ResolutionType resolutionType =
- isDistinct ? UnresolvedFunction.ResolutionType.DISTINCT : UnresolvedFunction.ResolutionType.STANDARD;
- return new UnresolvedFunction(source(ctx), name, resolutionType, expressions(ctx.expression()));
+ public Cast visitCastExpression(CastExpressionContext ctx) {
+ CastTemplateContext ctc = ctx.castTemplate();
+ return new Cast(source(ctc), expression(ctc.expression()), typedParsing(ctc.dataType(), DataType.class));
}
@Override
- public Object visitExtract(ExtractContext ctx) {
- String fieldString = visitIdentifier(ctx.field);
- return new UnresolvedFunction(source(ctx), fieldString,
- UnresolvedFunction.ResolutionType.EXTRACT, singletonList(expression(ctx.valueExpression())));
+ public Function visitExtractExpression(ExtractExpressionContext ctx) {
+ ExtractTemplateContext template = ctx.extractTemplate();
+ String fieldString = visitIdentifier(template.field);
+ return new UnresolvedFunction(source(template), fieldString,
+ UnresolvedFunction.ResolutionType.EXTRACT, singletonList(expression(template.valueExpression())));
+ }
+
+ @Override
+ public Function visitFunctionExpression(FunctionExpressionContext ctx) {
+ FunctionTemplateContext template = ctx.functionTemplate();
+
+ String name = visitIdentifier(template.identifier());
+ boolean isDistinct = template.setQuantifier() != null && template.setQuantifier().DISTINCT() != null;
+ UnresolvedFunction.ResolutionType resolutionType =
+ isDistinct ? UnresolvedFunction.ResolutionType.DISTINCT : UnresolvedFunction.ResolutionType.STANDARD;
+ return new UnresolvedFunction(source(ctx), name, resolutionType, expressions(template.expression()));
}
@Override
@@ -445,12 +466,12 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
}
@Override
- public Object visitDecimalLiteral(DecimalLiteralContext ctx) {
+ public Literal visitDecimalLiteral(DecimalLiteralContext ctx) {
return new Literal(source(ctx), new BigDecimal(ctx.getText()).doubleValue(), DataType.DOUBLE);
}
@Override
- public Object visitIntegerLiteral(IntegerLiteralContext ctx) {
+ public Literal visitIntegerLiteral(IntegerLiteralContext ctx) {
BigDecimal bigD = new BigDecimal(ctx.getText());
long value = bigD.longValueExact();
@@ -463,7 +484,7 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
}
@Override
- public Object visitParamLiteral(ParamLiteralContext ctx) {
+ public Literal visitParamLiteral(ParamLiteralContext ctx) {
SqlTypedParamValue param = param(ctx.PARAM());
Location loc = source(ctx);
if (param.value == null) {
@@ -522,4 +543,100 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
return params.get(token);
}
-}
+
+ @Override
+ public Literal visitDateEscapedLiteral(DateEscapedLiteralContext ctx) {
+ String string = string(ctx.string());
+ Location loc = source(ctx);
+ // parse yyyy-MM-dd
+ DateTime dt = null;
+ try {
+ dt = ISODateTimeFormat.date().parseDateTime(string);
+ } catch(IllegalArgumentException ex) {
+ throw new ParsingException(loc, "Invalid date received; {}", ex.getMessage());
+ }
+ return new Literal(loc, dt, DataType.DATE);
+ }
+
+ @Override
+ public Literal visitTimeEscapedLiteral(TimeEscapedLiteralContext ctx) {
+ String string = string(ctx.string());
+ Location loc = source(ctx);
+
+ // parse HH:mm:ss
+ DateTime dt = null;
+ try {
+ dt = ISODateTimeFormat.hourMinuteSecond().parseDateTime(string);
+ } catch (IllegalArgumentException ex) {
+ throw new ParsingException(loc, "Invalid time received; {}", ex.getMessage());
+ }
+
+ throw new SqlIllegalArgumentException("Time (only) literals are not supported; a date component is required as well");
+ }
+
+ @Override
+ public Literal visitTimestampEscapedLiteral(TimestampEscapedLiteralContext ctx) {
+ String string = string(ctx.string());
+
+ Location loc = source(ctx);
+ // parse yyyy-MM-dd HH:mm:ss(.f...)
+ DateTime dt = null;
+ try {
+ DateTimeFormatter formatter = new DateTimeFormatterBuilder()
+ .append(ISODateTimeFormat.date())
+ .appendLiteral(" ")
+ .append(ISODateTimeFormat.hourMinuteSecondFraction())
+ .toFormatter();
+ dt = formatter.parseDateTime(string);
+ } catch (IllegalArgumentException ex) {
+ throw new ParsingException(loc, "Invalid timestamp received; {}", ex.getMessage());
+ }
+ return new Literal(loc, dt, DataType.DATE);
+ }
+
+ @Override
+ public Literal visitGuidEscapedLiteral(GuidEscapedLiteralContext ctx) {
+ String string = string(ctx.string());
+
+ Location loc = source(ctx.string());
+ // basic validation
+ String lowerCase = string.toLowerCase(Locale.ROOT);
+ // needs to be format nnnnnnnn-nnnn-nnnn-nnnn-nnnnnnnnnnnn
+ // since the length is fixed, the validation happens on absolute values
+ // not pretty but it's fast and doesn't create any extra objects
+
+ String errorPrefix = "Invalid GUID, ";
+
+ if (lowerCase.length() != 36) {
+ throw new ParsingException(loc, "{}too {}", errorPrefix, lowerCase.length() > 36 ? "long" : "short");
+ }
+
+ int[] separatorPos = { 8, 13, 18, 23 };
+ for (int pos : separatorPos) {
+ if (lowerCase.charAt(pos) != '-') {
+ throw new ParsingException(loc, "{}expected group separator at offset [{}], found [{}]",
+ errorPrefix, pos, string.charAt(pos));
+ }
+ }
+
+ String HEXA = "0123456789abcdef";
+
+ for (int i = 0; i < lowerCase.length(); i++) {
+ // skip separators
+ boolean inspect = true;
+ for (int pos : separatorPos) {
+ if (i == pos) {
+ inspect = false;
+ break;
+ } else if (pos > i) {
+ break;
+ }
+ }
+ if (inspect && HEXA.indexOf(lowerCase.charAt(i)) < 0) {
+ throw new ParsingException(loc, "{}expected hexadecimal at offset[{}], found [{}]", errorPrefix, i, string.charAt(i));
+ }
+ }
+
+ return new Literal(source(ctx), string, DataType.KEYWORD);
+ }
+}
\ No newline at end of file
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java
index 3435994a0fc..58d858c4241 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java
@@ -19,6 +19,7 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.GroupByContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinCriteriaContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinRelationContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinTypeContext;
+import org.elasticsearch.xpack.sql.parser.SqlBaseParser.LimitClauseContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.NamedQueryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.QueryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.QueryNoWithContext;
@@ -89,9 +90,13 @@ abstract class LogicalPlanBuilder extends ExpressionBuilder {
plan = new OrderBy(source(ctx.ORDER()), plan, visitList(ctx.orderBy(), Order.class));
}
- if (ctx.limit != null && ctx.INTEGER_VALUE() != null) {
- plan = new Limit(source(ctx.limit), new Literal(source(ctx),
- Integer.parseInt(ctx.limit.getText()), DataType.INTEGER), plan);
+ LimitClauseContext limitClause = ctx.limitClause();
+ if (limitClause != null) {
+ Token limit = limitClause.limit;
+ if (limit != null && limitClause.INTEGER_VALUE() != null) {
+ plan = new Limit(source(limitClause), new Literal(source(limitClause),
+ Integer.parseInt(limit.getText()), DataType.INTEGER), plan);
+ }
}
return plan;
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java
index 4e80e8db9bb..b353bcf6521 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java
@@ -1,8 +1,3 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
// ANTLR GENERATED CODE: DO NOT EDIT
package org.elasticsearch.xpack.sql.parser;
@@ -208,6 +203,18 @@ class SqlBaseBaseListener implements SqlBaseListener {
* The default implementation does nothing.
*/
@Override public void exitQueryNoWith(SqlBaseParser.QueryNoWithContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void enterLimitClause(SqlBaseParser.LimitClauseContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void exitLimitClause(SqlBaseParser.LimitClauseContext ctx) { }
/**
* {@inheritDoc}
*
@@ -556,6 +563,18 @@ class SqlBaseBaseListener implements SqlBaseListener {
* The default implementation does nothing.
*/
@Override public void exitPattern(SqlBaseParser.PatternContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void enterPatternEscape(SqlBaseParser.PatternEscapeContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void exitPatternEscape(SqlBaseParser.PatternEscapeContext ctx) { }
/**
* {@inheritDoc}
*
@@ -657,13 +676,13 @@ class SqlBaseBaseListener implements SqlBaseListener {
*
* The default implementation does nothing.
*/
- @Override public void enterFunctionCall(SqlBaseParser.FunctionCallContext ctx) { }
+ @Override public void enterFunction(SqlBaseParser.FunctionContext ctx) { }
/**
* {@inheritDoc}
*
* The default implementation does nothing.
*/
- @Override public void exitFunctionCall(SqlBaseParser.FunctionCallContext ctx) { }
+ @Override public void exitFunction(SqlBaseParser.FunctionContext ctx) { }
/**
* {@inheritDoc}
*
@@ -712,6 +731,78 @@ class SqlBaseBaseListener implements SqlBaseListener {
* The default implementation does nothing.
*/
@Override public void exitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void enterCastExpression(SqlBaseParser.CastExpressionContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void exitCastExpression(SqlBaseParser.CastExpressionContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void enterCastTemplate(SqlBaseParser.CastTemplateContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void exitCastTemplate(SqlBaseParser.CastTemplateContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void enterExtractExpression(SqlBaseParser.ExtractExpressionContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void exitExtractExpression(SqlBaseParser.ExtractExpressionContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void enterExtractTemplate(SqlBaseParser.ExtractTemplateContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void exitExtractTemplate(SqlBaseParser.ExtractTemplateContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void enterFunctionExpression(SqlBaseParser.FunctionExpressionContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void exitFunctionExpression(SqlBaseParser.FunctionExpressionContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void enterFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void exitFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx) { }
/**
* {@inheritDoc}
*
@@ -772,6 +863,54 @@ class SqlBaseBaseListener implements SqlBaseListener {
* The default implementation does nothing.
*/
@Override public void exitParamLiteral(SqlBaseParser.ParamLiteralContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void enterDateEscapedLiteral(SqlBaseParser.DateEscapedLiteralContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void exitDateEscapedLiteral(SqlBaseParser.DateEscapedLiteralContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void enterTimeEscapedLiteral(SqlBaseParser.TimeEscapedLiteralContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void exitTimeEscapedLiteral(SqlBaseParser.TimeEscapedLiteralContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void enterTimestampEscapedLiteral(SqlBaseParser.TimestampEscapedLiteralContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void exitTimestampEscapedLiteral(SqlBaseParser.TimestampEscapedLiteralContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void enterGuidEscapedLiteral(SqlBaseParser.GuidEscapedLiteralContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void exitGuidEscapedLiteral(SqlBaseParser.GuidEscapedLiteralContext ctx) { }
/**
* {@inheritDoc}
*
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java
index 1adb0a423c7..d40ae6daa6e 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java
@@ -1,8 +1,3 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
// ANTLR GENERATED CODE: DO NOT EDIT
package org.elasticsearch.xpack.sql.parser;
import org.antlr.v4.runtime.tree.AbstractParseTreeVisitor;
@@ -128,6 +123,13 @@ class SqlBaseBaseVisitor extends AbstractParseTreeVisitor implements SqlBa
* {@link #visitChildren} on {@code ctx}.
*/
@Override public T visitQueryNoWith(SqlBaseParser.QueryNoWithContext ctx) { return visitChildren(ctx); }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation returns the result of calling
+ * {@link #visitChildren} on {@code ctx}.
+ */
+ @Override public T visitLimitClause(SqlBaseParser.LimitClauseContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
@@ -331,6 +333,13 @@ class SqlBaseBaseVisitor extends AbstractParseTreeVisitor implements SqlBa
* {@link #visitChildren} on {@code ctx}.
*/
@Override public T visitPattern(SqlBaseParser.PatternContext ctx) { return visitChildren(ctx); }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation returns the result of calling
+ * {@link #visitChildren} on {@code ctx}.
+ */
+ @Override public T visitPatternEscape(SqlBaseParser.PatternEscapeContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
@@ -393,7 +402,7 @@ class SqlBaseBaseVisitor extends AbstractParseTreeVisitor implements SqlBa
* The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.
*/
- @Override public T visitFunctionCall(SqlBaseParser.FunctionCallContext ctx) { return visitChildren(ctx); }
+ @Override public T visitFunction(SqlBaseParser.FunctionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
@@ -422,6 +431,48 @@ class SqlBaseBaseVisitor extends AbstractParseTreeVisitor implements SqlBa
* {@link #visitChildren} on {@code ctx}.
*/
@Override public T visitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx) { return visitChildren(ctx); }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation returns the result of calling
+ * {@link #visitChildren} on {@code ctx}.
+ */
+ @Override public T visitCastExpression(SqlBaseParser.CastExpressionContext ctx) { return visitChildren(ctx); }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation returns the result of calling
+ * {@link #visitChildren} on {@code ctx}.
+ */
+ @Override public T visitCastTemplate(SqlBaseParser.CastTemplateContext ctx) { return visitChildren(ctx); }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation returns the result of calling
+ * {@link #visitChildren} on {@code ctx}.
+ */
+ @Override public T visitExtractExpression(SqlBaseParser.ExtractExpressionContext ctx) { return visitChildren(ctx); }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation returns the result of calling
+ * {@link #visitChildren} on {@code ctx}.
+ */
+ @Override public T visitExtractTemplate(SqlBaseParser.ExtractTemplateContext ctx) { return visitChildren(ctx); }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation returns the result of calling
+ * {@link #visitChildren} on {@code ctx}.
+ */
+ @Override public T visitFunctionExpression(SqlBaseParser.FunctionExpressionContext ctx) { return visitChildren(ctx); }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation returns the result of calling
+ * {@link #visitChildren} on {@code ctx}.
+ */
+ @Override public T visitFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
@@ -457,6 +508,34 @@ class SqlBaseBaseVisitor extends AbstractParseTreeVisitor implements SqlBa
* {@link #visitChildren} on {@code ctx}.
*/
@Override public T visitParamLiteral(SqlBaseParser.ParamLiteralContext ctx) { return visitChildren(ctx); }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation returns the result of calling
+ * {@link #visitChildren} on {@code ctx}.
+ */
+ @Override public T visitDateEscapedLiteral(SqlBaseParser.DateEscapedLiteralContext ctx) { return visitChildren(ctx); }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation returns the result of calling
+ * {@link #visitChildren} on {@code ctx}.
+ */
+ @Override public T visitTimeEscapedLiteral(SqlBaseParser.TimeEscapedLiteralContext ctx) { return visitChildren(ctx); }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation returns the result of calling
+ * {@link #visitChildren} on {@code ctx}.
+ */
+ @Override public T visitTimestampEscapedLiteral(SqlBaseParser.TimestampEscapedLiteralContext ctx) { return visitChildren(ctx); }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation returns the result of calling
+ * {@link #visitChildren} on {@code ctx}.
+ */
+ @Override public T visitGuidEscapedLiteral(SqlBaseParser.GuidEscapedLiteralContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java
index c54c5e3810c..588f3ef028d 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java
@@ -1,15 +1,13 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
// ANTLR GENERATED CODE: DO NOT EDIT
package org.elasticsearch.xpack.sql.parser;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.CharStream;
+import org.antlr.v4.runtime.Token;
+import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
+import org.antlr.v4.runtime.misc.*;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
class SqlBaseLexer extends Lexer {
@@ -28,11 +26,13 @@ class SqlBaseLexer extends Lexer {
NOT=45, NULL=46, ON=47, OPTIMIZED=48, OR=49, ORDER=50, OUTER=51, PARSED=52,
PHYSICAL=53, PLAN=54, RIGHT=55, RLIKE=56, QUERY=57, SCHEMAS=58, SELECT=59,
SHOW=60, SYS=61, TABLE=62, TABLES=63, TEXT=64, TRUE=65, TYPE=66, TYPES=67,
- USING=68, VERIFY=69, WHERE=70, WITH=71, EQ=72, NEQ=73, LT=74, LTE=75,
- GT=76, GTE=77, PLUS=78, MINUS=79, ASTERISK=80, SLASH=81, PERCENT=82, CONCAT=83,
- DOT=84, PARAM=85, STRING=86, INTEGER_VALUE=87, DECIMAL_VALUE=88, IDENTIFIER=89,
- DIGIT_IDENTIFIER=90, TABLE_IDENTIFIER=91, QUOTED_IDENTIFIER=92, BACKQUOTED_IDENTIFIER=93,
- SIMPLE_COMMENT=94, BRACKETED_COMMENT=95, WS=96, UNRECOGNIZED=97;
+ USING=68, VERIFY=69, WHERE=70, WITH=71, ESCAPE_ESC=72, FUNCTION_ESC=73,
+ LIMIT_ESC=74, DATE_ESC=75, TIME_ESC=76, TIMESTAMP_ESC=77, GUID_ESC=78,
+ ESC_END=79, EQ=80, NEQ=81, LT=82, LTE=83, GT=84, GTE=85, PLUS=86, MINUS=87,
+ ASTERISK=88, SLASH=89, PERCENT=90, CONCAT=91, DOT=92, PARAM=93, STRING=94,
+ INTEGER_VALUE=95, DECIMAL_VALUE=96, IDENTIFIER=97, DIGIT_IDENTIFIER=98,
+ TABLE_IDENTIFIER=99, QUOTED_IDENTIFIER=100, BACKQUOTED_IDENTIFIER=101,
+ SIMPLE_COMMENT=102, BRACKETED_COMMENT=103, WS=104, UNRECOGNIZED=105;
public static String[] modeNames = {
"DEFAULT_MODE"
};
@@ -46,12 +46,13 @@ class SqlBaseLexer extends Lexer {
"LIMIT", "MAPPED", "MATCH", "NATURAL", "NOT", "NULL", "ON", "OPTIMIZED",
"OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", "RIGHT", "RLIKE",
"QUERY", "SCHEMAS", "SELECT", "SHOW", "SYS", "TABLE", "TABLES", "TEXT",
- "TRUE", "TYPE", "TYPES", "USING", "VERIFY", "WHERE", "WITH", "EQ", "NEQ",
- "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT",
- "CONCAT", "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE",
- "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER",
- "BACKQUOTED_IDENTIFIER", "EXPONENT", "DIGIT", "LETTER", "SIMPLE_COMMENT",
- "BRACKETED_COMMENT", "WS", "UNRECOGNIZED"
+ "TRUE", "TYPE", "TYPES", "USING", "VERIFY", "WHERE", "WITH", "ESCAPE_ESC",
+ "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC",
+ "GUID_ESC", "ESC_END", "EQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS",
+ "MINUS", "ASTERISK", "SLASH", "PERCENT", "CONCAT", "DOT", "PARAM", "STRING",
+ "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER",
+ "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "EXPONENT", "DIGIT", "LETTER",
+ "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", "UNRECOGNIZED"
};
private static final String[] _LITERAL_NAMES = {
@@ -65,8 +66,9 @@ class SqlBaseLexer extends Lexer {
"'OR'", "'ORDER'", "'OUTER'", "'PARSED'", "'PHYSICAL'", "'PLAN'", "'RIGHT'",
"'RLIKE'", "'QUERY'", "'SCHEMAS'", "'SELECT'", "'SHOW'", "'SYS'", "'TABLE'",
"'TABLES'", "'TEXT'", "'TRUE'", "'TYPE'", "'TYPES'", "'USING'", "'VERIFY'",
- "'WHERE'", "'WITH'", "'='", null, "'<'", "'<='", "'>'", "'>='", "'+'",
- "'-'", "'*'", "'/'", "'%'", "'||'", "'.'", "'?'"
+ "'WHERE'", "'WITH'", "'{ESCAPE'", "'{FN'", "'{LIMIT'", "'{D'", "'{T'",
+ "'{TS'", "'{GUID'", "'}'", "'='", null, "'<'", "'<='", "'>'", "'>='",
+ "'+'", "'-'", "'*'", "'/'", "'%'", "'||'", "'.'", "'?'"
};
private static final String[] _SYMBOLIC_NAMES = {
null, null, null, null, null, "ALL", "ANALYZE", "ANALYZED", "AND", "ANY",
@@ -77,12 +79,13 @@ class SqlBaseLexer extends Lexer {
"LIMIT", "MAPPED", "MATCH", "NATURAL", "NOT", "NULL", "ON", "OPTIMIZED",
"OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", "RIGHT", "RLIKE",
"QUERY", "SCHEMAS", "SELECT", "SHOW", "SYS", "TABLE", "TABLES", "TEXT",
- "TRUE", "TYPE", "TYPES", "USING", "VERIFY", "WHERE", "WITH", "EQ", "NEQ",
- "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT",
- "CONCAT", "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE",
- "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER",
- "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS",
- "UNRECOGNIZED"
+ "TRUE", "TYPE", "TYPES", "USING", "VERIFY", "WHERE", "WITH", "ESCAPE_ESC",
+ "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC",
+ "GUID_ESC", "ESC_END", "EQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS",
+ "MINUS", "ASTERISK", "SLASH", "PERCENT", "CONCAT", "DOT", "PARAM", "STRING",
+ "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER",
+ "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", "BRACKETED_COMMENT",
+ "WS", "UNRECOGNIZED"
};
public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
@@ -139,7 +142,7 @@ class SqlBaseLexer extends Lexer {
public ATN getATN() { return _ATN; }
public static final String _serializedATN =
- "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2c\u033b\b\1\4\2\t"+
+ "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2k\u0370\b\1\4\2\t"+
"\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+
"\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+
"\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+
@@ -150,276 +153,293 @@ class SqlBaseLexer extends Lexer {
"\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4I"+
"\tI\4J\tJ\4K\tK\4L\tL\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT"+
"\4U\tU\4V\tV\4W\tW\4X\tX\4Y\tY\4Z\tZ\4[\t[\4\\\t\\\4]\t]\4^\t^\4_\t_\4"+
- "`\t`\4a\ta\4b\tb\4c\tc\4d\td\4e\te\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3\6"+
- "\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\b\3\b\3\b\3\b\3\b\3\b\3"+
- "\b\3\b\3\b\3\t\3\t\3\t\3\t\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\f\3\f\3\f"+
- "\3\f\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\16\3\16\3\16\3\17\3\17\3\17\3\17"+
- "\3\17\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\21\3\21\3\21\3\21\3\21"+
- "\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\23\3\23"+
- "\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3\25"+
- "\3\25\3\25\3\25\3\25\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\27"+
- "\3\27\3\27\3\27\3\27\3\27\3\27\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30"+
- "\3\30\3\30\3\30\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\32\3\32\3\32\3\32"+
- "\3\32\3\32\3\32\3\32\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\34\3\34"+
- "\3\34\3\34\3\34\3\34\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\36\3\36\3\36"+
- "\3\36\3\36\3\37\3\37\3\37\3\37\3\37\3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3!\3"+
- "!\3!\3!\3!\3!\3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3\"\3#\3#\3#\3#\3#\3#\3#\3"+
- "$\3$\3$\3%\3%\3%\3%\3%\3%\3&\3&\3&\3\'\3\'\3\'\3\'\3\'\3(\3(\3(\3(\3("+
- "\3)\3)\3)\3)\3)\3*\3*\3*\3*\3*\3*\3+\3+\3+\3+\3+\3+\3+\3,\3,\3,\3,\3,"+
- "\3,\3-\3-\3-\3-\3-\3-\3-\3-\3.\3.\3.\3.\3/\3/\3/\3/\3/\3\60\3\60\3\60"+
- "\3\61\3\61\3\61\3\61\3\61\3\61\3\61\3\61\3\61\3\61\3\62\3\62\3\62\3\63"+
- "\3\63\3\63\3\63\3\63\3\63\3\64\3\64\3\64\3\64\3\64\3\64\3\65\3\65\3\65"+
- "\3\65\3\65\3\65\3\65\3\66\3\66\3\66\3\66\3\66\3\66\3\66\3\66\3\66\3\67"+
- "\3\67\3\67\3\67\3\67\38\38\38\38\38\38\39\39\39\39\39\39\3:\3:\3:\3:\3"+
- ":\3:\3;\3;\3;\3;\3;\3;\3;\3;\3<\3<\3<\3<\3<\3<\3<\3=\3=\3=\3=\3=\3>\3"+
- ">\3>\3>\3?\3?\3?\3?\3?\3?\3@\3@\3@\3@\3@\3@\3@\3A\3A\3A\3A\3A\3B\3B\3"+
- "B\3B\3B\3C\3C\3C\3C\3C\3D\3D\3D\3D\3D\3D\3E\3E\3E\3E\3E\3E\3F\3F\3F\3"+
- "F\3F\3F\3F\3G\3G\3G\3G\3G\3G\3H\3H\3H\3H\3H\3I\3I\3J\3J\3J\3J\3J\3J\3"+
- "J\5J\u027b\nJ\3K\3K\3L\3L\3L\3M\3M\3N\3N\3N\3O\3O\3P\3P\3Q\3Q\3R\3R\3"+
- "S\3S\3T\3T\3T\3U\3U\3V\3V\3W\3W\3W\3W\7W\u029c\nW\fW\16W\u029f\13W\3W"+
- "\3W\3X\6X\u02a4\nX\rX\16X\u02a5\3Y\6Y\u02a9\nY\rY\16Y\u02aa\3Y\3Y\7Y\u02af"+
- "\nY\fY\16Y\u02b2\13Y\3Y\3Y\6Y\u02b6\nY\rY\16Y\u02b7\3Y\6Y\u02bb\nY\rY"+
- "\16Y\u02bc\3Y\3Y\7Y\u02c1\nY\fY\16Y\u02c4\13Y\5Y\u02c6\nY\3Y\3Y\3Y\3Y"+
- "\6Y\u02cc\nY\rY\16Y\u02cd\3Y\3Y\5Y\u02d2\nY\3Z\3Z\5Z\u02d6\nZ\3Z\3Z\3"+
- "Z\7Z\u02db\nZ\fZ\16Z\u02de\13Z\3[\3[\3[\3[\6[\u02e4\n[\r[\16[\u02e5\3"+
- "\\\3\\\3\\\3\\\6\\\u02ec\n\\\r\\\16\\\u02ed\3]\3]\3]\3]\7]\u02f4\n]\f"+
- "]\16]\u02f7\13]\3]\3]\3^\3^\3^\3^\7^\u02ff\n^\f^\16^\u0302\13^\3^\3^\3"+
- "_\3_\5_\u0308\n_\3_\6_\u030b\n_\r_\16_\u030c\3`\3`\3a\3a\3b\3b\3b\3b\7"+
- "b\u0317\nb\fb\16b\u031a\13b\3b\5b\u031d\nb\3b\5b\u0320\nb\3b\3b\3c\3c"+
- "\3c\3c\3c\7c\u0329\nc\fc\16c\u032c\13c\3c\3c\3c\3c\3c\3d\6d\u0334\nd\r"+
- "d\16d\u0335\3d\3d\3e\3e\3\u032a\2f\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n"+
- "\23\13\25\f\27\r\31\16\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27-\30"+
- "/\31\61\32\63\33\65\34\67\359\36;\37= ?!A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.["+
- "/]\60_\61a\62c\63e\64g\65i\66k\67m8o9q:s;u{?}@\177A\u0081B\u0083"+
- "C\u0085D\u0087E\u0089F\u008bG\u008dH\u008fI\u0091J\u0093K\u0095L\u0097"+
- "M\u0099N\u009bO\u009dP\u009fQ\u00a1R\u00a3S\u00a5T\u00a7U\u00a9V\u00ab"+
- "W\u00adX\u00afY\u00b1Z\u00b3[\u00b5\\\u00b7]\u00b9^\u00bb_\u00bd\2\u00bf"+
- "\2\u00c1\2\u00c3`\u00c5a\u00c7b\u00c9c\3\2\f\3\2))\4\2BBaa\5\2<\3>\3>\3>\3?\3?\3?\3?"+
+ "\3?\3?\3@\3@\3@\3@\3@\3@\3@\3A\3A\3A\3A\3A\3B\3B\3B\3B\3B\3C\3C\3C\3C"+
+ "\3C\3D\3D\3D\3D\3D\3D\3E\3E\3E\3E\3E\3E\3F\3F\3F\3F\3F\3F\3F\3G\3G\3G"+
+ "\3G\3G\3G\3H\3H\3H\3H\3H\3I\3I\3I\3I\3I\3I\3I\3I\3J\3J\3J\3J\3K\3K\3K"+
+ "\3K\3K\3K\3K\3L\3L\3L\3M\3M\3M\3N\3N\3N\3N\3O\3O\3O\3O\3O\3O\3P\3P\3Q"+
+ "\3Q\3R\3R\3R\3R\3R\3R\3R\5R\u02b0\nR\3S\3S\3T\3T\3T\3U\3U\3V\3V\3V\3W"+
+ "\3W\3X\3X\3Y\3Y\3Z\3Z\3[\3[\3\\\3\\\3\\\3]\3]\3^\3^\3_\3_\3_\3_\7_\u02d1"+
+ "\n_\f_\16_\u02d4\13_\3_\3_\3`\6`\u02d9\n`\r`\16`\u02da\3a\6a\u02de\na"+
+ "\ra\16a\u02df\3a\3a\7a\u02e4\na\fa\16a\u02e7\13a\3a\3a\6a\u02eb\na\ra"+
+ "\16a\u02ec\3a\6a\u02f0\na\ra\16a\u02f1\3a\3a\7a\u02f6\na\fa\16a\u02f9"+
+ "\13a\5a\u02fb\na\3a\3a\3a\3a\6a\u0301\na\ra\16a\u0302\3a\3a\5a\u0307\n"+
+ "a\3b\3b\5b\u030b\nb\3b\3b\3b\7b\u0310\nb\fb\16b\u0313\13b\3c\3c\3c\3c"+
+ "\6c\u0319\nc\rc\16c\u031a\3d\3d\3d\3d\6d\u0321\nd\rd\16d\u0322\3e\3e\3"+
+ "e\3e\7e\u0329\ne\fe\16e\u032c\13e\3e\3e\3f\3f\3f\3f\7f\u0334\nf\ff\16"+
+ "f\u0337\13f\3f\3f\3g\3g\5g\u033d\ng\3g\6g\u0340\ng\rg\16g\u0341\3h\3h"+
+ "\3i\3i\3j\3j\3j\3j\7j\u034c\nj\fj\16j\u034f\13j\3j\5j\u0352\nj\3j\5j\u0355"+
+ "\nj\3j\3j\3k\3k\3k\3k\3k\7k\u035e\nk\fk\16k\u0361\13k\3k\3k\3k\3k\3k\3"+
+ "l\6l\u0369\nl\rl\16l\u036a\3l\3l\3m\3m\3\u035f\2n\3\3\5\4\7\5\t\6\13\7"+
+ "\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21!\22#\23%\24\'\25"+
+ ")\26+\27-\30/\31\61\32\63\33\65\34\67\359\36;\37= ?!A\"C#E$G%I&K\'M(O"+
+ ")Q*S+U,W-Y.[/]\60_\61a\62c\63e\64g\65i\66k\67m8o9q:s;u{?}@\177A\u0081"+
+ "B\u0083C\u0085D\u0087E\u0089F\u008bG\u008dH\u008fI\u0091J\u0093K\u0095"+
+ "L\u0097M\u0099N\u009bO\u009dP\u009fQ\u00a1R\u00a3S\u00a5T\u00a7U\u00a9"+
+ "V\u00abW\u00adX\u00afY\u00b1Z\u00b3[\u00b5\\\u00b7]\u00b9^\u00bb_\u00bd"+
+ "`\u00bfa\u00c1b\u00c3c\u00c5d\u00c7e\u00c9f\u00cbg\u00cd\2\u00cf\2\u00d1"+
+ "\2\u00d3h\u00d5i\u00d7j\u00d9k\3\2\f\3\2))\4\2BBaa\5\2<\3\2\2\2\u017d\u017e"+
- "\7H\2\2\u017e\u017f\7W\2\2\u017f\u0180\7P\2\2\u0180\u0181\7E\2\2\u0181"+
- "\u0182\7V\2\2\u0182\u0183\7K\2\2\u0183\u0184\7Q\2\2\u0184\u0185\7P\2\2"+
- "\u0185\u0186\7U\2\2\u0186@\3\2\2\2\u0187\u0188\7I\2\2\u0188\u0189\7T\2"+
- "\2\u0189\u018a\7C\2\2\u018a\u018b\7R\2\2\u018b\u018c\7J\2\2\u018c\u018d"+
- "\7X\2\2\u018d\u018e\7K\2\2\u018e\u018f\7\\\2\2\u018fB\3\2\2\2\u0190\u0191"+
- "\7I\2\2\u0191\u0192\7T\2\2\u0192\u0193\7Q\2\2\u0193\u0194\7W\2\2\u0194"+
- "\u0195\7R\2\2\u0195D\3\2\2\2\u0196\u0197\7J\2\2\u0197\u0198\7C\2\2\u0198"+
- "\u0199\7X\2\2\u0199\u019a\7K\2\2\u019a\u019b\7P\2\2\u019b\u019c\7I\2\2"+
- "\u019cF\3\2\2\2\u019d\u019e\7K\2\2\u019e\u019f\7P\2\2\u019fH\3\2\2\2\u01a0"+
- "\u01a1\7K\2\2\u01a1\u01a2\7P\2\2\u01a2\u01a3\7P\2\2\u01a3\u01a4\7G\2\2"+
- "\u01a4\u01a5\7T\2\2\u01a5J\3\2\2\2\u01a6\u01a7\7K\2\2\u01a7\u01a8\7U\2"+
- "\2\u01a8L\3\2\2\2\u01a9\u01aa\7L\2\2\u01aa\u01ab\7Q\2\2\u01ab\u01ac\7"+
- "K\2\2\u01ac\u01ad\7P\2\2\u01adN\3\2\2\2\u01ae\u01af\7N\2\2\u01af\u01b0"+
- "\7G\2\2\u01b0\u01b1\7H\2\2\u01b1\u01b2\7V\2\2\u01b2P\3\2\2\2\u01b3\u01b4"+
- "\7N\2\2\u01b4\u01b5\7K\2\2\u01b5\u01b6\7M\2\2\u01b6\u01b7\7G\2\2\u01b7"+
- "R\3\2\2\2\u01b8\u01b9\7N\2\2\u01b9\u01ba\7K\2\2\u01ba\u01bb\7O\2\2\u01bb"+
- "\u01bc\7K\2\2\u01bc\u01bd\7V\2\2\u01bdT\3\2\2\2\u01be\u01bf\7O\2\2\u01bf"+
- "\u01c0\7C\2\2\u01c0\u01c1\7R\2\2\u01c1\u01c2\7R\2\2\u01c2\u01c3\7G\2\2"+
- "\u01c3\u01c4\7F\2\2\u01c4V\3\2\2\2\u01c5\u01c6\7O\2\2\u01c6\u01c7\7C\2"+
- "\2\u01c7\u01c8\7V\2\2\u01c8\u01c9\7E\2\2\u01c9\u01ca\7J\2\2\u01caX\3\2"+
- "\2\2\u01cb\u01cc\7P\2\2\u01cc\u01cd\7C\2\2\u01cd\u01ce\7V\2\2\u01ce\u01cf"+
- "\7W\2\2\u01cf\u01d0\7T\2\2\u01d0\u01d1\7C\2\2\u01d1\u01d2\7N\2\2\u01d2"+
- "Z\3\2\2\2\u01d3\u01d4\7P\2\2\u01d4\u01d5\7Q\2\2\u01d5\u01d6\7V\2\2\u01d6"+
- "\\\3\2\2\2\u01d7\u01d8\7P\2\2\u01d8\u01d9\7W\2\2\u01d9\u01da\7N\2\2\u01da"+
- "\u01db\7N\2\2\u01db^\3\2\2\2\u01dc\u01dd\7Q\2\2\u01dd\u01de\7P\2\2\u01de"+
- "`\3\2\2\2\u01df\u01e0\7Q\2\2\u01e0\u01e1\7R\2\2\u01e1\u01e2\7V\2\2\u01e2"+
- "\u01e3\7K\2\2\u01e3\u01e4\7O\2\2\u01e4\u01e5\7K\2\2\u01e5\u01e6\7\\\2"+
- "\2\u01e6\u01e7\7G\2\2\u01e7\u01e8\7F\2\2\u01e8b\3\2\2\2\u01e9\u01ea\7"+
- "Q\2\2\u01ea\u01eb\7T\2\2\u01ebd\3\2\2\2\u01ec\u01ed\7Q\2\2\u01ed\u01ee"+
- "\7T\2\2\u01ee\u01ef\7F\2\2\u01ef\u01f0\7G\2\2\u01f0\u01f1\7T\2\2\u01f1"+
- "f\3\2\2\2\u01f2\u01f3\7Q\2\2\u01f3\u01f4\7W\2\2\u01f4\u01f5\7V\2\2\u01f5"+
- "\u01f6\7G\2\2\u01f6\u01f7\7T\2\2\u01f7h\3\2\2\2\u01f8\u01f9\7R\2\2\u01f9"+
- "\u01fa\7C\2\2\u01fa\u01fb\7T\2\2\u01fb\u01fc\7U\2\2\u01fc\u01fd\7G\2\2"+
- "\u01fd\u01fe\7F\2\2\u01fej\3\2\2\2\u01ff\u0200\7R\2\2\u0200\u0201\7J\2"+
- "\2\u0201\u0202\7[\2\2\u0202\u0203\7U\2\2\u0203\u0204\7K\2\2\u0204\u0205"+
- "\7E\2\2\u0205\u0206\7C\2\2\u0206\u0207\7N\2\2\u0207l\3\2\2\2\u0208\u0209"+
- "\7R\2\2\u0209\u020a\7N\2\2\u020a\u020b\7C\2\2\u020b\u020c\7P\2\2\u020c"+
- "n\3\2\2\2\u020d\u020e\7T\2\2\u020e\u020f\7K\2\2\u020f\u0210\7I\2\2\u0210"+
- "\u0211\7J\2\2\u0211\u0212\7V\2\2\u0212p\3\2\2\2\u0213\u0214\7T\2\2\u0214"+
- "\u0215\7N\2\2\u0215\u0216\7K\2\2\u0216\u0217\7M\2\2\u0217\u0218\7G\2\2"+
- "\u0218r\3\2\2\2\u0219\u021a\7S\2\2\u021a\u021b\7W\2\2\u021b\u021c\7G\2"+
- "\2\u021c\u021d\7T\2\2\u021d\u021e\7[\2\2\u021et\3\2\2\2\u021f\u0220\7"+
- "U\2\2\u0220\u0221\7E\2\2\u0221\u0222\7J\2\2\u0222\u0223\7G\2\2\u0223\u0224"+
- "\7O\2\2\u0224\u0225\7C\2\2\u0225\u0226\7U\2\2\u0226v\3\2\2\2\u0227\u0228"+
- "\7U\2\2\u0228\u0229\7G\2\2\u0229\u022a\7N\2\2\u022a\u022b\7G\2\2\u022b"+
- "\u022c\7E\2\2\u022c\u022d\7V\2\2\u022dx\3\2\2\2\u022e\u022f\7U\2\2\u022f"+
- "\u0230\7J\2\2\u0230\u0231\7Q\2\2\u0231\u0232\7Y\2\2\u0232z\3\2\2\2\u0233"+
- "\u0234\7U\2\2\u0234\u0235\7[\2\2\u0235\u0236\7U\2\2\u0236|\3\2\2\2\u0237"+
- "\u0238\7V\2\2\u0238\u0239\7C\2\2\u0239\u023a\7D\2\2\u023a\u023b\7N\2\2"+
- "\u023b\u023c\7G\2\2\u023c~\3\2\2\2\u023d\u023e\7V\2\2\u023e\u023f\7C\2"+
- "\2\u023f\u0240\7D\2\2\u0240\u0241\7N\2\2\u0241\u0242\7G\2\2\u0242\u0243"+
- "\7U\2\2\u0243\u0080\3\2\2\2\u0244\u0245\7V\2\2\u0245\u0246\7G\2\2\u0246"+
- "\u0247\7Z\2\2\u0247\u0248\7V\2\2\u0248\u0082\3\2\2\2\u0249\u024a\7V\2"+
- "\2\u024a\u024b\7T\2\2\u024b\u024c\7W\2\2\u024c\u024d\7G\2\2\u024d\u0084"+
- "\3\2\2\2\u024e\u024f\7V\2\2\u024f\u0250\7[\2\2\u0250\u0251\7R\2\2\u0251"+
- "\u0252\7G\2\2\u0252\u0086\3\2\2\2\u0253\u0254\7V\2\2\u0254\u0255\7[\2"+
- "\2\u0255\u0256\7R\2\2\u0256\u0257\7G\2\2\u0257\u0258\7U\2\2\u0258\u0088"+
- "\3\2\2\2\u0259\u025a\7W\2\2\u025a\u025b\7U\2\2\u025b\u025c\7K\2\2\u025c"+
- "\u025d\7P\2\2\u025d\u025e\7I\2\2\u025e\u008a\3\2\2\2\u025f\u0260\7X\2"+
- "\2\u0260\u0261\7G\2\2\u0261\u0262\7T\2\2\u0262\u0263\7K\2\2\u0263\u0264"+
- "\7H\2\2\u0264\u0265\7[\2\2\u0265\u008c\3\2\2\2\u0266\u0267\7Y\2\2\u0267"+
- "\u0268\7J\2\2\u0268\u0269\7G\2\2\u0269\u026a\7T\2\2\u026a\u026b\7G\2\2"+
- "\u026b\u008e\3\2\2\2\u026c\u026d\7Y\2\2\u026d\u026e\7K\2\2\u026e\u026f"+
- "\7V\2\2\u026f\u0270\7J\2\2\u0270\u0090\3\2\2\2\u0271\u0272\7?\2\2\u0272"+
- "\u0092\3\2\2\2\u0273\u0274\7>\2\2\u0274\u027b\7@\2\2\u0275\u0276\7#\2"+
- "\2\u0276\u027b\7?\2\2\u0277\u0278\7>\2\2\u0278\u0279\7?\2\2\u0279\u027b"+
- "\7@\2\2\u027a\u0273\3\2\2\2\u027a\u0275\3\2\2\2\u027a\u0277\3\2\2\2\u027b"+
- "\u0094\3\2\2\2\u027c\u027d\7>\2\2\u027d\u0096\3\2\2\2\u027e\u027f\7>\2"+
- "\2\u027f\u0280\7?\2\2\u0280\u0098\3\2\2\2\u0281\u0282\7@\2\2\u0282\u009a"+
- "\3\2\2\2\u0283\u0284\7@\2\2\u0284\u0285\7?\2\2\u0285\u009c\3\2\2\2\u0286"+
- "\u0287\7-\2\2\u0287\u009e\3\2\2\2\u0288\u0289\7/\2\2\u0289\u00a0\3\2\2"+
- "\2\u028a\u028b\7,\2\2\u028b\u00a2\3\2\2\2\u028c\u028d\7\61\2\2\u028d\u00a4"+
- "\3\2\2\2\u028e\u028f\7\'\2\2\u028f\u00a6\3\2\2\2\u0290\u0291\7~\2\2\u0291"+
- "\u0292\7~\2\2\u0292\u00a8\3\2\2\2\u0293\u0294\7\60\2\2\u0294\u00aa\3\2"+
- "\2\2\u0295\u0296\7A\2\2\u0296\u00ac\3\2\2\2\u0297\u029d\7)\2\2\u0298\u029c"+
- "\n\2\2\2\u0299\u029a\7)\2\2\u029a\u029c\7)\2\2\u029b\u0298\3\2\2\2\u029b"+
- "\u0299\3\2\2\2\u029c\u029f\3\2\2\2\u029d\u029b\3\2\2\2\u029d\u029e\3\2"+
- "\2\2\u029e\u02a0\3\2\2\2\u029f\u029d\3\2\2\2\u02a0\u02a1\7)\2\2\u02a1"+
- "\u00ae\3\2\2\2\u02a2\u02a4\5\u00bf`\2\u02a3\u02a2\3\2\2\2\u02a4\u02a5"+
- "\3\2\2\2\u02a5\u02a3\3\2\2\2\u02a5\u02a6\3\2\2\2\u02a6\u00b0\3\2\2\2\u02a7"+
- "\u02a9\5\u00bf`\2\u02a8\u02a7\3\2\2\2\u02a9\u02aa\3\2\2\2\u02aa\u02a8"+
- "\3\2\2\2\u02aa\u02ab\3\2\2\2\u02ab\u02ac\3\2\2\2\u02ac\u02b0\5\u00a9U"+
- "\2\u02ad\u02af\5\u00bf`\2\u02ae\u02ad\3\2\2\2\u02af\u02b2\3\2\2\2\u02b0"+
- "\u02ae\3\2\2\2\u02b0\u02b1\3\2\2\2\u02b1\u02d2\3\2\2\2\u02b2\u02b0\3\2"+
- "\2\2\u02b3\u02b5\5\u00a9U\2\u02b4\u02b6\5\u00bf`\2\u02b5\u02b4\3\2\2\2"+
- "\u02b6\u02b7\3\2\2\2\u02b7\u02b5\3\2\2\2\u02b7\u02b8\3\2\2\2\u02b8\u02d2"+
- "\3\2\2\2\u02b9\u02bb\5\u00bf`\2\u02ba\u02b9\3\2\2\2\u02bb\u02bc\3\2\2"+
- "\2\u02bc\u02ba\3\2\2\2\u02bc\u02bd\3\2\2\2\u02bd\u02c5\3\2\2\2\u02be\u02c2"+
- "\5\u00a9U\2\u02bf\u02c1\5\u00bf`\2\u02c0\u02bf\3\2\2\2\u02c1\u02c4\3\2"+
- "\2\2\u02c2\u02c0\3\2\2\2\u02c2\u02c3\3\2\2\2\u02c3\u02c6\3\2\2\2\u02c4"+
- "\u02c2\3\2\2\2\u02c5\u02be\3\2\2\2\u02c5\u02c6\3\2\2\2\u02c6\u02c7\3\2"+
- "\2\2\u02c7\u02c8\5\u00bd_\2\u02c8\u02d2\3\2\2\2\u02c9\u02cb\5\u00a9U\2"+
- "\u02ca\u02cc\5\u00bf`\2\u02cb\u02ca\3\2\2\2\u02cc\u02cd\3\2\2\2\u02cd"+
- "\u02cb\3\2\2\2\u02cd\u02ce\3\2\2\2\u02ce\u02cf\3\2\2\2\u02cf\u02d0\5\u00bd"+
- "_\2\u02d0\u02d2\3\2\2\2\u02d1\u02a8\3\2\2\2\u02d1\u02b3\3\2\2\2\u02d1"+
- "\u02ba\3\2\2\2\u02d1\u02c9\3\2\2\2\u02d2\u00b2\3\2\2\2\u02d3\u02d6\5\u00c1"+
- "a\2\u02d4\u02d6\7a\2\2\u02d5\u02d3\3\2\2\2\u02d5\u02d4\3\2\2\2\u02d6\u02dc"+
- "\3\2\2\2\u02d7\u02db\5\u00c1a\2\u02d8\u02db\5\u00bf`\2\u02d9\u02db\t\3"+
- "\2\2\u02da\u02d7\3\2\2\2\u02da\u02d8\3\2\2\2\u02da\u02d9\3\2\2\2\u02db"+
- "\u02de\3\2\2\2\u02dc\u02da\3\2\2\2\u02dc\u02dd\3\2\2\2\u02dd\u00b4\3\2"+
- "\2\2\u02de\u02dc\3\2\2\2\u02df\u02e3\5\u00bf`\2\u02e0\u02e4\5\u00c1a\2"+
- "\u02e1\u02e4\5\u00bf`\2\u02e2\u02e4\t\4\2\2\u02e3\u02e0\3\2\2\2\u02e3"+
- "\u02e1\3\2\2\2\u02e3\u02e2\3\2\2\2\u02e4\u02e5\3\2\2\2\u02e5\u02e3\3\2"+
- "\2\2\u02e5\u02e6\3\2\2\2\u02e6\u00b6\3\2\2\2\u02e7\u02ec\5\u00c1a\2\u02e8"+
- "\u02ec\5\u00bf`\2\u02e9\u02ec\t\3\2\2\u02ea\u02ec\5\u00a1Q\2\u02eb\u02e7"+
- "\3\2\2\2\u02eb\u02e8\3\2\2\2\u02eb\u02e9\3\2\2\2\u02eb\u02ea\3\2\2\2\u02ec"+
- "\u02ed\3\2\2\2\u02ed\u02eb\3\2\2\2\u02ed\u02ee\3\2\2\2\u02ee\u00b8\3\2"+
- "\2\2\u02ef\u02f5\7$\2\2\u02f0\u02f4\n\5\2\2\u02f1\u02f2\7$\2\2\u02f2\u02f4"+
- "\7$\2\2\u02f3\u02f0\3\2\2\2\u02f3\u02f1\3\2\2\2\u02f4\u02f7\3\2\2\2\u02f5"+
- "\u02f3\3\2\2\2\u02f5\u02f6\3\2\2\2\u02f6\u02f8\3\2\2\2\u02f7\u02f5\3\2"+
- "\2\2\u02f8\u02f9\7$\2\2\u02f9\u00ba\3\2\2\2\u02fa\u0300\7b\2\2\u02fb\u02ff"+
- "\n\6\2\2\u02fc\u02fd\7b\2\2\u02fd\u02ff\7b\2\2\u02fe\u02fb\3\2\2\2\u02fe"+
- "\u02fc\3\2\2\2\u02ff\u0302\3\2\2\2\u0300\u02fe\3\2\2\2\u0300\u0301\3\2"+
- "\2\2\u0301\u0303\3\2\2\2\u0302\u0300\3\2\2\2\u0303\u0304\7b\2\2\u0304"+
- "\u00bc\3\2\2\2\u0305\u0307\7G\2\2\u0306\u0308\t\7\2\2\u0307\u0306\3\2"+
- "\2\2\u0307\u0308\3\2\2\2\u0308\u030a\3\2\2\2\u0309\u030b\5\u00bf`\2\u030a"+
- "\u0309\3\2\2\2\u030b\u030c\3\2\2\2\u030c\u030a\3\2\2\2\u030c\u030d\3\2"+
- "\2\2\u030d\u00be\3\2\2\2\u030e\u030f\t\b\2\2\u030f\u00c0\3\2\2\2\u0310"+
- "\u0311\t\t\2\2\u0311\u00c2\3\2\2\2\u0312\u0313\7/\2\2\u0313\u0314\7/\2"+
- "\2\u0314\u0318\3\2\2\2\u0315\u0317\n\n\2\2\u0316\u0315\3\2\2\2\u0317\u031a"+
- "\3\2\2\2\u0318\u0316\3\2\2\2\u0318\u0319\3\2\2\2\u0319\u031c\3\2\2\2\u031a"+
- "\u0318\3\2\2\2\u031b\u031d\7\17\2\2\u031c\u031b\3\2\2\2\u031c\u031d\3"+
- "\2\2\2\u031d\u031f\3\2\2\2\u031e\u0320\7\f\2\2\u031f\u031e\3\2\2\2\u031f"+
- "\u0320\3\2\2\2\u0320\u0321\3\2\2\2\u0321\u0322\bb\2\2\u0322\u00c4\3\2"+
- "\2\2\u0323\u0324\7\61\2\2\u0324\u0325\7,\2\2\u0325\u032a\3\2\2\2\u0326"+
- "\u0329\5\u00c5c\2\u0327\u0329\13\2\2\2\u0328\u0326\3\2\2\2\u0328\u0327"+
- "\3\2\2\2\u0329\u032c\3\2\2\2\u032a\u032b\3\2\2\2\u032a\u0328\3\2\2\2\u032b"+
- "\u032d\3\2\2\2\u032c\u032a\3\2\2\2\u032d\u032e\7,\2\2\u032e\u032f\7\61"+
- "\2\2\u032f\u0330\3\2\2\2\u0330\u0331\bc\2\2\u0331\u00c6\3\2\2\2\u0332"+
- "\u0334\t\13\2\2\u0333\u0332\3\2\2\2\u0334\u0335\3\2\2\2\u0335\u0333\3"+
- "\2\2\2\u0335\u0336\3\2\2\2\u0336\u0337\3\2\2\2\u0337\u0338\bd\2\2\u0338"+
- "\u00c8\3\2\2\2\u0339\u033a\13\2\2\2\u033a\u00ca\3\2\2\2\"\2\u027a\u029b"+
- "\u029d\u02a5\u02aa\u02b0\u02b7\u02bc\u02c2\u02c5\u02cd\u02d1\u02d5\u02da"+
- "\u02dc\u02e3\u02e5\u02eb\u02ed\u02f3\u02f5\u02fe\u0300\u0307\u030c\u0318"+
- "\u031c\u031f\u0328\u032a\u0335\3\2\3\2";
+ "\3\2\2\2\2\u00bd\3\2\2\2\2\u00bf\3\2\2\2\2\u00c1\3\2\2\2\2\u00c3\3\2\2"+
+ "\2\2\u00c5\3\2\2\2\2\u00c7\3\2\2\2\2\u00c9\3\2\2\2\2\u00cb\3\2\2\2\2\u00d3"+
+ "\3\2\2\2\2\u00d5\3\2\2\2\2\u00d7\3\2\2\2\2\u00d9\3\2\2\2\3\u00db\3\2\2"+
+ "\2\5\u00dd\3\2\2\2\7\u00df\3\2\2\2\t\u00e1\3\2\2\2\13\u00e3\3\2\2\2\r"+
+ "\u00e7\3\2\2\2\17\u00ef\3\2\2\2\21\u00f8\3\2\2\2\23\u00fc\3\2\2\2\25\u0100"+
+ "\3\2\2\2\27\u0103\3\2\2\2\31\u0107\3\2\2\2\33\u010f\3\2\2\2\35\u0112\3"+
+ "\2\2\2\37\u0117\3\2\2\2!\u011f\3\2\2\2#\u0128\3\2\2\2%\u0130\3\2\2\2\'"+
+ "\u0136\3\2\2\2)\u013b\3\2\2\2+\u0144\3\2\2\2-\u014d\3\2\2\2/\u0154\3\2"+
+ "\2\2\61\u015f\3\2\2\2\63\u0166\3\2\2\2\65\u016e\3\2\2\2\67\u0176\3\2\2"+
+ "\29\u017c\3\2\2\2;\u0183\3\2\2\2=\u0188\3\2\2\2?\u018d\3\2\2\2A\u0197"+
+ "\3\2\2\2C\u01a0\3\2\2\2E\u01a6\3\2\2\2G\u01ad\3\2\2\2I\u01b0\3\2\2\2K"+
+ "\u01b6\3\2\2\2M\u01b9\3\2\2\2O\u01be\3\2\2\2Q\u01c3\3\2\2\2S\u01c8\3\2"+
+ "\2\2U\u01ce\3\2\2\2W\u01d5\3\2\2\2Y\u01db\3\2\2\2[\u01e3\3\2\2\2]\u01e7"+
+ "\3\2\2\2_\u01ec\3\2\2\2a\u01ef\3\2\2\2c\u01f9\3\2\2\2e\u01fc\3\2\2\2g"+
+ "\u0202\3\2\2\2i\u0208\3\2\2\2k\u020f\3\2\2\2m\u0218\3\2\2\2o\u021d\3\2"+
+ "\2\2q\u0223\3\2\2\2s\u0229\3\2\2\2u\u022f\3\2\2\2w\u0237\3\2\2\2y\u023e"+
+ "\3\2\2\2{\u0243\3\2\2\2}\u0247\3\2\2\2\177\u024d\3\2\2\2\u0081\u0254\3"+
+ "\2\2\2\u0083\u0259\3\2\2\2\u0085\u025e\3\2\2\2\u0087\u0263\3\2\2\2\u0089"+
+ "\u0269\3\2\2\2\u008b\u026f\3\2\2\2\u008d\u0276\3\2\2\2\u008f\u027c\3\2"+
+ "\2\2\u0091\u0281\3\2\2\2\u0093\u0289\3\2\2\2\u0095\u028d\3\2\2\2\u0097"+
+ "\u0294\3\2\2\2\u0099\u0297\3\2\2\2\u009b\u029a\3\2\2\2\u009d\u029e\3\2"+
+ "\2\2\u009f\u02a4\3\2\2\2\u00a1\u02a6\3\2\2\2\u00a3\u02af\3\2\2\2\u00a5"+
+ "\u02b1\3\2\2\2\u00a7\u02b3\3\2\2\2\u00a9\u02b6\3\2\2\2\u00ab\u02b8\3\2"+
+ "\2\2\u00ad\u02bb\3\2\2\2\u00af\u02bd\3\2\2\2\u00b1\u02bf\3\2\2\2\u00b3"+
+ "\u02c1\3\2\2\2\u00b5\u02c3\3\2\2\2\u00b7\u02c5\3\2\2\2\u00b9\u02c8\3\2"+
+ "\2\2\u00bb\u02ca\3\2\2\2\u00bd\u02cc\3\2\2\2\u00bf\u02d8\3\2\2\2\u00c1"+
+ "\u0306\3\2\2\2\u00c3\u030a\3\2\2\2\u00c5\u0314\3\2\2\2\u00c7\u0320\3\2"+
+ "\2\2\u00c9\u0324\3\2\2\2\u00cb\u032f\3\2\2\2\u00cd\u033a\3\2\2\2\u00cf"+
+ "\u0343\3\2\2\2\u00d1\u0345\3\2\2\2\u00d3\u0347\3\2\2\2\u00d5\u0358\3\2"+
+ "\2\2\u00d7\u0368\3\2\2\2\u00d9\u036e\3\2\2\2\u00db\u00dc\7*\2\2\u00dc"+
+ "\4\3\2\2\2\u00dd\u00de\7+\2\2\u00de\6\3\2\2\2\u00df\u00e0\7.\2\2\u00e0"+
+ "\b\3\2\2\2\u00e1\u00e2\7<\2\2\u00e2\n\3\2\2\2\u00e3\u00e4\7C\2\2\u00e4"+
+ "\u00e5\7N\2\2\u00e5\u00e6\7N\2\2\u00e6\f\3\2\2\2\u00e7\u00e8\7C\2\2\u00e8"+
+ "\u00e9\7P\2\2\u00e9\u00ea\7C\2\2\u00ea\u00eb\7N\2\2\u00eb\u00ec\7[\2\2"+
+ "\u00ec\u00ed\7\\\2\2\u00ed\u00ee\7G\2\2\u00ee\16\3\2\2\2\u00ef\u00f0\7"+
+ "C\2\2\u00f0\u00f1\7P\2\2\u00f1\u00f2\7C\2\2\u00f2\u00f3\7N\2\2\u00f3\u00f4"+
+ "\7[\2\2\u00f4\u00f5\7\\\2\2\u00f5\u00f6\7G\2\2\u00f6\u00f7\7F\2\2\u00f7"+
+ "\20\3\2\2\2\u00f8\u00f9\7C\2\2\u00f9\u00fa\7P\2\2\u00fa\u00fb\7F\2\2\u00fb"+
+ "\22\3\2\2\2\u00fc\u00fd\7C\2\2\u00fd\u00fe\7P\2\2\u00fe\u00ff\7[\2\2\u00ff"+
+ "\24\3\2\2\2\u0100\u0101\7C\2\2\u0101\u0102\7U\2\2\u0102\26\3\2\2\2\u0103"+
+ "\u0104\7C\2\2\u0104\u0105\7U\2\2\u0105\u0106\7E\2\2\u0106\30\3\2\2\2\u0107"+
+ "\u0108\7D\2\2\u0108\u0109\7G\2\2\u0109\u010a\7V\2\2\u010a\u010b\7Y\2\2"+
+ "\u010b\u010c\7G\2\2\u010c\u010d\7G\2\2\u010d\u010e\7P\2\2\u010e\32\3\2"+
+ "\2\2\u010f\u0110\7D\2\2\u0110\u0111\7[\2\2\u0111\34\3\2\2\2\u0112\u0113"+
+ "\7E\2\2\u0113\u0114\7C\2\2\u0114\u0115\7U\2\2\u0115\u0116\7V\2\2\u0116"+
+ "\36\3\2\2\2\u0117\u0118\7E\2\2\u0118\u0119\7C\2\2\u0119\u011a\7V\2\2\u011a"+
+ "\u011b\7C\2\2\u011b\u011c\7N\2\2\u011c\u011d\7Q\2\2\u011d\u011e\7I\2\2"+
+ "\u011e \3\2\2\2\u011f\u0120\7E\2\2\u0120\u0121\7C\2\2\u0121\u0122\7V\2"+
+ "\2\u0122\u0123\7C\2\2\u0123\u0124\7N\2\2\u0124\u0125\7Q\2\2\u0125\u0126"+
+ "\7I\2\2\u0126\u0127\7U\2\2\u0127\"\3\2\2\2\u0128\u0129\7E\2\2\u0129\u012a"+
+ "\7Q\2\2\u012a\u012b\7N\2\2\u012b\u012c\7W\2\2\u012c\u012d\7O\2\2\u012d"+
+ "\u012e\7P\2\2\u012e\u012f\7U\2\2\u012f$\3\2\2\2\u0130\u0131\7F\2\2\u0131"+
+ "\u0132\7G\2\2\u0132\u0133\7D\2\2\u0133\u0134\7W\2\2\u0134\u0135\7I\2\2"+
+ "\u0135&\3\2\2\2\u0136\u0137\7F\2\2\u0137\u0138\7G\2\2\u0138\u0139\7U\2"+
+ "\2\u0139\u013a\7E\2\2\u013a(\3\2\2\2\u013b\u013c\7F\2\2\u013c\u013d\7"+
+ "G\2\2\u013d\u013e\7U\2\2\u013e\u013f\7E\2\2\u013f\u0140\7T\2\2\u0140\u0141"+
+ "\7K\2\2\u0141\u0142\7D\2\2\u0142\u0143\7G\2\2\u0143*\3\2\2\2\u0144\u0145"+
+ "\7F\2\2\u0145\u0146\7K\2\2\u0146\u0147\7U\2\2\u0147\u0148\7V\2\2\u0148"+
+ "\u0149\7K\2\2\u0149\u014a\7P\2\2\u014a\u014b\7E\2\2\u014b\u014c\7V\2\2"+
+ "\u014c,\3\2\2\2\u014d\u014e\7G\2\2\u014e\u014f\7U\2\2\u014f\u0150\7E\2"+
+ "\2\u0150\u0151\7C\2\2\u0151\u0152\7R\2\2\u0152\u0153\7G\2\2\u0153.\3\2"+
+ "\2\2\u0154\u0155\7G\2\2\u0155\u0156\7Z\2\2\u0156\u0157\7G\2\2\u0157\u0158"+
+ "\7E\2\2\u0158\u0159\7W\2\2\u0159\u015a\7V\2\2\u015a\u015b\7C\2\2\u015b"+
+ "\u015c\7D\2\2\u015c\u015d\7N\2\2\u015d\u015e\7G\2\2\u015e\60\3\2\2\2\u015f"+
+ "\u0160\7G\2\2\u0160\u0161\7Z\2\2\u0161\u0162\7K\2\2\u0162\u0163\7U\2\2"+
+ "\u0163\u0164\7V\2\2\u0164\u0165\7U\2\2\u0165\62\3\2\2\2\u0166\u0167\7"+
+ "G\2\2\u0167\u0168\7Z\2\2\u0168\u0169\7R\2\2\u0169\u016a\7N\2\2\u016a\u016b"+
+ "\7C\2\2\u016b\u016c\7K\2\2\u016c\u016d\7P\2\2\u016d\64\3\2\2\2\u016e\u016f"+
+ "\7G\2\2\u016f\u0170\7Z\2\2\u0170\u0171\7V\2\2\u0171\u0172\7T\2\2\u0172"+
+ "\u0173\7C\2\2\u0173\u0174\7E\2\2\u0174\u0175\7V\2\2\u0175\66\3\2\2\2\u0176"+
+ "\u0177\7H\2\2\u0177\u0178\7C\2\2\u0178\u0179\7N\2\2\u0179\u017a\7U\2\2"+
+ "\u017a\u017b\7G\2\2\u017b8\3\2\2\2\u017c\u017d\7H\2\2\u017d\u017e\7Q\2"+
+ "\2\u017e\u017f\7T\2\2\u017f\u0180\7O\2\2\u0180\u0181\7C\2\2\u0181\u0182"+
+ "\7V\2\2\u0182:\3\2\2\2\u0183\u0184\7H\2\2\u0184\u0185\7T\2\2\u0185\u0186"+
+ "\7Q\2\2\u0186\u0187\7O\2\2\u0187<\3\2\2\2\u0188\u0189\7H\2\2\u0189\u018a"+
+ "\7W\2\2\u018a\u018b\7N\2\2\u018b\u018c\7N\2\2\u018c>\3\2\2\2\u018d\u018e"+
+ "\7H\2\2\u018e\u018f\7W\2\2\u018f\u0190\7P\2\2\u0190\u0191\7E\2\2\u0191"+
+ "\u0192\7V\2\2\u0192\u0193\7K\2\2\u0193\u0194\7Q\2\2\u0194\u0195\7P\2\2"+
+ "\u0195\u0196\7U\2\2\u0196@\3\2\2\2\u0197\u0198\7I\2\2\u0198\u0199\7T\2"+
+ "\2\u0199\u019a\7C\2\2\u019a\u019b\7R\2\2\u019b\u019c\7J\2\2\u019c\u019d"+
+ "\7X\2\2\u019d\u019e\7K\2\2\u019e\u019f\7\\\2\2\u019fB\3\2\2\2\u01a0\u01a1"+
+ "\7I\2\2\u01a1\u01a2\7T\2\2\u01a2\u01a3\7Q\2\2\u01a3\u01a4\7W\2\2\u01a4"+
+ "\u01a5\7R\2\2\u01a5D\3\2\2\2\u01a6\u01a7\7J\2\2\u01a7\u01a8\7C\2\2\u01a8"+
+ "\u01a9\7X\2\2\u01a9\u01aa\7K\2\2\u01aa\u01ab\7P\2\2\u01ab\u01ac\7I\2\2"+
+ "\u01acF\3\2\2\2\u01ad\u01ae\7K\2\2\u01ae\u01af\7P\2\2\u01afH\3\2\2\2\u01b0"+
+ "\u01b1\7K\2\2\u01b1\u01b2\7P\2\2\u01b2\u01b3\7P\2\2\u01b3\u01b4\7G\2\2"+
+ "\u01b4\u01b5\7T\2\2\u01b5J\3\2\2\2\u01b6\u01b7\7K\2\2\u01b7\u01b8\7U\2"+
+ "\2\u01b8L\3\2\2\2\u01b9\u01ba\7L\2\2\u01ba\u01bb\7Q\2\2\u01bb\u01bc\7"+
+ "K\2\2\u01bc\u01bd\7P\2\2\u01bdN\3\2\2\2\u01be\u01bf\7N\2\2\u01bf\u01c0"+
+ "\7G\2\2\u01c0\u01c1\7H\2\2\u01c1\u01c2\7V\2\2\u01c2P\3\2\2\2\u01c3\u01c4"+
+ "\7N\2\2\u01c4\u01c5\7K\2\2\u01c5\u01c6\7M\2\2\u01c6\u01c7\7G\2\2\u01c7"+
+ "R\3\2\2\2\u01c8\u01c9\7N\2\2\u01c9\u01ca\7K\2\2\u01ca\u01cb\7O\2\2\u01cb"+
+ "\u01cc\7K\2\2\u01cc\u01cd\7V\2\2\u01cdT\3\2\2\2\u01ce\u01cf\7O\2\2\u01cf"+
+ "\u01d0\7C\2\2\u01d0\u01d1\7R\2\2\u01d1\u01d2\7R\2\2\u01d2\u01d3\7G\2\2"+
+ "\u01d3\u01d4\7F\2\2\u01d4V\3\2\2\2\u01d5\u01d6\7O\2\2\u01d6\u01d7\7C\2"+
+ "\2\u01d7\u01d8\7V\2\2\u01d8\u01d9\7E\2\2\u01d9\u01da\7J\2\2\u01daX\3\2"+
+ "\2\2\u01db\u01dc\7P\2\2\u01dc\u01dd\7C\2\2\u01dd\u01de\7V\2\2\u01de\u01df"+
+ "\7W\2\2\u01df\u01e0\7T\2\2\u01e0\u01e1\7C\2\2\u01e1\u01e2\7N\2\2\u01e2"+
+ "Z\3\2\2\2\u01e3\u01e4\7P\2\2\u01e4\u01e5\7Q\2\2\u01e5\u01e6\7V\2\2\u01e6"+
+ "\\\3\2\2\2\u01e7\u01e8\7P\2\2\u01e8\u01e9\7W\2\2\u01e9\u01ea\7N\2\2\u01ea"+
+ "\u01eb\7N\2\2\u01eb^\3\2\2\2\u01ec\u01ed\7Q\2\2\u01ed\u01ee\7P\2\2\u01ee"+
+ "`\3\2\2\2\u01ef\u01f0\7Q\2\2\u01f0\u01f1\7R\2\2\u01f1\u01f2\7V\2\2\u01f2"+
+ "\u01f3\7K\2\2\u01f3\u01f4\7O\2\2\u01f4\u01f5\7K\2\2\u01f5\u01f6\7\\\2"+
+ "\2\u01f6\u01f7\7G\2\2\u01f7\u01f8\7F\2\2\u01f8b\3\2\2\2\u01f9\u01fa\7"+
+ "Q\2\2\u01fa\u01fb\7T\2\2\u01fbd\3\2\2\2\u01fc\u01fd\7Q\2\2\u01fd\u01fe"+
+ "\7T\2\2\u01fe\u01ff\7F\2\2\u01ff\u0200\7G\2\2\u0200\u0201\7T\2\2\u0201"+
+ "f\3\2\2\2\u0202\u0203\7Q\2\2\u0203\u0204\7W\2\2\u0204\u0205\7V\2\2\u0205"+
+ "\u0206\7G\2\2\u0206\u0207\7T\2\2\u0207h\3\2\2\2\u0208\u0209\7R\2\2\u0209"+
+ "\u020a\7C\2\2\u020a\u020b\7T\2\2\u020b\u020c\7U\2\2\u020c\u020d\7G\2\2"+
+ "\u020d\u020e\7F\2\2\u020ej\3\2\2\2\u020f\u0210\7R\2\2\u0210\u0211\7J\2"+
+ "\2\u0211\u0212\7[\2\2\u0212\u0213\7U\2\2\u0213\u0214\7K\2\2\u0214\u0215"+
+ "\7E\2\2\u0215\u0216\7C\2\2\u0216\u0217\7N\2\2\u0217l\3\2\2\2\u0218\u0219"+
+ "\7R\2\2\u0219\u021a\7N\2\2\u021a\u021b\7C\2\2\u021b\u021c\7P\2\2\u021c"+
+ "n\3\2\2\2\u021d\u021e\7T\2\2\u021e\u021f\7K\2\2\u021f\u0220\7I\2\2\u0220"+
+ "\u0221\7J\2\2\u0221\u0222\7V\2\2\u0222p\3\2\2\2\u0223\u0224\7T\2\2\u0224"+
+ "\u0225\7N\2\2\u0225\u0226\7K\2\2\u0226\u0227\7M\2\2\u0227\u0228\7G\2\2"+
+ "\u0228r\3\2\2\2\u0229\u022a\7S\2\2\u022a\u022b\7W\2\2\u022b\u022c\7G\2"+
+ "\2\u022c\u022d\7T\2\2\u022d\u022e\7[\2\2\u022et\3\2\2\2\u022f\u0230\7"+
+ "U\2\2\u0230\u0231\7E\2\2\u0231\u0232\7J\2\2\u0232\u0233\7G\2\2\u0233\u0234"+
+ "\7O\2\2\u0234\u0235\7C\2\2\u0235\u0236\7U\2\2\u0236v\3\2\2\2\u0237\u0238"+
+ "\7U\2\2\u0238\u0239\7G\2\2\u0239\u023a\7N\2\2\u023a\u023b\7G\2\2\u023b"+
+ "\u023c\7E\2\2\u023c\u023d\7V\2\2\u023dx\3\2\2\2\u023e\u023f\7U\2\2\u023f"+
+ "\u0240\7J\2\2\u0240\u0241\7Q\2\2\u0241\u0242\7Y\2\2\u0242z\3\2\2\2\u0243"+
+ "\u0244\7U\2\2\u0244\u0245\7[\2\2\u0245\u0246\7U\2\2\u0246|\3\2\2\2\u0247"+
+ "\u0248\7V\2\2\u0248\u0249\7C\2\2\u0249\u024a\7D\2\2\u024a\u024b\7N\2\2"+
+ "\u024b\u024c\7G\2\2\u024c~\3\2\2\2\u024d\u024e\7V\2\2\u024e\u024f\7C\2"+
+ "\2\u024f\u0250\7D\2\2\u0250\u0251\7N\2\2\u0251\u0252\7G\2\2\u0252\u0253"+
+ "\7U\2\2\u0253\u0080\3\2\2\2\u0254\u0255\7V\2\2\u0255\u0256\7G\2\2\u0256"+
+ "\u0257\7Z\2\2\u0257\u0258\7V\2\2\u0258\u0082\3\2\2\2\u0259\u025a\7V\2"+
+ "\2\u025a\u025b\7T\2\2\u025b\u025c\7W\2\2\u025c\u025d\7G\2\2\u025d\u0084"+
+ "\3\2\2\2\u025e\u025f\7V\2\2\u025f\u0260\7[\2\2\u0260\u0261\7R\2\2\u0261"+
+ "\u0262\7G\2\2\u0262\u0086\3\2\2\2\u0263\u0264\7V\2\2\u0264\u0265\7[\2"+
+ "\2\u0265\u0266\7R\2\2\u0266\u0267\7G\2\2\u0267\u0268\7U\2\2\u0268\u0088"+
+ "\3\2\2\2\u0269\u026a\7W\2\2\u026a\u026b\7U\2\2\u026b\u026c\7K\2\2\u026c"+
+ "\u026d\7P\2\2\u026d\u026e\7I\2\2\u026e\u008a\3\2\2\2\u026f\u0270\7X\2"+
+ "\2\u0270\u0271\7G\2\2\u0271\u0272\7T\2\2\u0272\u0273\7K\2\2\u0273\u0274"+
+ "\7H\2\2\u0274\u0275\7[\2\2\u0275\u008c\3\2\2\2\u0276\u0277\7Y\2\2\u0277"+
+ "\u0278\7J\2\2\u0278\u0279\7G\2\2\u0279\u027a\7T\2\2\u027a\u027b\7G\2\2"+
+ "\u027b\u008e\3\2\2\2\u027c\u027d\7Y\2\2\u027d\u027e\7K\2\2\u027e\u027f"+
+ "\7V\2\2\u027f\u0280\7J\2\2\u0280\u0090\3\2\2\2\u0281\u0282\7}\2\2\u0282"+
+ "\u0283\7G\2\2\u0283\u0284\7U\2\2\u0284\u0285\7E\2\2\u0285\u0286\7C\2\2"+
+ "\u0286\u0287\7R\2\2\u0287\u0288\7G\2\2\u0288\u0092\3\2\2\2\u0289\u028a"+
+ "\7}\2\2\u028a\u028b\7H\2\2\u028b\u028c\7P\2\2\u028c\u0094\3\2\2\2\u028d"+
+ "\u028e\7}\2\2\u028e\u028f\7N\2\2\u028f\u0290\7K\2\2\u0290\u0291\7O\2\2"+
+ "\u0291\u0292\7K\2\2\u0292\u0293\7V\2\2\u0293\u0096\3\2\2\2\u0294\u0295"+
+ "\7}\2\2\u0295\u0296\7F\2\2\u0296\u0098\3\2\2\2\u0297\u0298\7}\2\2\u0298"+
+ "\u0299\7V\2\2\u0299\u009a\3\2\2\2\u029a\u029b\7}\2\2\u029b\u029c\7V\2"+
+ "\2\u029c\u029d\7U\2\2\u029d\u009c\3\2\2\2\u029e\u029f\7}\2\2\u029f\u02a0"+
+ "\7I\2\2\u02a0\u02a1\7W\2\2\u02a1\u02a2\7K\2\2\u02a2\u02a3\7F\2\2\u02a3"+
+ "\u009e\3\2\2\2\u02a4\u02a5\7\177\2\2\u02a5\u00a0\3\2\2\2\u02a6\u02a7\7"+
+ "?\2\2\u02a7\u00a2\3\2\2\2\u02a8\u02a9\7>\2\2\u02a9\u02b0\7@\2\2\u02aa"+
+ "\u02ab\7#\2\2\u02ab\u02b0\7?\2\2\u02ac\u02ad\7>\2\2\u02ad\u02ae\7?\2\2"+
+ "\u02ae\u02b0\7@\2\2\u02af\u02a8\3\2\2\2\u02af\u02aa\3\2\2\2\u02af\u02ac"+
+ "\3\2\2\2\u02b0\u00a4\3\2\2\2\u02b1\u02b2\7>\2\2\u02b2\u00a6\3\2\2\2\u02b3"+
+ "\u02b4\7>\2\2\u02b4\u02b5\7?\2\2\u02b5\u00a8\3\2\2\2\u02b6\u02b7\7@\2"+
+ "\2\u02b7\u00aa\3\2\2\2\u02b8\u02b9\7@\2\2\u02b9\u02ba\7?\2\2\u02ba\u00ac"+
+ "\3\2\2\2\u02bb\u02bc\7-\2\2\u02bc\u00ae\3\2\2\2\u02bd\u02be\7/\2\2\u02be"+
+ "\u00b0\3\2\2\2\u02bf\u02c0\7,\2\2\u02c0\u00b2\3\2\2\2\u02c1\u02c2\7\61"+
+ "\2\2\u02c2\u00b4\3\2\2\2\u02c3\u02c4\7\'\2\2\u02c4\u00b6\3\2\2\2\u02c5"+
+ "\u02c6\7~\2\2\u02c6\u02c7\7~\2\2\u02c7\u00b8\3\2\2\2\u02c8\u02c9\7\60"+
+ "\2\2\u02c9\u00ba\3\2\2\2\u02ca\u02cb\7A\2\2\u02cb\u00bc\3\2\2\2\u02cc"+
+ "\u02d2\7)\2\2\u02cd\u02d1\n\2\2\2\u02ce\u02cf\7)\2\2\u02cf\u02d1\7)\2"+
+ "\2\u02d0\u02cd\3\2\2\2\u02d0\u02ce\3\2\2\2\u02d1\u02d4\3\2\2\2\u02d2\u02d0"+
+ "\3\2\2\2\u02d2\u02d3\3\2\2\2\u02d3\u02d5\3\2\2\2\u02d4\u02d2\3\2\2\2\u02d5"+
+ "\u02d6\7)\2\2\u02d6\u00be\3\2\2\2\u02d7\u02d9\5\u00cfh\2\u02d8\u02d7\3"+
+ "\2\2\2\u02d9\u02da\3\2\2\2\u02da\u02d8\3\2\2\2\u02da\u02db\3\2\2\2\u02db"+
+ "\u00c0\3\2\2\2\u02dc\u02de\5\u00cfh\2\u02dd\u02dc\3\2\2\2\u02de\u02df"+
+ "\3\2\2\2\u02df\u02dd\3\2\2\2\u02df\u02e0\3\2\2\2\u02e0\u02e1\3\2\2\2\u02e1"+
+ "\u02e5\5\u00b9]\2\u02e2\u02e4\5\u00cfh\2\u02e3\u02e2\3\2\2\2\u02e4\u02e7"+
+ "\3\2\2\2\u02e5\u02e3\3\2\2\2\u02e5\u02e6\3\2\2\2\u02e6\u0307\3\2\2\2\u02e7"+
+ "\u02e5\3\2\2\2\u02e8\u02ea\5\u00b9]\2\u02e9\u02eb\5\u00cfh\2\u02ea\u02e9"+
+ "\3\2\2\2\u02eb\u02ec\3\2\2\2\u02ec\u02ea\3\2\2\2\u02ec\u02ed\3\2\2\2\u02ed"+
+ "\u0307\3\2\2\2\u02ee\u02f0\5\u00cfh\2\u02ef\u02ee\3\2\2\2\u02f0\u02f1"+
+ "\3\2\2\2\u02f1\u02ef\3\2\2\2\u02f1\u02f2\3\2\2\2\u02f2\u02fa\3\2\2\2\u02f3"+
+ "\u02f7\5\u00b9]\2\u02f4\u02f6\5\u00cfh\2\u02f5\u02f4\3\2\2\2\u02f6\u02f9"+
+ "\3\2\2\2\u02f7\u02f5\3\2\2\2\u02f7\u02f8\3\2\2\2\u02f8\u02fb\3\2\2\2\u02f9"+
+ "\u02f7\3\2\2\2\u02fa\u02f3\3\2\2\2\u02fa\u02fb\3\2\2\2\u02fb\u02fc\3\2"+
+ "\2\2\u02fc\u02fd\5\u00cdg\2\u02fd\u0307\3\2\2\2\u02fe\u0300\5\u00b9]\2"+
+ "\u02ff\u0301\5\u00cfh\2\u0300\u02ff\3\2\2\2\u0301\u0302\3\2\2\2\u0302"+
+ "\u0300\3\2\2\2\u0302\u0303\3\2\2\2\u0303\u0304\3\2\2\2\u0304\u0305\5\u00cd"+
+ "g\2\u0305\u0307\3\2\2\2\u0306\u02dd\3\2\2\2\u0306\u02e8\3\2\2\2\u0306"+
+ "\u02ef\3\2\2\2\u0306\u02fe\3\2\2\2\u0307\u00c2\3\2\2\2\u0308\u030b\5\u00d1"+
+ "i\2\u0309\u030b\7a\2\2\u030a\u0308\3\2\2\2\u030a\u0309\3\2\2\2\u030b\u0311"+
+ "\3\2\2\2\u030c\u0310\5\u00d1i\2\u030d\u0310\5\u00cfh\2\u030e\u0310\t\3"+
+ "\2\2\u030f\u030c\3\2\2\2\u030f\u030d\3\2\2\2\u030f\u030e\3\2\2\2\u0310"+
+ "\u0313\3\2\2\2\u0311\u030f\3\2\2\2\u0311\u0312\3\2\2\2\u0312\u00c4\3\2"+
+ "\2\2\u0313\u0311\3\2\2\2\u0314\u0318\5\u00cfh\2\u0315\u0319\5\u00d1i\2"+
+ "\u0316\u0319\5\u00cfh\2\u0317\u0319\t\4\2\2\u0318\u0315\3\2\2\2\u0318"+
+ "\u0316\3\2\2\2\u0318\u0317\3\2\2\2\u0319\u031a\3\2\2\2\u031a\u0318\3\2"+
+ "\2\2\u031a\u031b\3\2\2\2\u031b\u00c6\3\2\2\2\u031c\u0321\5\u00d1i\2\u031d"+
+ "\u0321\5\u00cfh\2\u031e\u0321\t\3\2\2\u031f\u0321\5\u00b1Y\2\u0320\u031c"+
+ "\3\2\2\2\u0320\u031d\3\2\2\2\u0320\u031e\3\2\2\2\u0320\u031f\3\2\2\2\u0321"+
+ "\u0322\3\2\2\2\u0322\u0320\3\2\2\2\u0322\u0323\3\2\2\2\u0323\u00c8\3\2"+
+ "\2\2\u0324\u032a\7$\2\2\u0325\u0329\n\5\2\2\u0326\u0327\7$\2\2\u0327\u0329"+
+ "\7$\2\2\u0328\u0325\3\2\2\2\u0328\u0326\3\2\2\2\u0329\u032c\3\2\2\2\u032a"+
+ "\u0328\3\2\2\2\u032a\u032b\3\2\2\2\u032b\u032d\3\2\2\2\u032c\u032a\3\2"+
+ "\2\2\u032d\u032e\7$\2\2\u032e\u00ca\3\2\2\2\u032f\u0335\7b\2\2\u0330\u0334"+
+ "\n\6\2\2\u0331\u0332\7b\2\2\u0332\u0334\7b\2\2\u0333\u0330\3\2\2\2\u0333"+
+ "\u0331\3\2\2\2\u0334\u0337\3\2\2\2\u0335\u0333\3\2\2\2\u0335\u0336\3\2"+
+ "\2\2\u0336\u0338\3\2\2\2\u0337\u0335\3\2\2\2\u0338\u0339\7b\2\2\u0339"+
+ "\u00cc\3\2\2\2\u033a\u033c\7G\2\2\u033b\u033d\t\7\2\2\u033c\u033b\3\2"+
+ "\2\2\u033c\u033d\3\2\2\2\u033d\u033f\3\2\2\2\u033e\u0340\5\u00cfh\2\u033f"+
+ "\u033e\3\2\2\2\u0340\u0341\3\2\2\2\u0341\u033f\3\2\2\2\u0341\u0342\3\2"+
+ "\2\2\u0342\u00ce\3\2\2\2\u0343\u0344\t\b\2\2\u0344\u00d0\3\2\2\2\u0345"+
+ "\u0346\t\t\2\2\u0346\u00d2\3\2\2\2\u0347\u0348\7/\2\2\u0348\u0349\7/\2"+
+ "\2\u0349\u034d\3\2\2\2\u034a\u034c\n\n\2\2\u034b\u034a\3\2\2\2\u034c\u034f"+
+ "\3\2\2\2\u034d\u034b\3\2\2\2\u034d\u034e\3\2\2\2\u034e\u0351\3\2\2\2\u034f"+
+ "\u034d\3\2\2\2\u0350\u0352\7\17\2\2\u0351\u0350\3\2\2\2\u0351\u0352\3"+
+ "\2\2\2\u0352\u0354\3\2\2\2\u0353\u0355\7\f\2\2\u0354\u0353\3\2\2\2\u0354"+
+ "\u0355\3\2\2\2\u0355\u0356\3\2\2\2\u0356\u0357\bj\2\2\u0357\u00d4\3\2"+
+ "\2\2\u0358\u0359\7\61\2\2\u0359\u035a\7,\2\2\u035a\u035f\3\2\2\2\u035b"+
+ "\u035e\5\u00d5k\2\u035c\u035e\13\2\2\2\u035d\u035b\3\2\2\2\u035d\u035c"+
+ "\3\2\2\2\u035e\u0361\3\2\2\2\u035f\u0360\3\2\2\2\u035f\u035d\3\2\2\2\u0360"+
+ "\u0362\3\2\2\2\u0361\u035f\3\2\2\2\u0362\u0363\7,\2\2\u0363\u0364\7\61"+
+ "\2\2\u0364\u0365\3\2\2\2\u0365\u0366\bk\2\2\u0366\u00d6\3\2\2\2\u0367"+
+ "\u0369\t\13\2\2\u0368\u0367\3\2\2\2\u0369\u036a\3\2\2\2\u036a\u0368\3"+
+ "\2\2\2\u036a\u036b\3\2\2\2\u036b\u036c\3\2\2\2\u036c\u036d\bl\2\2\u036d"+
+ "\u00d8\3\2\2\2\u036e\u036f\13\2\2\2\u036f\u00da\3\2\2\2\"\2\u02af\u02d0"+
+ "\u02d2\u02da\u02df\u02e5\u02ec\u02f1\u02f7\u02fa\u0302\u0306\u030a\u030f"+
+ "\u0311\u0318\u031a\u0320\u0322\u0328\u032a\u0333\u0335\u033c\u0341\u034d"+
+ "\u0351\u0354\u035d\u035f\u036a\3\2\3\2";
public static final ATN _ATN =
new ATNDeserializer().deserialize(_serializedATN.toCharArray());
static {
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java
index 48f6eb4a7c8..70fe777384c 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java
@@ -1,8 +1,3 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
// ANTLR GENERATED CODE: DO NOT EDIT
package org.elasticsearch.xpack.sql.parser;
import org.antlr.v4.runtime.tree.ParseTreeListener;
@@ -196,6 +191,16 @@ interface SqlBaseListener extends ParseTreeListener {
* @param ctx the parse tree
*/
void exitQueryNoWith(SqlBaseParser.QueryNoWithContext ctx);
+ /**
+ * Enter a parse tree produced by {@link SqlBaseParser#limitClause}.
+ * @param ctx the parse tree
+ */
+ void enterLimitClause(SqlBaseParser.LimitClauseContext ctx);
+ /**
+ * Exit a parse tree produced by {@link SqlBaseParser#limitClause}.
+ * @param ctx the parse tree
+ */
+ void exitLimitClause(SqlBaseParser.LimitClauseContext ctx);
/**
* Enter a parse tree produced by the {@code queryPrimaryDefault}
* labeled alternative in {@link SqlBaseParser#queryTerm}.
@@ -514,6 +519,16 @@ interface SqlBaseListener extends ParseTreeListener {
* @param ctx the parse tree
*/
void exitPattern(SqlBaseParser.PatternContext ctx);
+ /**
+ * Enter a parse tree produced by {@link SqlBaseParser#patternEscape}.
+ * @param ctx the parse tree
+ */
+ void enterPatternEscape(SqlBaseParser.PatternEscapeContext ctx);
+ /**
+ * Exit a parse tree produced by {@link SqlBaseParser#patternEscape}.
+ * @param ctx the parse tree
+ */
+ void exitPatternEscape(SqlBaseParser.PatternEscapeContext ctx);
/**
* Enter a parse tree produced by the {@code valueExpressionDefault}
* labeled alternative in {@link SqlBaseParser#valueExpression}.
@@ -611,17 +626,17 @@ interface SqlBaseListener extends ParseTreeListener {
*/
void exitStar(SqlBaseParser.StarContext ctx);
/**
- * Enter a parse tree produced by the {@code functionCall}
+ * Enter a parse tree produced by the {@code function}
* labeled alternative in {@link SqlBaseParser#primaryExpression}.
* @param ctx the parse tree
*/
- void enterFunctionCall(SqlBaseParser.FunctionCallContext ctx);
+ void enterFunction(SqlBaseParser.FunctionContext ctx);
/**
- * Exit a parse tree produced by the {@code functionCall}
+ * Exit a parse tree produced by the {@code function}
* labeled alternative in {@link SqlBaseParser#primaryExpression}.
* @param ctx the parse tree
*/
- void exitFunctionCall(SqlBaseParser.FunctionCallContext ctx);
+ void exitFunction(SqlBaseParser.FunctionContext ctx);
/**
* Enter a parse tree produced by the {@code subqueryExpression}
* labeled alternative in {@link SqlBaseParser#primaryExpression}.
@@ -670,6 +685,66 @@ interface SqlBaseListener extends ParseTreeListener {
* @param ctx the parse tree
*/
void exitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx);
+ /**
+ * Enter a parse tree produced by {@link SqlBaseParser#castExpression}.
+ * @param ctx the parse tree
+ */
+ void enterCastExpression(SqlBaseParser.CastExpressionContext ctx);
+ /**
+ * Exit a parse tree produced by {@link SqlBaseParser#castExpression}.
+ * @param ctx the parse tree
+ */
+ void exitCastExpression(SqlBaseParser.CastExpressionContext ctx);
+ /**
+ * Enter a parse tree produced by {@link SqlBaseParser#castTemplate}.
+ * @param ctx the parse tree
+ */
+ void enterCastTemplate(SqlBaseParser.CastTemplateContext ctx);
+ /**
+ * Exit a parse tree produced by {@link SqlBaseParser#castTemplate}.
+ * @param ctx the parse tree
+ */
+ void exitCastTemplate(SqlBaseParser.CastTemplateContext ctx);
+ /**
+ * Enter a parse tree produced by {@link SqlBaseParser#extractExpression}.
+ * @param ctx the parse tree
+ */
+ void enterExtractExpression(SqlBaseParser.ExtractExpressionContext ctx);
+ /**
+ * Exit a parse tree produced by {@link SqlBaseParser#extractExpression}.
+ * @param ctx the parse tree
+ */
+ void exitExtractExpression(SqlBaseParser.ExtractExpressionContext ctx);
+ /**
+ * Enter a parse tree produced by {@link SqlBaseParser#extractTemplate}.
+ * @param ctx the parse tree
+ */
+ void enterExtractTemplate(SqlBaseParser.ExtractTemplateContext ctx);
+ /**
+ * Exit a parse tree produced by {@link SqlBaseParser#extractTemplate}.
+ * @param ctx the parse tree
+ */
+ void exitExtractTemplate(SqlBaseParser.ExtractTemplateContext ctx);
+ /**
+ * Enter a parse tree produced by {@link SqlBaseParser#functionExpression}.
+ * @param ctx the parse tree
+ */
+ void enterFunctionExpression(SqlBaseParser.FunctionExpressionContext ctx);
+ /**
+ * Exit a parse tree produced by {@link SqlBaseParser#functionExpression}.
+ * @param ctx the parse tree
+ */
+ void exitFunctionExpression(SqlBaseParser.FunctionExpressionContext ctx);
+ /**
+ * Enter a parse tree produced by {@link SqlBaseParser#functionTemplate}.
+ * @param ctx the parse tree
+ */
+ void enterFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx);
+ /**
+ * Exit a parse tree produced by {@link SqlBaseParser#functionTemplate}.
+ * @param ctx the parse tree
+ */
+ void exitFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx);
/**
* Enter a parse tree produced by the {@code nullLiteral}
* labeled alternative in {@link SqlBaseParser#constant}.
@@ -730,6 +805,54 @@ interface SqlBaseListener extends ParseTreeListener {
* @param ctx the parse tree
*/
void exitParamLiteral(SqlBaseParser.ParamLiteralContext ctx);
+ /**
+ * Enter a parse tree produced by the {@code dateEscapedLiteral}
+ * labeled alternative in {@link SqlBaseParser#constant}.
+ * @param ctx the parse tree
+ */
+ void enterDateEscapedLiteral(SqlBaseParser.DateEscapedLiteralContext ctx);
+ /**
+ * Exit a parse tree produced by the {@code dateEscapedLiteral}
+ * labeled alternative in {@link SqlBaseParser#constant}.
+ * @param ctx the parse tree
+ */
+ void exitDateEscapedLiteral(SqlBaseParser.DateEscapedLiteralContext ctx);
+ /**
+ * Enter a parse tree produced by the {@code timeEscapedLiteral}
+ * labeled alternative in {@link SqlBaseParser#constant}.
+ * @param ctx the parse tree
+ */
+ void enterTimeEscapedLiteral(SqlBaseParser.TimeEscapedLiteralContext ctx);
+ /**
+ * Exit a parse tree produced by the {@code timeEscapedLiteral}
+ * labeled alternative in {@link SqlBaseParser#constant}.
+ * @param ctx the parse tree
+ */
+ void exitTimeEscapedLiteral(SqlBaseParser.TimeEscapedLiteralContext ctx);
+ /**
+ * Enter a parse tree produced by the {@code timestampEscapedLiteral}
+ * labeled alternative in {@link SqlBaseParser#constant}.
+ * @param ctx the parse tree
+ */
+ void enterTimestampEscapedLiteral(SqlBaseParser.TimestampEscapedLiteralContext ctx);
+ /**
+ * Exit a parse tree produced by the {@code timestampEscapedLiteral}
+ * labeled alternative in {@link SqlBaseParser#constant}.
+ * @param ctx the parse tree
+ */
+ void exitTimestampEscapedLiteral(SqlBaseParser.TimestampEscapedLiteralContext ctx);
+ /**
+ * Enter a parse tree produced by the {@code guidEscapedLiteral}
+ * labeled alternative in {@link SqlBaseParser#constant}.
+ * @param ctx the parse tree
+ */
+ void enterGuidEscapedLiteral(SqlBaseParser.GuidEscapedLiteralContext ctx);
+ /**
+ * Exit a parse tree produced by the {@code guidEscapedLiteral}
+ * labeled alternative in {@link SqlBaseParser#constant}.
+ * @param ctx the parse tree
+ */
+ void exitGuidEscapedLiteral(SqlBaseParser.GuidEscapedLiteralContext ctx);
/**
* Enter a parse tree produced by {@link SqlBaseParser#comparisonOperator}.
* @param ctx the parse tree
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java
index 3535977943b..32a1b062fee 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java
@@ -1,15 +1,13 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
// ANTLR GENERATED CODE: DO NOT EDIT
package org.elasticsearch.xpack.sql.parser;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.*;
+import org.antlr.v4.runtime.misc.*;
import org.antlr.v4.runtime.tree.*;
import java.util.List;
+import java.util.Iterator;
+import java.util.ArrayList;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
class SqlBaseParser extends Parser {
@@ -28,33 +26,39 @@ class SqlBaseParser extends Parser {
NOT=45, NULL=46, ON=47, OPTIMIZED=48, OR=49, ORDER=50, OUTER=51, PARSED=52,
PHYSICAL=53, PLAN=54, RIGHT=55, RLIKE=56, QUERY=57, SCHEMAS=58, SELECT=59,
SHOW=60, SYS=61, TABLE=62, TABLES=63, TEXT=64, TRUE=65, TYPE=66, TYPES=67,
- USING=68, VERIFY=69, WHERE=70, WITH=71, EQ=72, NEQ=73, LT=74, LTE=75,
- GT=76, GTE=77, PLUS=78, MINUS=79, ASTERISK=80, SLASH=81, PERCENT=82, CONCAT=83,
- DOT=84, PARAM=85, STRING=86, INTEGER_VALUE=87, DECIMAL_VALUE=88, IDENTIFIER=89,
- DIGIT_IDENTIFIER=90, TABLE_IDENTIFIER=91, QUOTED_IDENTIFIER=92, BACKQUOTED_IDENTIFIER=93,
- SIMPLE_COMMENT=94, BRACKETED_COMMENT=95, WS=96, UNRECOGNIZED=97, DELIMITER=98;
+ USING=68, VERIFY=69, WHERE=70, WITH=71, ESCAPE_ESC=72, FUNCTION_ESC=73,
+ LIMIT_ESC=74, DATE_ESC=75, TIME_ESC=76, TIMESTAMP_ESC=77, GUID_ESC=78,
+ ESC_END=79, EQ=80, NEQ=81, LT=82, LTE=83, GT=84, GTE=85, PLUS=86, MINUS=87,
+ ASTERISK=88, SLASH=89, PERCENT=90, CONCAT=91, DOT=92, PARAM=93, STRING=94,
+ INTEGER_VALUE=95, DECIMAL_VALUE=96, IDENTIFIER=97, DIGIT_IDENTIFIER=98,
+ TABLE_IDENTIFIER=99, QUOTED_IDENTIFIER=100, BACKQUOTED_IDENTIFIER=101,
+ SIMPLE_COMMENT=102, BRACKETED_COMMENT=103, WS=104, UNRECOGNIZED=105, DELIMITER=106;
public static final int
RULE_singleStatement = 0, RULE_singleExpression = 1, RULE_statement = 2,
- RULE_query = 3, RULE_queryNoWith = 4, RULE_queryTerm = 5, RULE_orderBy = 6,
- RULE_querySpecification = 7, RULE_fromClause = 8, RULE_groupBy = 9, RULE_groupingElement = 10,
- RULE_groupingExpressions = 11, RULE_namedQuery = 12, RULE_setQuantifier = 13,
- RULE_selectItem = 14, RULE_relation = 15, RULE_joinRelation = 16, RULE_joinType = 17,
- RULE_joinCriteria = 18, RULE_relationPrimary = 19, RULE_expression = 20,
- RULE_booleanExpression = 21, RULE_predicated = 22, RULE_predicate = 23,
- RULE_pattern = 24, RULE_valueExpression = 25, RULE_primaryExpression = 26,
- RULE_constant = 27, RULE_comparisonOperator = 28, RULE_booleanValue = 29,
- RULE_dataType = 30, RULE_qualifiedName = 31, RULE_identifier = 32, RULE_tableIdentifier = 33,
- RULE_quoteIdentifier = 34, RULE_unquoteIdentifier = 35, RULE_number = 36,
- RULE_string = 37, RULE_nonReserved = 38;
+ RULE_query = 3, RULE_queryNoWith = 4, RULE_limitClause = 5, RULE_queryTerm = 6,
+ RULE_orderBy = 7, RULE_querySpecification = 8, RULE_fromClause = 9, RULE_groupBy = 10,
+ RULE_groupingElement = 11, RULE_groupingExpressions = 12, RULE_namedQuery = 13,
+ RULE_setQuantifier = 14, RULE_selectItem = 15, RULE_relation = 16, RULE_joinRelation = 17,
+ RULE_joinType = 18, RULE_joinCriteria = 19, RULE_relationPrimary = 20,
+ RULE_expression = 21, RULE_booleanExpression = 22, RULE_predicated = 23,
+ RULE_predicate = 24, RULE_pattern = 25, RULE_patternEscape = 26, RULE_valueExpression = 27,
+ RULE_primaryExpression = 28, RULE_castExpression = 29, RULE_castTemplate = 30,
+ RULE_extractExpression = 31, RULE_extractTemplate = 32, RULE_functionExpression = 33,
+ RULE_functionTemplate = 34, RULE_constant = 35, RULE_comparisonOperator = 36,
+ RULE_booleanValue = 37, RULE_dataType = 38, RULE_qualifiedName = 39, RULE_identifier = 40,
+ RULE_tableIdentifier = 41, RULE_quoteIdentifier = 42, RULE_unquoteIdentifier = 43,
+ RULE_number = 44, RULE_string = 45, RULE_nonReserved = 46;
public static final String[] ruleNames = {
"singleStatement", "singleExpression", "statement", "query", "queryNoWith",
- "queryTerm", "orderBy", "querySpecification", "fromClause", "groupBy",
- "groupingElement", "groupingExpressions", "namedQuery", "setQuantifier",
+ "limitClause", "queryTerm", "orderBy", "querySpecification", "fromClause",
+ "groupBy", "groupingElement", "groupingExpressions", "namedQuery", "setQuantifier",
"selectItem", "relation", "joinRelation", "joinType", "joinCriteria",
"relationPrimary", "expression", "booleanExpression", "predicated", "predicate",
- "pattern", "valueExpression", "primaryExpression", "constant", "comparisonOperator",
- "booleanValue", "dataType", "qualifiedName", "identifier", "tableIdentifier",
- "quoteIdentifier", "unquoteIdentifier", "number", "string", "nonReserved"
+ "pattern", "patternEscape", "valueExpression", "primaryExpression", "castExpression",
+ "castTemplate", "extractExpression", "extractTemplate", "functionExpression",
+ "functionTemplate", "constant", "comparisonOperator", "booleanValue",
+ "dataType", "qualifiedName", "identifier", "tableIdentifier", "quoteIdentifier",
+ "unquoteIdentifier", "number", "string", "nonReserved"
};
private static final String[] _LITERAL_NAMES = {
@@ -68,8 +72,9 @@ class SqlBaseParser extends Parser {
"'OR'", "'ORDER'", "'OUTER'", "'PARSED'", "'PHYSICAL'", "'PLAN'", "'RIGHT'",
"'RLIKE'", "'QUERY'", "'SCHEMAS'", "'SELECT'", "'SHOW'", "'SYS'", "'TABLE'",
"'TABLES'", "'TEXT'", "'TRUE'", "'TYPE'", "'TYPES'", "'USING'", "'VERIFY'",
- "'WHERE'", "'WITH'", "'='", null, "'<'", "'<='", "'>'", "'>='", "'+'",
- "'-'", "'*'", "'/'", "'%'", "'||'", "'.'", "'?'"
+ "'WHERE'", "'WITH'", "'{ESCAPE'", "'{FN'", "'{LIMIT'", "'{D'", "'{T'",
+ "'{TS'", "'{GUID'", "'}'", "'='", null, "'<'", "'<='", "'>'", "'>='",
+ "'+'", "'-'", "'*'", "'/'", "'%'", "'||'", "'.'", "'?'"
};
private static final String[] _SYMBOLIC_NAMES = {
null, null, null, null, null, "ALL", "ANALYZE", "ANALYZED", "AND", "ANY",
@@ -80,12 +85,13 @@ class SqlBaseParser extends Parser {
"LIMIT", "MAPPED", "MATCH", "NATURAL", "NOT", "NULL", "ON", "OPTIMIZED",
"OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", "RIGHT", "RLIKE",
"QUERY", "SCHEMAS", "SELECT", "SHOW", "SYS", "TABLE", "TABLES", "TEXT",
- "TRUE", "TYPE", "TYPES", "USING", "VERIFY", "WHERE", "WITH", "EQ", "NEQ",
- "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT",
- "CONCAT", "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE",
- "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER",
- "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS",
- "UNRECOGNIZED", "DELIMITER"
+ "TRUE", "TYPE", "TYPES", "USING", "VERIFY", "WHERE", "WITH", "ESCAPE_ESC",
+ "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC",
+ "GUID_ESC", "ESC_END", "EQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS",
+ "MINUS", "ASTERISK", "SLASH", "PERCENT", "CONCAT", "DOT", "PARAM", "STRING",
+ "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER",
+ "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", "BRACKETED_COMMENT",
+ "WS", "UNRECOGNIZED", "DELIMITER"
};
public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
@@ -166,9 +172,9 @@ class SqlBaseParser extends Parser {
try {
enterOuterAlt(_localctx, 1);
{
- setState(78);
+ setState(94);
statement();
- setState(79);
+ setState(95);
match(EOF);
}
}
@@ -213,9 +219,9 @@ class SqlBaseParser extends Parser {
try {
enterOuterAlt(_localctx, 1);
{
- setState(81);
+ setState(97);
expression();
- setState(82);
+ setState(98);
match(EOF);
}
}
@@ -601,14 +607,14 @@ class SqlBaseParser extends Parser {
enterRule(_localctx, 4, RULE_statement);
int _la;
try {
- setState(193);
+ setState(209);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,21,_ctx) ) {
case 1:
_localctx = new StatementDefaultContext(_localctx);
enterOuterAlt(_localctx, 1);
{
- setState(84);
+ setState(100);
query();
}
break;
@@ -616,27 +622,27 @@ class SqlBaseParser extends Parser {
_localctx = new ExplainContext(_localctx);
enterOuterAlt(_localctx, 2);
{
- setState(85);
+ setState(101);
match(EXPLAIN);
- setState(99);
+ setState(115);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) {
case 1:
{
- setState(86);
+ setState(102);
match(T__0);
- setState(95);
+ setState(111);
_errHandler.sync(this);
_la = _input.LA(1);
while (((((_la - 28)) & ~0x3f) == 0 && ((1L << (_la - 28)) & ((1L << (FORMAT - 28)) | (1L << (PLAN - 28)) | (1L << (VERIFY - 28)))) != 0)) {
{
- setState(93);
+ setState(109);
switch (_input.LA(1)) {
case PLAN:
{
- setState(87);
+ setState(103);
match(PLAN);
- setState(88);
+ setState(104);
((ExplainContext)_localctx).type = _input.LT(1);
_la = _input.LA(1);
if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ALL) | (1L << ANALYZED) | (1L << EXECUTABLE) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED))) != 0)) ) {
@@ -648,9 +654,9 @@ class SqlBaseParser extends Parser {
break;
case FORMAT:
{
- setState(89);
+ setState(105);
match(FORMAT);
- setState(90);
+ setState(106);
((ExplainContext)_localctx).format = _input.LT(1);
_la = _input.LA(1);
if ( !(_la==GRAPHVIZ || _la==TEXT) ) {
@@ -662,9 +668,9 @@ class SqlBaseParser extends Parser {
break;
case VERIFY:
{
- setState(91);
+ setState(107);
match(VERIFY);
- setState(92);
+ setState(108);
((ExplainContext)_localctx).verify = booleanValue();
}
break;
@@ -672,16 +678,16 @@ class SqlBaseParser extends Parser {
throw new NoViableAltException(this);
}
}
- setState(97);
+ setState(113);
_errHandler.sync(this);
_la = _input.LA(1);
}
- setState(98);
+ setState(114);
match(T__1);
}
break;
}
- setState(101);
+ setState(117);
statement();
}
break;
@@ -689,27 +695,27 @@ class SqlBaseParser extends Parser {
_localctx = new DebugContext(_localctx);
enterOuterAlt(_localctx, 3);
{
- setState(102);
+ setState(118);
match(DEBUG);
- setState(114);
+ setState(130);
_errHandler.sync(this);
switch ( getInterpreter().adaptivePredict(_input,5,_ctx) ) {
case 1:
{
- setState(103);
+ setState(119);
match(T__0);
- setState(110);
+ setState(126);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==FORMAT || _la==PLAN) {
{
- setState(108);
+ setState(124);
switch (_input.LA(1)) {
case PLAN:
{
- setState(104);
+ setState(120);
match(PLAN);
- setState(105);
+ setState(121);
((DebugContext)_localctx).type = _input.LT(1);
_la = _input.LA(1);
if ( !(_la==ANALYZED || _la==OPTIMIZED) ) {
@@ -721,9 +727,9 @@ class SqlBaseParser extends Parser {
break;
case FORMAT:
{
- setState(106);
+ setState(122);
match(FORMAT);
- setState(107);
+ setState(123);
((DebugContext)_localctx).format = _input.LT(1);
_la = _input.LA(1);
if ( !(_la==GRAPHVIZ || _la==TEXT) ) {
@@ -737,16 +743,16 @@ class SqlBaseParser extends Parser {
throw new NoViableAltException(this);
}
}
- setState(112);
+ setState(128);
_errHandler.sync(this);
_la = _input.LA(1);
}
- setState(113);
+ setState(129);
match(T__1);
}
break;
}
- setState(116);
+ setState(132);
statement();
}
break;
@@ -754,24 +760,24 @@ class SqlBaseParser extends Parser {
_localctx = new ShowTablesContext(_localctx);
enterOuterAlt(_localctx, 4);
{
- setState(117);
+ setState(133);
match(SHOW);
- setState(118);
+ setState(134);
match(TABLES);
- setState(123);
+ setState(139);
_la = _input.LA(1);
if (((((_la - 40)) & ~0x3f) == 0 && ((1L << (_la - 40)) & ((1L << (LIKE - 40)) | (1L << (PARAM - 40)) | (1L << (STRING - 40)))) != 0)) {
{
- setState(120);
+ setState(136);
_la = _input.LA(1);
if (_la==LIKE) {
{
- setState(119);
+ setState(135);
match(LIKE);
}
}
- setState(122);
+ setState(138);
pattern();
}
}
@@ -782,18 +788,18 @@ class SqlBaseParser extends Parser {
_localctx = new ShowColumnsContext(_localctx);
enterOuterAlt(_localctx, 5);
{
- setState(125);
+ setState(141);
match(SHOW);
- setState(126);
+ setState(142);
match(COLUMNS);
- setState(127);
+ setState(143);
_la = _input.LA(1);
if ( !(_la==FROM || _la==IN) ) {
_errHandler.recoverInline(this);
} else {
consume();
}
- setState(128);
+ setState(144);
tableIdentifier();
}
break;
@@ -801,14 +807,14 @@ class SqlBaseParser extends Parser {
_localctx = new ShowColumnsContext(_localctx);
enterOuterAlt(_localctx, 6);
{
- setState(129);
+ setState(145);
_la = _input.LA(1);
if ( !(_la==DESC || _la==DESCRIBE) ) {
_errHandler.recoverInline(this);
} else {
consume();
}
- setState(130);
+ setState(146);
tableIdentifier();
}
break;
@@ -816,24 +822,24 @@ class SqlBaseParser extends Parser {
_localctx = new ShowFunctionsContext(_localctx);
enterOuterAlt(_localctx, 7);
{
- setState(131);
+ setState(147);
match(SHOW);
- setState(132);
+ setState(148);
match(FUNCTIONS);
- setState(137);
+ setState(153);
_la = _input.LA(1);
if (((((_la - 40)) & ~0x3f) == 0 && ((1L << (_la - 40)) & ((1L << (LIKE - 40)) | (1L << (PARAM - 40)) | (1L << (STRING - 40)))) != 0)) {
{
- setState(134);
+ setState(150);
_la = _input.LA(1);
if (_la==LIKE) {
{
- setState(133);
+ setState(149);
match(LIKE);
}
}
- setState(136);
+ setState(152);
pattern();
}
}
@@ -844,9 +850,9 @@ class SqlBaseParser extends Parser {
_localctx = new ShowSchemasContext(_localctx);
enterOuterAlt(_localctx, 8);
{
- setState(139);
+ setState(155);
match(SHOW);
- setState(140);
+ setState(156);
match(SCHEMAS);
}
break;
@@ -854,9 +860,9 @@ class SqlBaseParser extends Parser {
_localctx = new SysCatalogsContext(_localctx);
enterOuterAlt(_localctx, 9);
{
- setState(141);
+ setState(157);
match(SYS);
- setState(142);
+ setState(158);
match(CATALOGS);
}
break;
@@ -864,69 +870,69 @@ class SqlBaseParser extends Parser {
_localctx = new SysTablesContext(_localctx);
enterOuterAlt(_localctx, 10);
{
- setState(143);
+ setState(159);
match(SYS);
- setState(144);
+ setState(160);
match(TABLES);
- setState(150);
+ setState(166);
_la = _input.LA(1);
if (_la==CATALOG) {
{
- setState(145);
+ setState(161);
match(CATALOG);
- setState(147);
+ setState(163);
_la = _input.LA(1);
if (_la==LIKE) {
{
- setState(146);
+ setState(162);
match(LIKE);
}
}
- setState(149);
+ setState(165);
((SysTablesContext)_localctx).clusterPattern = pattern();
}
}
- setState(156);
+ setState(172);
_la = _input.LA(1);
if (((((_la - 40)) & ~0x3f) == 0 && ((1L << (_la - 40)) & ((1L << (LIKE - 40)) | (1L << (PARAM - 40)) | (1L << (STRING - 40)))) != 0)) {
{
- setState(153);
+ setState(169);
_la = _input.LA(1);
if (_la==LIKE) {
{
- setState(152);
+ setState(168);
match(LIKE);
}
}
- setState(155);
+ setState(171);
((SysTablesContext)_localctx).tablePattern = pattern();
}
}
- setState(167);
+ setState(183);
_la = _input.LA(1);
if (_la==TYPE) {
{
- setState(158);
+ setState(174);
match(TYPE);
- setState(159);
+ setState(175);
string();
- setState(164);
+ setState(180);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__2) {
{
{
- setState(160);
+ setState(176);
match(T__2);
- setState(161);
+ setState(177);
string();
}
}
- setState(166);
+ setState(182);
_errHandler.sync(this);
_la = _input.LA(1);
}
@@ -939,55 +945,55 @@ class SqlBaseParser extends Parser {
_localctx = new SysColumnsContext(_localctx);
enterOuterAlt(_localctx, 11);
{
- setState(169);
+ setState(185);
match(SYS);
- setState(170);
+ setState(186);
match(COLUMNS);
- setState(173);
+ setState(189);
_la = _input.LA(1);
if (_la==CATALOG) {
{
- setState(171);
+ setState(187);
match(CATALOG);
- setState(172);
+ setState(188);
((SysColumnsContext)_localctx).cluster = string();
}
}
- setState(180);
+ setState(196);
_la = _input.LA(1);
if (_la==TABLE) {
{
- setState(175);
+ setState(191);
match(TABLE);
- setState(177);
+ setState(193);
_la = _input.LA(1);
if (_la==LIKE) {
{
- setState(176);
+ setState(192);
match(LIKE);
}
}
- setState(179);
+ setState(195);
((SysColumnsContext)_localctx).indexPattern = pattern();
}
}
- setState(186);
+ setState(202);
_la = _input.LA(1);
if (((((_la - 40)) & ~0x3f) == 0 && ((1L << (_la - 40)) & ((1L << (LIKE - 40)) | (1L << (PARAM - 40)) | (1L << (STRING - 40)))) != 0)) {
{
- setState(183);
+ setState(199);
_la = _input.LA(1);
if (_la==LIKE) {
{
- setState(182);
+ setState(198);
match(LIKE);
}
}
- setState(185);
+ setState(201);
((SysColumnsContext)_localctx).columnPattern = pattern();
}
}
@@ -998,9 +1004,9 @@ class SqlBaseParser extends Parser {
_localctx = new SysTypesContext(_localctx);
enterOuterAlt(_localctx, 12);
{
- setState(188);
+ setState(204);
match(SYS);
- setState(189);
+ setState(205);
match(TYPES);
}
break;
@@ -1008,11 +1014,11 @@ class SqlBaseParser extends Parser {
_localctx = new SysTableTypesContext(_localctx);
enterOuterAlt(_localctx, 13);
{
- setState(190);
+ setState(206);
match(SYS);
- setState(191);
+ setState(207);
match(TABLE);
- setState(192);
+ setState(208);
match(TYPES);
}
break;
@@ -1066,34 +1072,34 @@ class SqlBaseParser extends Parser {
try {
enterOuterAlt(_localctx, 1);
{
- setState(204);
+ setState(220);
_la = _input.LA(1);
if (_la==WITH) {
{
- setState(195);
+ setState(211);
match(WITH);
- setState(196);
+ setState(212);
namedQuery();
- setState(201);
+ setState(217);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__2) {
{
{
- setState(197);
+ setState(213);
match(T__2);
- setState(198);
+ setState(214);
namedQuery();
}
}
- setState(203);
+ setState(219);
_errHandler.sync(this);
_la = _input.LA(1);
}
}
}
- setState(206);
+ setState(222);
queryNoWith();
}
}
@@ -1109,7 +1115,6 @@ class SqlBaseParser extends Parser {
}
public static class QueryNoWithContext extends ParserRuleContext {
- public Token limit;
public QueryTermContext queryTerm() {
return getRuleContext(QueryTermContext.class,0);
}
@@ -1121,9 +1126,9 @@ class SqlBaseParser extends Parser {
public OrderByContext orderBy(int i) {
return getRuleContext(OrderByContext.class,i);
}
- public TerminalNode LIMIT() { return getToken(SqlBaseParser.LIMIT, 0); }
- public TerminalNode INTEGER_VALUE() { return getToken(SqlBaseParser.INTEGER_VALUE, 0); }
- public TerminalNode ALL() { return getToken(SqlBaseParser.ALL, 0); }
+ public LimitClauseContext limitClause() {
+ return getRuleContext(LimitClauseContext.class,0);
+ }
public QueryNoWithContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@@ -1150,54 +1155,126 @@ class SqlBaseParser extends Parser {
try {
enterOuterAlt(_localctx, 1);
{
- setState(208);
+ setState(224);
queryTerm();
- setState(219);
+ setState(235);
_la = _input.LA(1);
if (_la==ORDER) {
{
- setState(209);
+ setState(225);
match(ORDER);
- setState(210);
+ setState(226);
match(BY);
- setState(211);
+ setState(227);
orderBy();
- setState(216);
+ setState(232);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__2) {
{
{
- setState(212);
+ setState(228);
match(T__2);
- setState(213);
+ setState(229);
orderBy();
}
}
- setState(218);
+ setState(234);
_errHandler.sync(this);
_la = _input.LA(1);
}
}
}
- setState(223);
+ setState(238);
_la = _input.LA(1);
- if (_la==LIMIT) {
+ if (_la==LIMIT || _la==LIMIT_ESC) {
{
- setState(221);
+ setState(237);
+ limitClause();
+ }
+ }
+
+ }
+ }
+ catch (RecognitionException re) {
+ _localctx.exception = re;
+ _errHandler.reportError(this, re);
+ _errHandler.recover(this, re);
+ }
+ finally {
+ exitRule();
+ }
+ return _localctx;
+ }
+
+ public static class LimitClauseContext extends ParserRuleContext {
+ public Token limit;
+ public TerminalNode LIMIT() { return getToken(SqlBaseParser.LIMIT, 0); }
+ public TerminalNode INTEGER_VALUE() { return getToken(SqlBaseParser.INTEGER_VALUE, 0); }
+ public TerminalNode ALL() { return getToken(SqlBaseParser.ALL, 0); }
+ public TerminalNode LIMIT_ESC() { return getToken(SqlBaseParser.LIMIT_ESC, 0); }
+ public TerminalNode ESC_END() { return getToken(SqlBaseParser.ESC_END, 0); }
+ public LimitClauseContext(ParserRuleContext parent, int invokingState) {
+ super(parent, invokingState);
+ }
+ @Override public int getRuleIndex() { return RULE_limitClause; }
+ @Override
+ public void enterRule(ParseTreeListener listener) {
+ if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterLimitClause(this);
+ }
+ @Override
+ public void exitRule(ParseTreeListener listener) {
+ if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitLimitClause(this);
+ }
+ @Override
+ public T accept(ParseTreeVisitor extends T> visitor) {
+ if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor extends T>)visitor).visitLimitClause(this);
+ else return visitor.visitChildren(this);
+ }
+ }
+
+ public final LimitClauseContext limitClause() throws RecognitionException {
+ LimitClauseContext _localctx = new LimitClauseContext(_ctx, getState());
+ enterRule(_localctx, 10, RULE_limitClause);
+ int _la;
+ try {
+ setState(245);
+ switch (_input.LA(1)) {
+ case LIMIT:
+ enterOuterAlt(_localctx, 1);
+ {
+ setState(240);
match(LIMIT);
- setState(222);
- ((QueryNoWithContext)_localctx).limit = _input.LT(1);
+ setState(241);
+ ((LimitClauseContext)_localctx).limit = _input.LT(1);
_la = _input.LA(1);
if ( !(_la==ALL || _la==INTEGER_VALUE) ) {
- ((QueryNoWithContext)_localctx).limit = (Token)_errHandler.recoverInline(this);
+ ((LimitClauseContext)_localctx).limit = (Token)_errHandler.recoverInline(this);
} else {
consume();
}
}
- }
-
+ break;
+ case LIMIT_ESC:
+ enterOuterAlt(_localctx, 2);
+ {
+ setState(242);
+ match(LIMIT_ESC);
+ setState(243);
+ ((LimitClauseContext)_localctx).limit = _input.LT(1);
+ _la = _input.LA(1);
+ if ( !(_la==ALL || _la==INTEGER_VALUE) ) {
+ ((LimitClauseContext)_localctx).limit = (Token)_errHandler.recoverInline(this);
+ } else {
+ consume();
+ }
+ setState(244);
+ match(ESC_END);
+ }
+ break;
+ default:
+ throw new NoViableAltException(this);
}
}
catch (RecognitionException re) {
@@ -1263,15 +1340,15 @@ class SqlBaseParser extends Parser {
public final QueryTermContext queryTerm() throws RecognitionException {
QueryTermContext _localctx = new QueryTermContext(_ctx, getState());
- enterRule(_localctx, 10, RULE_queryTerm);
+ enterRule(_localctx, 12, RULE_queryTerm);
try {
- setState(230);
+ setState(252);
switch (_input.LA(1)) {
case SELECT:
_localctx = new QueryPrimaryDefaultContext(_localctx);
enterOuterAlt(_localctx, 1);
{
- setState(225);
+ setState(247);
querySpecification();
}
break;
@@ -1279,11 +1356,11 @@ class SqlBaseParser extends Parser {
_localctx = new SubqueryContext(_localctx);
enterOuterAlt(_localctx, 2);
{
- setState(226);
+ setState(248);
match(T__0);
- setState(227);
+ setState(249);
queryNoWith();
- setState(228);
+ setState(250);
match(T__1);
}
break;
@@ -1330,18 +1407,18 @@ class SqlBaseParser extends Parser {
public final OrderByContext orderBy() throws RecognitionException {
OrderByContext _localctx = new OrderByContext(_ctx, getState());
- enterRule(_localctx, 12, RULE_orderBy);
+ enterRule(_localctx, 14, RULE_orderBy);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
- setState(232);
+ setState(254);
expression();
- setState(234);
+ setState(256);
_la = _input.LA(1);
if (_la==ASC || _la==DESC) {
{
- setState(233);
+ setState(255);
((OrderByContext)_localctx).ordering = _input.LT(1);
_la = _input.LA(1);
if ( !(_la==ASC || _la==DESC) ) {
@@ -1415,80 +1492,80 @@ class SqlBaseParser extends Parser {
public final QuerySpecificationContext querySpecification() throws RecognitionException {
QuerySpecificationContext _localctx = new QuerySpecificationContext(_ctx, getState());
- enterRule(_localctx, 14, RULE_querySpecification);
+ enterRule(_localctx, 16, RULE_querySpecification);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
- setState(236);
+ setState(258);
match(SELECT);
- setState(238);
+ setState(260);
_la = _input.LA(1);
if (_la==ALL || _la==DISTINCT) {
{
- setState(237);
+ setState(259);
setQuantifier();
}
}
- setState(240);
+ setState(262);
selectItem();
- setState(245);
+ setState(267);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__2) {
{
{
- setState(241);
+ setState(263);
match(T__2);
- setState(242);
+ setState(264);
selectItem();
}
}
- setState(247);
+ setState(269);
_errHandler.sync(this);
_la = _input.LA(1);
}
- setState(249);
+ setState(271);
_la = _input.LA(1);
if (_la==FROM) {
{
- setState(248);
+ setState(270);
fromClause();
}
}
- setState(253);
+ setState(275);
_la = _input.LA(1);
if (_la==WHERE) {
{
- setState(251);
+ setState(273);
match(WHERE);
- setState(252);
+ setState(274);
((QuerySpecificationContext)_localctx).where = booleanExpression(0);
}
}
- setState(258);
+ setState(280);
_la = _input.LA(1);
if (_la==GROUP) {
{
- setState(255);
+ setState(277);
match(GROUP);
- setState(256);
+ setState(278);
match(BY);
- setState(257);
+ setState(279);
groupBy();
}
}
- setState(262);
+ setState(284);
_la = _input.LA(1);
if (_la==HAVING) {
{
- setState(260);
+ setState(282);
match(HAVING);
- setState(261);
+ setState(283);
((QuerySpecificationContext)_localctx).having = booleanExpression(0);
}
}
@@ -1535,28 +1612,28 @@ class SqlBaseParser extends Parser {
public final FromClauseContext fromClause() throws RecognitionException {
FromClauseContext _localctx = new FromClauseContext(_ctx, getState());
- enterRule(_localctx, 16, RULE_fromClause);
+ enterRule(_localctx, 18, RULE_fromClause);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
- setState(264);
+ setState(286);
match(FROM);
- setState(265);
+ setState(287);
relation();
- setState(270);
+ setState(292);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__2) {
{
{
- setState(266);
+ setState(288);
match(T__2);
- setState(267);
+ setState(289);
relation();
}
}
- setState(272);
+ setState(294);
_errHandler.sync(this);
_la = _input.LA(1);
}
@@ -1604,35 +1681,35 @@ class SqlBaseParser extends Parser {
public final GroupByContext groupBy() throws RecognitionException {
GroupByContext _localctx = new GroupByContext(_ctx, getState());
- enterRule(_localctx, 18, RULE_groupBy);
+ enterRule(_localctx, 20, RULE_groupBy);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
- setState(274);
+ setState(296);
_la = _input.LA(1);
if (_la==ALL || _la==DISTINCT) {
{
- setState(273);
+ setState(295);
setQuantifier();
}
}
- setState(276);
+ setState(298);
groupingElement();
- setState(281);
+ setState(303);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__2) {
{
{
- setState(277);
+ setState(299);
match(T__2);
- setState(278);
+ setState(300);
groupingElement();
}
}
- setState(283);
+ setState(305);
_errHandler.sync(this);
_la = _input.LA(1);
}
@@ -1682,12 +1759,12 @@ class SqlBaseParser extends Parser {
public final GroupingElementContext groupingElement() throws RecognitionException {
GroupingElementContext _localctx = new GroupingElementContext(_ctx, getState());
- enterRule(_localctx, 20, RULE_groupingElement);
+ enterRule(_localctx, 22, RULE_groupingElement);
try {
_localctx = new SingleGroupingSetContext(_localctx);
enterOuterAlt(_localctx, 1);
{
- setState(284);
+ setState(306);
groupingExpressions();
}
}
@@ -1730,50 +1807,50 @@ class SqlBaseParser extends Parser {
public final GroupingExpressionsContext groupingExpressions() throws RecognitionException {
GroupingExpressionsContext _localctx = new GroupingExpressionsContext(_ctx, getState());
- enterRule(_localctx, 22, RULE_groupingExpressions);
+ enterRule(_localctx, 24, RULE_groupingExpressions);
int _la;
try {
- setState(299);
+ setState(321);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,40,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) {
case 1:
enterOuterAlt(_localctx, 1);
{
- setState(286);
+ setState(308);
match(T__0);
- setState(295);
+ setState(317);
_la = _input.LA(1);
- if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << MATCH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TRUE - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (PLUS - 64)) | (1L << (MINUS - 64)) | (1L << (ASTERISK - 64)) | (1L << (PARAM - 64)) | (1L << (STRING - 64)) | (1L << (INTEGER_VALUE - 64)) | (1L << (DECIMAL_VALUE - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) {
+ if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << MATCH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TRUE - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (FUNCTION_ESC - 64)) | (1L << (DATE_ESC - 64)) | (1L << (TIME_ESC - 64)) | (1L << (TIMESTAMP_ESC - 64)) | (1L << (GUID_ESC - 64)) | (1L << (PLUS - 64)) | (1L << (MINUS - 64)) | (1L << (ASTERISK - 64)) | (1L << (PARAM - 64)) | (1L << (STRING - 64)) | (1L << (INTEGER_VALUE - 64)) | (1L << (DECIMAL_VALUE - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) {
{
- setState(287);
+ setState(309);
expression();
- setState(292);
+ setState(314);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__2) {
{
{
- setState(288);
+ setState(310);
match(T__2);
- setState(289);
+ setState(311);
expression();
}
}
- setState(294);
+ setState(316);
_errHandler.sync(this);
_la = _input.LA(1);
}
}
}
- setState(297);
+ setState(319);
match(T__1);
}
break;
case 2:
enterOuterAlt(_localctx, 2);
{
- setState(298);
+ setState(320);
expression();
}
break;
@@ -1820,19 +1897,19 @@ class SqlBaseParser extends Parser {
public final NamedQueryContext namedQuery() throws RecognitionException {
NamedQueryContext _localctx = new NamedQueryContext(_ctx, getState());
- enterRule(_localctx, 24, RULE_namedQuery);
+ enterRule(_localctx, 26, RULE_namedQuery);
try {
enterOuterAlt(_localctx, 1);
{
- setState(301);
+ setState(323);
((NamedQueryContext)_localctx).name = identifier();
- setState(302);
+ setState(324);
match(AS);
- setState(303);
+ setState(325);
match(T__0);
- setState(304);
+ setState(326);
queryNoWith();
- setState(305);
+ setState(327);
match(T__1);
}
}
@@ -1871,12 +1948,12 @@ class SqlBaseParser extends Parser {
public final SetQuantifierContext setQuantifier() throws RecognitionException {
SetQuantifierContext _localctx = new SetQuantifierContext(_ctx, getState());
- enterRule(_localctx, 26, RULE_setQuantifier);
+ enterRule(_localctx, 28, RULE_setQuantifier);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
- setState(307);
+ setState(329);
_la = _input.LA(1);
if ( !(_la==ALL || _la==DISTINCT) ) {
_errHandler.recoverInline(this);
@@ -1933,28 +2010,28 @@ class SqlBaseParser extends Parser {
public final SelectItemContext selectItem() throws RecognitionException {
SelectItemContext _localctx = new SelectItemContext(_ctx, getState());
- enterRule(_localctx, 28, RULE_selectItem);
+ enterRule(_localctx, 30, RULE_selectItem);
int _la;
try {
_localctx = new SelectExpressionContext(_localctx);
enterOuterAlt(_localctx, 1);
{
- setState(309);
+ setState(331);
expression();
- setState(314);
+ setState(336);
_la = _input.LA(1);
if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) {
{
- setState(311);
+ setState(333);
_la = _input.LA(1);
if (_la==AS) {
{
- setState(310);
+ setState(332);
match(AS);
}
}
- setState(313);
+ setState(335);
identifier();
}
}
@@ -2003,24 +2080,24 @@ class SqlBaseParser extends Parser {
public final RelationContext relation() throws RecognitionException {
RelationContext _localctx = new RelationContext(_ctx, getState());
- enterRule(_localctx, 30, RULE_relation);
+ enterRule(_localctx, 32, RULE_relation);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
- setState(316);
+ setState(338);
relationPrimary();
- setState(320);
+ setState(342);
_errHandler.sync(this);
_la = _input.LA(1);
while ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << FULL) | (1L << INNER) | (1L << JOIN) | (1L << LEFT) | (1L << NATURAL) | (1L << RIGHT))) != 0)) {
{
{
- setState(317);
+ setState(339);
joinRelation();
}
}
- setState(322);
+ setState(344);
_errHandler.sync(this);
_la = _input.LA(1);
}
@@ -2071,10 +2148,10 @@ class SqlBaseParser extends Parser {
public final JoinRelationContext joinRelation() throws RecognitionException {
JoinRelationContext _localctx = new JoinRelationContext(_ctx, getState());
- enterRule(_localctx, 32, RULE_joinRelation);
+ enterRule(_localctx, 34, RULE_joinRelation);
int _la;
try {
- setState(334);
+ setState(356);
switch (_input.LA(1)) {
case FULL:
case INNER:
@@ -2084,18 +2161,18 @@ class SqlBaseParser extends Parser {
enterOuterAlt(_localctx, 1);
{
{
- setState(323);
+ setState(345);
joinType();
}
- setState(324);
+ setState(346);
match(JOIN);
- setState(325);
+ setState(347);
((JoinRelationContext)_localctx).right = relationPrimary();
- setState(327);
+ setState(349);
_la = _input.LA(1);
if (_la==ON || _la==USING) {
{
- setState(326);
+ setState(348);
joinCriteria();
}
}
@@ -2105,13 +2182,13 @@ class SqlBaseParser extends Parser {
case NATURAL:
enterOuterAlt(_localctx, 2);
{
- setState(329);
+ setState(351);
match(NATURAL);
- setState(330);
+ setState(352);
joinType();
- setState(331);
+ setState(353);
match(JOIN);
- setState(332);
+ setState(354);
((JoinRelationContext)_localctx).right = relationPrimary();
}
break;
@@ -2157,20 +2234,20 @@ class SqlBaseParser extends Parser {
public final JoinTypeContext joinType() throws RecognitionException {
JoinTypeContext _localctx = new JoinTypeContext(_ctx, getState());
- enterRule(_localctx, 34, RULE_joinType);
+ enterRule(_localctx, 36, RULE_joinType);
int _la;
try {
- setState(351);
+ setState(373);
switch (_input.LA(1)) {
case INNER:
case JOIN:
enterOuterAlt(_localctx, 1);
{
- setState(337);
+ setState(359);
_la = _input.LA(1);
if (_la==INNER) {
{
- setState(336);
+ setState(358);
match(INNER);
}
}
@@ -2180,13 +2257,13 @@ class SqlBaseParser extends Parser {
case LEFT:
enterOuterAlt(_localctx, 2);
{
- setState(339);
+ setState(361);
match(LEFT);
- setState(341);
+ setState(363);
_la = _input.LA(1);
if (_la==OUTER) {
{
- setState(340);
+ setState(362);
match(OUTER);
}
}
@@ -2196,13 +2273,13 @@ class SqlBaseParser extends Parser {
case RIGHT:
enterOuterAlt(_localctx, 3);
{
- setState(343);
+ setState(365);
match(RIGHT);
- setState(345);
+ setState(367);
_la = _input.LA(1);
if (_la==OUTER) {
{
- setState(344);
+ setState(366);
match(OUTER);
}
}
@@ -2212,13 +2289,13 @@ class SqlBaseParser extends Parser {
case FULL:
enterOuterAlt(_localctx, 4);
{
- setState(347);
+ setState(369);
match(FULL);
- setState(349);
+ setState(371);
_la = _input.LA(1);
if (_la==OUTER) {
{
- setState(348);
+ setState(370);
match(OUTER);
}
}
@@ -2273,46 +2350,46 @@ class SqlBaseParser extends Parser {
public final JoinCriteriaContext joinCriteria() throws RecognitionException {
JoinCriteriaContext _localctx = new JoinCriteriaContext(_ctx, getState());
- enterRule(_localctx, 36, RULE_joinCriteria);
+ enterRule(_localctx, 38, RULE_joinCriteria);
int _la;
try {
- setState(367);
+ setState(389);
switch (_input.LA(1)) {
case ON:
enterOuterAlt(_localctx, 1);
{
- setState(353);
+ setState(375);
match(ON);
- setState(354);
+ setState(376);
booleanExpression(0);
}
break;
case USING:
enterOuterAlt(_localctx, 2);
{
- setState(355);
+ setState(377);
match(USING);
- setState(356);
+ setState(378);
match(T__0);
- setState(357);
+ setState(379);
identifier();
- setState(362);
+ setState(384);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__2) {
{
{
- setState(358);
+ setState(380);
match(T__2);
- setState(359);
+ setState(381);
identifier();
}
}
- setState(364);
+ setState(386);
_errHandler.sync(this);
_la = _input.LA(1);
}
- setState(365);
+ setState(387);
match(T__1);
}
break;
@@ -2414,32 +2491,32 @@ class SqlBaseParser extends Parser {
public final RelationPrimaryContext relationPrimary() throws RecognitionException {
RelationPrimaryContext _localctx = new RelationPrimaryContext(_ctx, getState());
- enterRule(_localctx, 38, RULE_relationPrimary);
+ enterRule(_localctx, 40, RULE_relationPrimary);
int _la;
try {
- setState(394);
+ setState(416);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,59,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,60,_ctx) ) {
case 1:
_localctx = new TableNameContext(_localctx);
enterOuterAlt(_localctx, 1);
{
- setState(369);
+ setState(391);
tableIdentifier();
- setState(374);
+ setState(396);
_la = _input.LA(1);
if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) {
{
- setState(371);
+ setState(393);
_la = _input.LA(1);
if (_la==AS) {
{
- setState(370);
+ setState(392);
match(AS);
}
}
- setState(373);
+ setState(395);
qualifiedName();
}
}
@@ -2450,26 +2527,26 @@ class SqlBaseParser extends Parser {
_localctx = new AliasedQueryContext(_localctx);
enterOuterAlt(_localctx, 2);
{
- setState(376);
+ setState(398);
match(T__0);
- setState(377);
+ setState(399);
queryNoWith();
- setState(378);
+ setState(400);
match(T__1);
- setState(383);
+ setState(405);
_la = _input.LA(1);
if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) {
{
- setState(380);
+ setState(402);
_la = _input.LA(1);
if (_la==AS) {
{
- setState(379);
+ setState(401);
match(AS);
}
}
- setState(382);
+ setState(404);
qualifiedName();
}
}
@@ -2480,26 +2557,26 @@ class SqlBaseParser extends Parser {
_localctx = new AliasedRelationContext(_localctx);
enterOuterAlt(_localctx, 3);
{
- setState(385);
+ setState(407);
match(T__0);
- setState(386);
+ setState(408);
relation();
- setState(387);
+ setState(409);
match(T__1);
- setState(392);
+ setState(414);
_la = _input.LA(1);
if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) {
{
- setState(389);
+ setState(411);
_la = _input.LA(1);
if (_la==AS) {
{
- setState(388);
+ setState(410);
match(AS);
}
}
- setState(391);
+ setState(413);
qualifiedName();
}
}
@@ -2544,11 +2621,11 @@ class SqlBaseParser extends Parser {
public final ExpressionContext expression() throws RecognitionException {
ExpressionContext _localctx = new ExpressionContext(_ctx, getState());
- enterRule(_localctx, 40, RULE_expression);
+ enterRule(_localctx, 42, RULE_expression);
try {
enterOuterAlt(_localctx, 1);
{
- setState(396);
+ setState(418);
booleanExpression(0);
}
}
@@ -2750,25 +2827,25 @@ class SqlBaseParser extends Parser {
int _parentState = getState();
BooleanExpressionContext _localctx = new BooleanExpressionContext(_ctx, _parentState);
BooleanExpressionContext _prevctx = _localctx;
- int _startState = 42;
- enterRecursionRule(_localctx, 42, RULE_booleanExpression, _p);
+ int _startState = 44;
+ enterRecursionRule(_localctx, 44, RULE_booleanExpression, _p);
int _la;
try {
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(447);
+ setState(469);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,63,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,64,_ctx) ) {
case 1:
{
_localctx = new LogicalNotContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(399);
+ setState(421);
match(NOT);
- setState(400);
+ setState(422);
booleanExpression(8);
}
break;
@@ -2777,13 +2854,13 @@ class SqlBaseParser extends Parser {
_localctx = new ExistsContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(401);
+ setState(423);
match(EXISTS);
- setState(402);
+ setState(424);
match(T__0);
- setState(403);
+ setState(425);
query();
- setState(404);
+ setState(426);
match(T__1);
}
break;
@@ -2792,29 +2869,29 @@ class SqlBaseParser extends Parser {
_localctx = new StringQueryContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(406);
+ setState(428);
match(QUERY);
- setState(407);
+ setState(429);
match(T__0);
- setState(408);
+ setState(430);
((StringQueryContext)_localctx).queryString = string();
- setState(413);
+ setState(435);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__2) {
{
{
- setState(409);
+ setState(431);
match(T__2);
- setState(410);
+ setState(432);
((StringQueryContext)_localctx).options = string();
}
}
- setState(415);
+ setState(437);
_errHandler.sync(this);
_la = _input.LA(1);
}
- setState(416);
+ setState(438);
match(T__1);
}
break;
@@ -2823,33 +2900,33 @@ class SqlBaseParser extends Parser {
_localctx = new MatchQueryContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(418);
+ setState(440);
match(MATCH);
- setState(419);
+ setState(441);
match(T__0);
- setState(420);
+ setState(442);
((MatchQueryContext)_localctx).singleField = qualifiedName();
- setState(421);
+ setState(443);
match(T__2);
- setState(422);
+ setState(444);
((MatchQueryContext)_localctx).queryString = string();
- setState(427);
+ setState(449);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__2) {
{
{
- setState(423);
+ setState(445);
match(T__2);
- setState(424);
+ setState(446);
((MatchQueryContext)_localctx).options = string();
}
}
- setState(429);
+ setState(451);
_errHandler.sync(this);
_la = _input.LA(1);
}
- setState(430);
+ setState(452);
match(T__1);
}
break;
@@ -2858,33 +2935,33 @@ class SqlBaseParser extends Parser {
_localctx = new MultiMatchQueryContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(432);
+ setState(454);
match(MATCH);
- setState(433);
+ setState(455);
match(T__0);
- setState(434);
+ setState(456);
((MultiMatchQueryContext)_localctx).multiFields = string();
- setState(435);
+ setState(457);
match(T__2);
- setState(436);
+ setState(458);
((MultiMatchQueryContext)_localctx).queryString = string();
- setState(441);
+ setState(463);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__2) {
{
{
- setState(437);
+ setState(459);
match(T__2);
- setState(438);
+ setState(460);
((MultiMatchQueryContext)_localctx).options = string();
}
}
- setState(443);
+ setState(465);
_errHandler.sync(this);
_la = _input.LA(1);
}
- setState(444);
+ setState(466);
match(T__1);
}
break;
@@ -2893,33 +2970,33 @@ class SqlBaseParser extends Parser {
_localctx = new BooleanDefaultContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(446);
+ setState(468);
predicated();
}
break;
}
_ctx.stop = _input.LT(-1);
- setState(457);
+ setState(479);
_errHandler.sync(this);
- _alt = getInterpreter().adaptivePredict(_input,65,_ctx);
+ _alt = getInterpreter().adaptivePredict(_input,66,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
if ( _parseListeners!=null ) triggerExitRuleEvent();
_prevctx = _localctx;
{
- setState(455);
+ setState(477);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,64,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,65,_ctx) ) {
case 1:
{
_localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState));
((LogicalBinaryContext)_localctx).left = _prevctx;
pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression);
- setState(449);
+ setState(471);
if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)");
- setState(450);
+ setState(472);
((LogicalBinaryContext)_localctx).operator = match(AND);
- setState(451);
+ setState(473);
((LogicalBinaryContext)_localctx).right = booleanExpression(3);
}
break;
@@ -2928,20 +3005,20 @@ class SqlBaseParser extends Parser {
_localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState));
((LogicalBinaryContext)_localctx).left = _prevctx;
pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression);
- setState(452);
+ setState(474);
if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)");
- setState(453);
+ setState(475);
((LogicalBinaryContext)_localctx).operator = match(OR);
- setState(454);
+ setState(476);
((LogicalBinaryContext)_localctx).right = booleanExpression(2);
}
break;
}
}
}
- setState(459);
+ setState(481);
_errHandler.sync(this);
- _alt = getInterpreter().adaptivePredict(_input,65,_ctx);
+ _alt = getInterpreter().adaptivePredict(_input,66,_ctx);
}
}
}
@@ -2984,18 +3061,18 @@ class SqlBaseParser extends Parser {
public final PredicatedContext predicated() throws RecognitionException {
PredicatedContext _localctx = new PredicatedContext(_ctx, getState());
- enterRule(_localctx, 44, RULE_predicated);
+ enterRule(_localctx, 46, RULE_predicated);
try {
enterOuterAlt(_localctx, 1);
{
- setState(460);
+ setState(482);
valueExpression(0);
- setState(462);
+ setState(484);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,66,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,67,_ctx) ) {
case 1:
{
- setState(461);
+ setState(483);
predicate();
}
break;
@@ -3068,145 +3145,145 @@ class SqlBaseParser extends Parser {
public final PredicateContext predicate() throws RecognitionException {
PredicateContext _localctx = new PredicateContext(_ctx, getState());
- enterRule(_localctx, 46, RULE_predicate);
+ enterRule(_localctx, 48, RULE_predicate);
int _la;
try {
- setState(510);
+ setState(532);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,74,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,75,_ctx) ) {
case 1:
enterOuterAlt(_localctx, 1);
{
- setState(465);
+ setState(487);
_la = _input.LA(1);
if (_la==NOT) {
{
- setState(464);
+ setState(486);
match(NOT);
}
}
- setState(467);
+ setState(489);
((PredicateContext)_localctx).kind = match(BETWEEN);
- setState(468);
+ setState(490);
((PredicateContext)_localctx).lower = valueExpression(0);
- setState(469);
+ setState(491);
match(AND);
- setState(470);
+ setState(492);
((PredicateContext)_localctx).upper = valueExpression(0);
}
break;
case 2:
enterOuterAlt(_localctx, 2);
{
- setState(473);
+ setState(495);
_la = _input.LA(1);
if (_la==NOT) {
{
- setState(472);
+ setState(494);
match(NOT);
}
}
- setState(475);
+ setState(497);
((PredicateContext)_localctx).kind = match(IN);
- setState(476);
+ setState(498);
match(T__0);
- setState(477);
+ setState(499);
expression();
- setState(482);
+ setState(504);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==T__2) {
{
{
- setState(478);
+ setState(500);
match(T__2);
- setState(479);
+ setState(501);
expression();
}
}
- setState(484);
+ setState(506);
_errHandler.sync(this);
_la = _input.LA(1);
}
- setState(485);
+ setState(507);
match(T__1);
}
break;
case 3:
enterOuterAlt(_localctx, 3);
{
- setState(488);
+ setState(510);
_la = _input.LA(1);
if (_la==NOT) {
{
- setState(487);
+ setState(509);
match(NOT);
}
}
- setState(490);
+ setState(512);
((PredicateContext)_localctx).kind = match(IN);
- setState(491);
+ setState(513);
match(T__0);
- setState(492);
+ setState(514);
query();
- setState(493);
+ setState(515);
match(T__1);
}
break;
case 4:
enterOuterAlt(_localctx, 4);
{
- setState(496);
+ setState(518);
_la = _input.LA(1);
if (_la==NOT) {
{
- setState(495);
+ setState(517);
match(NOT);
}
}
- setState(498);
+ setState(520);
((PredicateContext)_localctx).kind = match(LIKE);
- setState(499);
+ setState(521);
pattern();
}
break;
case 5:
enterOuterAlt(_localctx, 5);
{
- setState(501);
+ setState(523);
_la = _input.LA(1);
if (_la==NOT) {
{
- setState(500);
+ setState(522);
match(NOT);
}
}
- setState(503);
+ setState(525);
((PredicateContext)_localctx).kind = match(RLIKE);
- setState(504);
+ setState(526);
((PredicateContext)_localctx).regex = string();
}
break;
case 6:
enterOuterAlt(_localctx, 6);
{
- setState(505);
+ setState(527);
match(IS);
- setState(507);
+ setState(529);
_la = _input.LA(1);
if (_la==NOT) {
{
- setState(506);
+ setState(528);
match(NOT);
}
}
- setState(509);
+ setState(531);
((PredicateContext)_localctx).kind = match(NULL);
}
break;
@@ -3225,14 +3302,12 @@ class SqlBaseParser extends Parser {
public static class PatternContext extends ParserRuleContext {
public StringContext value;
- public StringContext escape;
- public List string() {
- return getRuleContexts(StringContext.class);
+ public StringContext string() {
+ return getRuleContext(StringContext.class,0);
}
- public StringContext string(int i) {
- return getRuleContext(StringContext.class,i);
+ public PatternEscapeContext patternEscape() {
+ return getRuleContext(PatternEscapeContext.class,0);
}
- public TerminalNode ESCAPE() { return getToken(SqlBaseParser.ESCAPE, 0); }
public PatternContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@@ -3254,21 +3329,19 @@ class SqlBaseParser extends Parser {
public final PatternContext pattern() throws RecognitionException {
PatternContext _localctx = new PatternContext(_ctx, getState());
- enterRule(_localctx, 48, RULE_pattern);
+ enterRule(_localctx, 50, RULE_pattern);
try {
enterOuterAlt(_localctx, 1);
{
- setState(512);
+ setState(534);
((PatternContext)_localctx).value = string();
- setState(515);
+ setState(536);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,75,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,76,_ctx) ) {
case 1:
{
- setState(513);
- match(ESCAPE);
- setState(514);
- ((PatternContext)_localctx).escape = string();
+ setState(535);
+ patternEscape();
}
break;
}
@@ -3285,6 +3358,73 @@ class SqlBaseParser extends Parser {
return _localctx;
}
+ public static class PatternEscapeContext extends ParserRuleContext {
+ public StringContext escape;
+ public TerminalNode ESCAPE() { return getToken(SqlBaseParser.ESCAPE, 0); }
+ public StringContext string() {
+ return getRuleContext(StringContext.class,0);
+ }
+ public TerminalNode ESCAPE_ESC() { return getToken(SqlBaseParser.ESCAPE_ESC, 0); }
+ public PatternEscapeContext(ParserRuleContext parent, int invokingState) {
+ super(parent, invokingState);
+ }
+ @Override public int getRuleIndex() { return RULE_patternEscape; }
+ @Override
+ public void enterRule(ParseTreeListener listener) {
+ if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterPatternEscape(this);
+ }
+ @Override
+ public void exitRule(ParseTreeListener listener) {
+ if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitPatternEscape(this);
+ }
+ @Override
+ public T accept(ParseTreeVisitor extends T> visitor) {
+ if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor extends T>)visitor).visitPatternEscape(this);
+ else return visitor.visitChildren(this);
+ }
+ }
+
+ public final PatternEscapeContext patternEscape() throws RecognitionException {
+ PatternEscapeContext _localctx = new PatternEscapeContext(_ctx, getState());
+ enterRule(_localctx, 52, RULE_patternEscape);
+ try {
+ setState(544);
+ switch (_input.LA(1)) {
+ case ESCAPE:
+ enterOuterAlt(_localctx, 1);
+ {
+ setState(538);
+ match(ESCAPE);
+ setState(539);
+ ((PatternEscapeContext)_localctx).escape = string();
+ }
+ break;
+ case ESCAPE_ESC:
+ enterOuterAlt(_localctx, 2);
+ {
+ setState(540);
+ match(ESCAPE_ESC);
+ setState(541);
+ ((PatternEscapeContext)_localctx).escape = string();
+ setState(542);
+ match(ESC_END);
+ }
+ break;
+ default:
+ throw new NoViableAltException(this);
+ }
+ }
+ catch (RecognitionException re) {
+ _localctx.exception = re;
+ _errHandler.reportError(this, re);
+ _errHandler.recover(this, re);
+ }
+ finally {
+ exitRule();
+ }
+ return _localctx;
+ }
+
public static class ValueExpressionContext extends ParserRuleContext {
public ValueExpressionContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
@@ -3404,14 +3544,14 @@ class SqlBaseParser extends Parser {
int _parentState = getState();
ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, _parentState);
ValueExpressionContext _prevctx = _localctx;
- int _startState = 50;
- enterRecursionRule(_localctx, 50, RULE_valueExpression, _p);
+ int _startState = 54;
+ enterRecursionRule(_localctx, 54, RULE_valueExpression, _p);
int _la;
try {
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(521);
+ setState(550);
switch (_input.LA(1)) {
case T__0:
case ANALYZE:
@@ -3444,6 +3584,11 @@ class SqlBaseParser extends Parser {
case TYPE:
case TYPES:
case VERIFY:
+ case FUNCTION_ESC:
+ case DATE_ESC:
+ case TIME_ESC:
+ case TIMESTAMP_ESC:
+ case GUID_ESC:
case ASTERISK:
case PARAM:
case STRING:
@@ -3458,7 +3603,7 @@ class SqlBaseParser extends Parser {
_ctx = _localctx;
_prevctx = _localctx;
- setState(518);
+ setState(547);
primaryExpression();
}
break;
@@ -3468,7 +3613,7 @@ class SqlBaseParser extends Parser {
_localctx = new ArithmeticUnaryContext(_localctx);
_ctx = _localctx;
_prevctx = _localctx;
- setState(519);
+ setState(548);
((ArithmeticUnaryContext)_localctx).operator = _input.LT(1);
_la = _input.LA(1);
if ( !(_la==PLUS || _la==MINUS) ) {
@@ -3476,7 +3621,7 @@ class SqlBaseParser extends Parser {
} else {
consume();
}
- setState(520);
+ setState(549);
valueExpression(4);
}
break;
@@ -3484,33 +3629,33 @@ class SqlBaseParser extends Parser {
throw new NoViableAltException(this);
}
_ctx.stop = _input.LT(-1);
- setState(535);
+ setState(564);
_errHandler.sync(this);
- _alt = getInterpreter().adaptivePredict(_input,78,_ctx);
+ _alt = getInterpreter().adaptivePredict(_input,80,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
if ( _parseListeners!=null ) triggerExitRuleEvent();
_prevctx = _localctx;
{
- setState(533);
+ setState(562);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,77,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,79,_ctx) ) {
case 1:
{
_localctx = new ArithmeticBinaryContext(new ValueExpressionContext(_parentctx, _parentState));
((ArithmeticBinaryContext)_localctx).left = _prevctx;
pushNewRecursionContext(_localctx, _startState, RULE_valueExpression);
- setState(523);
+ setState(552);
if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)");
- setState(524);
+ setState(553);
((ArithmeticBinaryContext)_localctx).operator = _input.LT(1);
_la = _input.LA(1);
- if ( !(((((_la - 80)) & ~0x3f) == 0 && ((1L << (_la - 80)) & ((1L << (ASTERISK - 80)) | (1L << (SLASH - 80)) | (1L << (PERCENT - 80)))) != 0)) ) {
+ if ( !(((((_la - 88)) & ~0x3f) == 0 && ((1L << (_la - 88)) & ((1L << (ASTERISK - 88)) | (1L << (SLASH - 88)) | (1L << (PERCENT - 88)))) != 0)) ) {
((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this);
} else {
consume();
}
- setState(525);
+ setState(554);
((ArithmeticBinaryContext)_localctx).right = valueExpression(4);
}
break;
@@ -3519,9 +3664,9 @@ class SqlBaseParser extends Parser {
_localctx = new ArithmeticBinaryContext(new ValueExpressionContext(_parentctx, _parentState));
((ArithmeticBinaryContext)_localctx).left = _prevctx;
pushNewRecursionContext(_localctx, _startState, RULE_valueExpression);
- setState(526);
+ setState(555);
if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)");
- setState(527);
+ setState(556);
((ArithmeticBinaryContext)_localctx).operator = _input.LT(1);
_la = _input.LA(1);
if ( !(_la==PLUS || _la==MINUS) ) {
@@ -3529,7 +3674,7 @@ class SqlBaseParser extends Parser {
} else {
consume();
}
- setState(528);
+ setState(557);
((ArithmeticBinaryContext)_localctx).right = valueExpression(3);
}
break;
@@ -3538,20 +3683,20 @@ class SqlBaseParser extends Parser {
_localctx = new ComparisonContext(new ValueExpressionContext(_parentctx, _parentState));
((ComparisonContext)_localctx).left = _prevctx;
pushNewRecursionContext(_localctx, _startState, RULE_valueExpression);
- setState(529);
+ setState(558);
if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)");
- setState(530);
+ setState(559);
comparisonOperator();
- setState(531);
+ setState(560);
((ComparisonContext)_localctx).right = valueExpression(2);
}
break;
}
}
}
- setState(537);
+ setState(566);
_errHandler.sync(this);
- _alt = getInterpreter().adaptivePredict(_input,78,_ctx);
+ _alt = getInterpreter().adaptivePredict(_input,80,_ctx);
}
}
}
@@ -3597,13 +3742,8 @@ class SqlBaseParser extends Parser {
}
}
public static class CastContext extends PrimaryExpressionContext {
- public TerminalNode CAST() { return getToken(SqlBaseParser.CAST, 0); }
- public ExpressionContext expression() {
- return getRuleContext(ExpressionContext.class,0);
- }
- public TerminalNode AS() { return getToken(SqlBaseParser.AS, 0); }
- public DataTypeContext dataType() {
- return getRuleContext(DataTypeContext.class,0);
+ public CastExpressionContext castExpression() {
+ return getRuleContext(CastExpressionContext.class,0);
}
public CastContext(PrimaryExpressionContext ctx) { copyFrom(ctx); }
@Override
@@ -3659,14 +3799,8 @@ class SqlBaseParser extends Parser {
}
}
public static class ExtractContext extends PrimaryExpressionContext {
- public IdentifierContext field;
- public TerminalNode EXTRACT() { return getToken(SqlBaseParser.EXTRACT, 0); }
- public TerminalNode FROM() { return getToken(SqlBaseParser.FROM, 0); }
- public ValueExpressionContext valueExpression() {
- return getRuleContext(ValueExpressionContext.class,0);
- }
- public IdentifierContext identifier() {
- return getRuleContext(IdentifierContext.class,0);
+ public ExtractExpressionContext extractExpression() {
+ return getRuleContext(ExtractExpressionContext.class,0);
}
public ExtractContext(PrimaryExpressionContext ctx) { copyFrom(ctx); }
@Override
@@ -3723,31 +3857,22 @@ class SqlBaseParser extends Parser {
else return visitor.visitChildren(this);
}
}
- public static class FunctionCallContext extends PrimaryExpressionContext {
- public IdentifierContext identifier() {
- return getRuleContext(IdentifierContext.class,0);
+ public static class FunctionContext extends PrimaryExpressionContext {
+ public FunctionExpressionContext functionExpression() {
+ return getRuleContext(FunctionExpressionContext.class,0);
}
- public List expression() {
- return getRuleContexts(ExpressionContext.class);
- }
- public ExpressionContext expression(int i) {
- return getRuleContext(ExpressionContext.class,i);
- }
- public SetQuantifierContext setQuantifier() {
- return getRuleContext(SetQuantifierContext.class,0);
- }
- public FunctionCallContext(PrimaryExpressionContext ctx) { copyFrom(ctx); }
+ public FunctionContext(PrimaryExpressionContext ctx) { copyFrom(ctx); }
@Override
public void enterRule(ParseTreeListener listener) {
- if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterFunctionCall(this);
+ if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterFunction(this);
}
@Override
public void exitRule(ParseTreeListener listener) {
- if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitFunctionCall(this);
+ if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitFunction(this);
}
@Override
public T accept(ParseTreeVisitor extends T> visitor) {
- if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor extends T>)visitor).visitFunctionCall(this);
+ if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor extends T>)visitor).visitFunction(this);
else return visitor.visitChildren(this);
}
}
@@ -3773,53 +3898,33 @@ class SqlBaseParser extends Parser {
public final PrimaryExpressionContext primaryExpression() throws RecognitionException {
PrimaryExpressionContext _localctx = new PrimaryExpressionContext(_ctx, getState());
- enterRule(_localctx, 52, RULE_primaryExpression);
+ enterRule(_localctx, 56, RULE_primaryExpression);
int _la;
try {
- setState(587);
+ setState(588);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,83,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,82,_ctx) ) {
case 1:
_localctx = new CastContext(_localctx);
enterOuterAlt(_localctx, 1);
{
- setState(538);
- match(CAST);
- setState(539);
- match(T__0);
- setState(540);
- expression();
- setState(541);
- match(AS);
- setState(542);
- dataType();
- setState(543);
- match(T__1);
+ setState(567);
+ castExpression();
}
break;
case 2:
_localctx = new ExtractContext(_localctx);
enterOuterAlt(_localctx, 2);
{
- setState(545);
- match(EXTRACT);
- setState(546);
- match(T__0);
- setState(547);
- ((ExtractContext)_localctx).field = identifier();
- setState(548);
- match(FROM);
- setState(549);
- valueExpression(0);
- setState(550);
- match(T__1);
+ setState(568);
+ extractExpression();
}
break;
case 3:
_localctx = new ConstantDefaultContext(_localctx);
enterOuterAlt(_localctx, 3);
{
- setState(552);
+ setState(569);
constant();
}
break;
@@ -3827,7 +3932,7 @@ class SqlBaseParser extends Parser {
_localctx = new StarContext(_localctx);
enterOuterAlt(_localctx, 4);
{
- setState(553);
+ setState(570);
match(ASTERISK);
}
break;
@@ -3835,76 +3940,38 @@ class SqlBaseParser extends Parser {
_localctx = new StarContext(_localctx);
enterOuterAlt(_localctx, 5);
{
- setState(557);
+ setState(574);
_la = _input.LA(1);
if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) {
{
- setState(554);
+ setState(571);
qualifiedName();
- setState(555);
+ setState(572);
match(DOT);
}
}
- setState(559);
+ setState(576);
match(ASTERISK);
}
break;
case 6:
- _localctx = new FunctionCallContext(_localctx);
+ _localctx = new FunctionContext(_localctx);
enterOuterAlt(_localctx, 6);
{
- setState(560);
- identifier();
- setState(561);
- match(T__0);
- setState(573);
- _la = _input.LA(1);
- if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ALL) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << DISTINCT) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << MATCH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TRUE - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (PLUS - 64)) | (1L << (MINUS - 64)) | (1L << (ASTERISK - 64)) | (1L << (PARAM - 64)) | (1L << (STRING - 64)) | (1L << (INTEGER_VALUE - 64)) | (1L << (DECIMAL_VALUE - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) {
- {
- setState(563);
- _la = _input.LA(1);
- if (_la==ALL || _la==DISTINCT) {
- {
- setState(562);
- setQuantifier();
- }
- }
-
- setState(565);
- expression();
- setState(570);
- _errHandler.sync(this);
- _la = _input.LA(1);
- while (_la==T__2) {
- {
- {
- setState(566);
- match(T__2);
- setState(567);
- expression();
- }
- }
- setState(572);
- _errHandler.sync(this);
- _la = _input.LA(1);
- }
- }
- }
-
- setState(575);
- match(T__1);
+ setState(577);
+ functionExpression();
}
break;
case 7:
_localctx = new SubqueryExpressionContext(_localctx);
enterOuterAlt(_localctx, 7);
{
- setState(577);
- match(T__0);
setState(578);
- query();
+ match(T__0);
setState(579);
+ query();
+ setState(580);
match(T__1);
}
break;
@@ -3912,7 +3979,7 @@ class SqlBaseParser extends Parser {
_localctx = new ColumnReferenceContext(_localctx);
enterOuterAlt(_localctx, 8);
{
- setState(581);
+ setState(582);
identifier();
}
break;
@@ -3920,7 +3987,7 @@ class SqlBaseParser extends Parser {
_localctx = new DereferenceContext(_localctx);
enterOuterAlt(_localctx, 9);
{
- setState(582);
+ setState(583);
qualifiedName();
}
break;
@@ -3928,11 +3995,11 @@ class SqlBaseParser extends Parser {
_localctx = new ParenthesizedExpressionContext(_localctx);
enterOuterAlt(_localctx, 10);
{
- setState(583);
- match(T__0);
setState(584);
- expression();
+ match(T__0);
setState(585);
+ expression();
+ setState(586);
match(T__1);
}
break;
@@ -3949,6 +4016,436 @@ class SqlBaseParser extends Parser {
return _localctx;
}
+ public static class CastExpressionContext extends ParserRuleContext {
+ public CastTemplateContext castTemplate() {
+ return getRuleContext(CastTemplateContext.class,0);
+ }
+ public TerminalNode FUNCTION_ESC() { return getToken(SqlBaseParser.FUNCTION_ESC, 0); }
+ public TerminalNode ESC_END() { return getToken(SqlBaseParser.ESC_END, 0); }
+ public CastExpressionContext(ParserRuleContext parent, int invokingState) {
+ super(parent, invokingState);
+ }
+ @Override public int getRuleIndex() { return RULE_castExpression; }
+ @Override
+ public void enterRule(ParseTreeListener listener) {
+ if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterCastExpression(this);
+ }
+ @Override
+ public void exitRule(ParseTreeListener listener) {
+ if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitCastExpression(this);
+ }
+ @Override
+ public T accept(ParseTreeVisitor extends T> visitor) {
+ if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor extends T>)visitor).visitCastExpression(this);
+ else return visitor.visitChildren(this);
+ }
+ }
+
+ public final CastExpressionContext castExpression() throws RecognitionException {
+ CastExpressionContext _localctx = new CastExpressionContext(_ctx, getState());
+ enterRule(_localctx, 58, RULE_castExpression);
+ try {
+ setState(595);
+ switch (_input.LA(1)) {
+ case CAST:
+ enterOuterAlt(_localctx, 1);
+ {
+ setState(590);
+ castTemplate();
+ }
+ break;
+ case FUNCTION_ESC:
+ enterOuterAlt(_localctx, 2);
+ {
+ setState(591);
+ match(FUNCTION_ESC);
+ setState(592);
+ castTemplate();
+ setState(593);
+ match(ESC_END);
+ }
+ break;
+ default:
+ throw new NoViableAltException(this);
+ }
+ }
+ catch (RecognitionException re) {
+ _localctx.exception = re;
+ _errHandler.reportError(this, re);
+ _errHandler.recover(this, re);
+ }
+ finally {
+ exitRule();
+ }
+ return _localctx;
+ }
+
+ public static class CastTemplateContext extends ParserRuleContext {
+ public TerminalNode CAST() { return getToken(SqlBaseParser.CAST, 0); }
+ public ExpressionContext expression() {
+ return getRuleContext(ExpressionContext.class,0);
+ }
+ public TerminalNode AS() { return getToken(SqlBaseParser.AS, 0); }
+ public DataTypeContext dataType() {
+ return getRuleContext(DataTypeContext.class,0);
+ }
+ public CastTemplateContext(ParserRuleContext parent, int invokingState) {
+ super(parent, invokingState);
+ }
+ @Override public int getRuleIndex() { return RULE_castTemplate; }
+ @Override
+ public void enterRule(ParseTreeListener listener) {
+ if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterCastTemplate(this);
+ }
+ @Override
+ public void exitRule(ParseTreeListener listener) {
+ if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitCastTemplate(this);
+ }
+ @Override
+ public T accept(ParseTreeVisitor extends T> visitor) {
+ if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor extends T>)visitor).visitCastTemplate(this);
+ else return visitor.visitChildren(this);
+ }
+ }
+
+ public final CastTemplateContext castTemplate() throws RecognitionException {
+ CastTemplateContext _localctx = new CastTemplateContext(_ctx, getState());
+ enterRule(_localctx, 60, RULE_castTemplate);
+ try {
+ enterOuterAlt(_localctx, 1);
+ {
+ setState(597);
+ match(CAST);
+ setState(598);
+ match(T__0);
+ setState(599);
+ expression();
+ setState(600);
+ match(AS);
+ setState(601);
+ dataType();
+ setState(602);
+ match(T__1);
+ }
+ }
+ catch (RecognitionException re) {
+ _localctx.exception = re;
+ _errHandler.reportError(this, re);
+ _errHandler.recover(this, re);
+ }
+ finally {
+ exitRule();
+ }
+ return _localctx;
+ }
+
+ public static class ExtractExpressionContext extends ParserRuleContext {
+ public ExtractTemplateContext extractTemplate() {
+ return getRuleContext(ExtractTemplateContext.class,0);
+ }
+ public TerminalNode FUNCTION_ESC() { return getToken(SqlBaseParser.FUNCTION_ESC, 0); }
+ public TerminalNode ESC_END() { return getToken(SqlBaseParser.ESC_END, 0); }
+ public ExtractExpressionContext(ParserRuleContext parent, int invokingState) {
+ super(parent, invokingState);
+ }
+ @Override public int getRuleIndex() { return RULE_extractExpression; }
+ @Override
+ public void enterRule(ParseTreeListener listener) {
+ if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterExtractExpression(this);
+ }
+ @Override
+ public void exitRule(ParseTreeListener listener) {
+ if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitExtractExpression(this);
+ }
+ @Override
+ public T accept(ParseTreeVisitor extends T> visitor) {
+ if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor extends T>)visitor).visitExtractExpression(this);
+ else return visitor.visitChildren(this);
+ }
+ }
+
+ public final ExtractExpressionContext extractExpression() throws RecognitionException {
+ ExtractExpressionContext _localctx = new ExtractExpressionContext(_ctx, getState());
+ enterRule(_localctx, 62, RULE_extractExpression);
+ try {
+ setState(609);
+ switch (_input.LA(1)) {
+ case EXTRACT:
+ enterOuterAlt(_localctx, 1);
+ {
+ setState(604);
+ extractTemplate();
+ }
+ break;
+ case FUNCTION_ESC:
+ enterOuterAlt(_localctx, 2);
+ {
+ setState(605);
+ match(FUNCTION_ESC);
+ setState(606);
+ extractTemplate();
+ setState(607);
+ match(ESC_END);
+ }
+ break;
+ default:
+ throw new NoViableAltException(this);
+ }
+ }
+ catch (RecognitionException re) {
+ _localctx.exception = re;
+ _errHandler.reportError(this, re);
+ _errHandler.recover(this, re);
+ }
+ finally {
+ exitRule();
+ }
+ return _localctx;
+ }
+
+ public static class ExtractTemplateContext extends ParserRuleContext {
+ public IdentifierContext field;
+ public TerminalNode EXTRACT() { return getToken(SqlBaseParser.EXTRACT, 0); }
+ public TerminalNode FROM() { return getToken(SqlBaseParser.FROM, 0); }
+ public ValueExpressionContext valueExpression() {
+ return getRuleContext(ValueExpressionContext.class,0);
+ }
+ public IdentifierContext identifier() {
+ return getRuleContext(IdentifierContext.class,0);
+ }
+ public ExtractTemplateContext(ParserRuleContext parent, int invokingState) {
+ super(parent, invokingState);
+ }
+ @Override public int getRuleIndex() { return RULE_extractTemplate; }
+ @Override
+ public void enterRule(ParseTreeListener listener) {
+ if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterExtractTemplate(this);
+ }
+ @Override
+ public void exitRule(ParseTreeListener listener) {
+ if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitExtractTemplate(this);
+ }
+ @Override
+ public T accept(ParseTreeVisitor extends T> visitor) {
+ if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor extends T>)visitor).visitExtractTemplate(this);
+ else return visitor.visitChildren(this);
+ }
+ }
+
+ public final ExtractTemplateContext extractTemplate() throws RecognitionException {
+ ExtractTemplateContext _localctx = new ExtractTemplateContext(_ctx, getState());
+ enterRule(_localctx, 64, RULE_extractTemplate);
+ try {
+ enterOuterAlt(_localctx, 1);
+ {
+ setState(611);
+ match(EXTRACT);
+ setState(612);
+ match(T__0);
+ setState(613);
+ ((ExtractTemplateContext)_localctx).field = identifier();
+ setState(614);
+ match(FROM);
+ setState(615);
+ valueExpression(0);
+ setState(616);
+ match(T__1);
+ }
+ }
+ catch (RecognitionException re) {
+ _localctx.exception = re;
+ _errHandler.reportError(this, re);
+ _errHandler.recover(this, re);
+ }
+ finally {
+ exitRule();
+ }
+ return _localctx;
+ }
+
+ public static class FunctionExpressionContext extends ParserRuleContext {
+ public FunctionTemplateContext functionTemplate() {
+ return getRuleContext(FunctionTemplateContext.class,0);
+ }
+ public TerminalNode FUNCTION_ESC() { return getToken(SqlBaseParser.FUNCTION_ESC, 0); }
+ public FunctionExpressionContext(ParserRuleContext parent, int invokingState) {
+ super(parent, invokingState);
+ }
+ @Override public int getRuleIndex() { return RULE_functionExpression; }
+ @Override
+ public void enterRule(ParseTreeListener listener) {
+ if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterFunctionExpression(this);
+ }
+ @Override
+ public void exitRule(ParseTreeListener listener) {
+ if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitFunctionExpression(this);
+ }
+ @Override
+ public T accept(ParseTreeVisitor extends T> visitor) {
+ if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor extends T>)visitor).visitFunctionExpression(this);
+ else return visitor.visitChildren(this);
+ }
+ }
+
+ public final FunctionExpressionContext functionExpression() throws RecognitionException {
+ FunctionExpressionContext _localctx = new FunctionExpressionContext(_ctx, getState());
+ enterRule(_localctx, 66, RULE_functionExpression);
+ try {
+ setState(623);
+ switch (_input.LA(1)) {
+ case ANALYZE:
+ case ANALYZED:
+ case CATALOGS:
+ case COLUMNS:
+ case DEBUG:
+ case EXECUTABLE:
+ case EXPLAIN:
+ case FORMAT:
+ case FUNCTIONS:
+ case GRAPHVIZ:
+ case MAPPED:
+ case OPTIMIZED:
+ case PARSED:
+ case PHYSICAL:
+ case PLAN:
+ case RLIKE:
+ case QUERY:
+ case SCHEMAS:
+ case SHOW:
+ case SYS:
+ case TABLES:
+ case TEXT:
+ case TYPE:
+ case TYPES:
+ case VERIFY:
+ case IDENTIFIER:
+ case DIGIT_IDENTIFIER:
+ case QUOTED_IDENTIFIER:
+ case BACKQUOTED_IDENTIFIER:
+ enterOuterAlt(_localctx, 1);
+ {
+ setState(618);
+ functionTemplate();
+ }
+ break;
+ case FUNCTION_ESC:
+ enterOuterAlt(_localctx, 2);
+ {
+ setState(619);
+ match(FUNCTION_ESC);
+ setState(620);
+ functionTemplate();
+ setState(621);
+ match(ESC_END);
+ }
+ break;
+ default:
+ throw new NoViableAltException(this);
+ }
+ }
+ catch (RecognitionException re) {
+ _localctx.exception = re;
+ _errHandler.reportError(this, re);
+ _errHandler.recover(this, re);
+ }
+ finally {
+ exitRule();
+ }
+ return _localctx;
+ }
+
+ public static class FunctionTemplateContext extends ParserRuleContext {
+ public IdentifierContext identifier() {
+ return getRuleContext(IdentifierContext.class,0);
+ }
+ public List expression() {
+ return getRuleContexts(ExpressionContext.class);
+ }
+ public ExpressionContext expression(int i) {
+ return getRuleContext(ExpressionContext.class,i);
+ }
+ public SetQuantifierContext setQuantifier() {
+ return getRuleContext(SetQuantifierContext.class,0);
+ }
+ public FunctionTemplateContext(ParserRuleContext parent, int invokingState) {
+ super(parent, invokingState);
+ }
+ @Override public int getRuleIndex() { return RULE_functionTemplate; }
+ @Override
+ public void enterRule(ParseTreeListener listener) {
+ if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterFunctionTemplate(this);
+ }
+ @Override
+ public void exitRule(ParseTreeListener listener) {
+ if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitFunctionTemplate(this);
+ }
+ @Override
+ public T accept(ParseTreeVisitor extends T> visitor) {
+ if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor extends T>)visitor).visitFunctionTemplate(this);
+ else return visitor.visitChildren(this);
+ }
+ }
+
+ public final FunctionTemplateContext functionTemplate() throws RecognitionException {
+ FunctionTemplateContext _localctx = new FunctionTemplateContext(_ctx, getState());
+ enterRule(_localctx, 68, RULE_functionTemplate);
+ int _la;
+ try {
+ enterOuterAlt(_localctx, 1);
+ {
+ setState(625);
+ identifier();
+ setState(626);
+ match(T__0);
+ setState(638);
+ _la = _input.LA(1);
+ if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ALL) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << DISTINCT) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << MATCH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TRUE - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (FUNCTION_ESC - 64)) | (1L << (DATE_ESC - 64)) | (1L << (TIME_ESC - 64)) | (1L << (TIMESTAMP_ESC - 64)) | (1L << (GUID_ESC - 64)) | (1L << (PLUS - 64)) | (1L << (MINUS - 64)) | (1L << (ASTERISK - 64)) | (1L << (PARAM - 64)) | (1L << (STRING - 64)) | (1L << (INTEGER_VALUE - 64)) | (1L << (DECIMAL_VALUE - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) {
+ {
+ setState(628);
+ _la = _input.LA(1);
+ if (_la==ALL || _la==DISTINCT) {
+ {
+ setState(627);
+ setQuantifier();
+ }
+ }
+
+ setState(630);
+ expression();
+ setState(635);
+ _errHandler.sync(this);
+ _la = _input.LA(1);
+ while (_la==T__2) {
+ {
+ {
+ setState(631);
+ match(T__2);
+ setState(632);
+ expression();
+ }
+ }
+ setState(637);
+ _errHandler.sync(this);
+ _la = _input.LA(1);
+ }
+ }
+ }
+
+ setState(640);
+ match(T__1);
+ }
+ }
+ catch (RecognitionException re) {
+ _localctx.exception = re;
+ _errHandler.reportError(this, re);
+ _errHandler.recover(this, re);
+ }
+ finally {
+ exitRule();
+ }
+ return _localctx;
+ }
+
public static class ConstantContext extends ParserRuleContext {
public ConstantContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
@@ -3977,6 +4474,27 @@ class SqlBaseParser extends Parser {
else return visitor.visitChildren(this);
}
}
+ public static class TimestampEscapedLiteralContext extends ConstantContext {
+ public TerminalNode TIMESTAMP_ESC() { return getToken(SqlBaseParser.TIMESTAMP_ESC, 0); }
+ public StringContext string() {
+ return getRuleContext(StringContext.class,0);
+ }
+ public TerminalNode ESC_END() { return getToken(SqlBaseParser.ESC_END, 0); }
+ public TimestampEscapedLiteralContext(ConstantContext ctx) { copyFrom(ctx); }
+ @Override
+ public void enterRule(ParseTreeListener listener) {
+ if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterTimestampEscapedLiteral(this);
+ }
+ @Override
+ public void exitRule(ParseTreeListener listener) {
+ if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitTimestampEscapedLiteral(this);
+ }
+ @Override
+ public T accept(ParseTreeVisitor extends T> visitor) {
+ if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor extends T>)visitor).visitTimestampEscapedLiteral(this);
+ else return visitor.visitChildren(this);
+ }
+ }
public static class StringLiteralContext extends ConstantContext {
public List STRING() { return getTokens(SqlBaseParser.STRING); }
public TerminalNode STRING(int i) {
@@ -4014,6 +4532,48 @@ class SqlBaseParser extends Parser {
else return visitor.visitChildren(this);
}
}
+ public static class TimeEscapedLiteralContext extends ConstantContext {
+ public TerminalNode TIME_ESC() { return getToken(SqlBaseParser.TIME_ESC, 0); }
+ public StringContext string() {
+ return getRuleContext(StringContext.class,0);
+ }
+ public TerminalNode ESC_END() { return getToken(SqlBaseParser.ESC_END, 0); }
+ public TimeEscapedLiteralContext(ConstantContext ctx) { copyFrom(ctx); }
+ @Override
+ public void enterRule(ParseTreeListener listener) {
+ if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterTimeEscapedLiteral(this);
+ }
+ @Override
+ public void exitRule(ParseTreeListener listener) {
+ if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitTimeEscapedLiteral(this);
+ }
+ @Override
+ public T accept(ParseTreeVisitor extends T> visitor) {
+ if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor extends T>)visitor).visitTimeEscapedLiteral(this);
+ else return visitor.visitChildren(this);
+ }
+ }
+ public static class DateEscapedLiteralContext extends ConstantContext {
+ public TerminalNode DATE_ESC() { return getToken(SqlBaseParser.DATE_ESC, 0); }
+ public StringContext string() {
+ return getRuleContext(StringContext.class,0);
+ }
+ public TerminalNode ESC_END() { return getToken(SqlBaseParser.ESC_END, 0); }
+ public DateEscapedLiteralContext(ConstantContext ctx) { copyFrom(ctx); }
+ @Override
+ public void enterRule(ParseTreeListener listener) {
+ if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterDateEscapedLiteral(this);
+ }
+ @Override
+ public void exitRule(ParseTreeListener listener) {
+ if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitDateEscapedLiteral(this);
+ }
+ @Override
+ public T accept(ParseTreeVisitor extends T> visitor) {
+ if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor extends T>)visitor).visitDateEscapedLiteral(this);
+ else return visitor.visitChildren(this);
+ }
+ }
public static class NumericLiteralContext extends ConstantContext {
public NumberContext number() {
return getRuleContext(NumberContext.class,0);
@@ -4052,19 +4612,40 @@ class SqlBaseParser extends Parser {
else return visitor.visitChildren(this);
}
}
+ public static class GuidEscapedLiteralContext extends ConstantContext {
+ public TerminalNode GUID_ESC() { return getToken(SqlBaseParser.GUID_ESC, 0); }
+ public StringContext string() {
+ return getRuleContext(StringContext.class,0);
+ }
+ public TerminalNode ESC_END() { return getToken(SqlBaseParser.ESC_END, 0); }
+ public GuidEscapedLiteralContext(ConstantContext ctx) { copyFrom(ctx); }
+ @Override
+ public void enterRule(ParseTreeListener listener) {
+ if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterGuidEscapedLiteral(this);
+ }
+ @Override
+ public void exitRule(ParseTreeListener listener) {
+ if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitGuidEscapedLiteral(this);
+ }
+ @Override
+ public T accept(ParseTreeVisitor extends T> visitor) {
+ if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor extends T>)visitor).visitGuidEscapedLiteral(this);
+ else return visitor.visitChildren(this);
+ }
+ }
public final ConstantContext constant() throws RecognitionException {
ConstantContext _localctx = new ConstantContext(_ctx, getState());
- enterRule(_localctx, 54, RULE_constant);
+ enterRule(_localctx, 70, RULE_constant);
try {
int _alt;
- setState(598);
+ setState(667);
switch (_input.LA(1)) {
case NULL:
_localctx = new NullLiteralContext(_localctx);
enterOuterAlt(_localctx, 1);
{
- setState(589);
+ setState(642);
match(NULL);
}
break;
@@ -4073,7 +4654,7 @@ class SqlBaseParser extends Parser {
_localctx = new NumericLiteralContext(_localctx);
enterOuterAlt(_localctx, 2);
{
- setState(590);
+ setState(643);
number();
}
break;
@@ -4082,7 +4663,7 @@ class SqlBaseParser extends Parser {
_localctx = new BooleanLiteralContext(_localctx);
enterOuterAlt(_localctx, 3);
{
- setState(591);
+ setState(644);
booleanValue();
}
break;
@@ -4090,7 +4671,7 @@ class SqlBaseParser extends Parser {
_localctx = new StringLiteralContext(_localctx);
enterOuterAlt(_localctx, 4);
{
- setState(593);
+ setState(646);
_errHandler.sync(this);
_alt = 1;
do {
@@ -4098,7 +4679,7 @@ class SqlBaseParser extends Parser {
case 1:
{
{
- setState(592);
+ setState(645);
match(STRING);
}
}
@@ -4106,9 +4687,9 @@ class SqlBaseParser extends Parser {
default:
throw new NoViableAltException(this);
}
- setState(595);
+ setState(648);
_errHandler.sync(this);
- _alt = getInterpreter().adaptivePredict(_input,84,_ctx);
+ _alt = getInterpreter().adaptivePredict(_input,89,_ctx);
} while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER );
}
break;
@@ -4116,10 +4697,58 @@ class SqlBaseParser extends Parser {
_localctx = new ParamLiteralContext(_localctx);
enterOuterAlt(_localctx, 5);
{
- setState(597);
+ setState(650);
match(PARAM);
}
break;
+ case DATE_ESC:
+ _localctx = new DateEscapedLiteralContext(_localctx);
+ enterOuterAlt(_localctx, 6);
+ {
+ setState(651);
+ match(DATE_ESC);
+ setState(652);
+ string();
+ setState(653);
+ match(ESC_END);
+ }
+ break;
+ case TIME_ESC:
+ _localctx = new TimeEscapedLiteralContext(_localctx);
+ enterOuterAlt(_localctx, 7);
+ {
+ setState(655);
+ match(TIME_ESC);
+ setState(656);
+ string();
+ setState(657);
+ match(ESC_END);
+ }
+ break;
+ case TIMESTAMP_ESC:
+ _localctx = new TimestampEscapedLiteralContext(_localctx);
+ enterOuterAlt(_localctx, 8);
+ {
+ setState(659);
+ match(TIMESTAMP_ESC);
+ setState(660);
+ string();
+ setState(661);
+ match(ESC_END);
+ }
+ break;
+ case GUID_ESC:
+ _localctx = new GuidEscapedLiteralContext(_localctx);
+ enterOuterAlt(_localctx, 9);
+ {
+ setState(663);
+ match(GUID_ESC);
+ setState(664);
+ string();
+ setState(665);
+ match(ESC_END);
+ }
+ break;
default:
throw new NoViableAltException(this);
}
@@ -4163,14 +4792,14 @@ class SqlBaseParser extends Parser {
public final ComparisonOperatorContext comparisonOperator() throws RecognitionException {
ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, getState());
- enterRule(_localctx, 56, RULE_comparisonOperator);
+ enterRule(_localctx, 72, RULE_comparisonOperator);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
- setState(600);
+ setState(669);
_la = _input.LA(1);
- if ( !(((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & ((1L << (EQ - 72)) | (1L << (NEQ - 72)) | (1L << (LT - 72)) | (1L << (LTE - 72)) | (1L << (GT - 72)) | (1L << (GTE - 72)))) != 0)) ) {
+ if ( !(((((_la - 80)) & ~0x3f) == 0 && ((1L << (_la - 80)) & ((1L << (EQ - 80)) | (1L << (NEQ - 80)) | (1L << (LT - 80)) | (1L << (LTE - 80)) | (1L << (GT - 80)) | (1L << (GTE - 80)))) != 0)) ) {
_errHandler.recoverInline(this);
} else {
consume();
@@ -4212,12 +4841,12 @@ class SqlBaseParser extends Parser {
public final BooleanValueContext booleanValue() throws RecognitionException {
BooleanValueContext _localctx = new BooleanValueContext(_ctx, getState());
- enterRule(_localctx, 58, RULE_booleanValue);
+ enterRule(_localctx, 74, RULE_booleanValue);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
- setState(602);
+ setState(671);
_la = _input.LA(1);
if ( !(_la==FALSE || _la==TRUE) ) {
_errHandler.recoverInline(this);
@@ -4270,12 +4899,12 @@ class SqlBaseParser extends Parser {
public final DataTypeContext dataType() throws RecognitionException {
DataTypeContext _localctx = new DataTypeContext(_ctx, getState());
- enterRule(_localctx, 60, RULE_dataType);
+ enterRule(_localctx, 76, RULE_dataType);
try {
_localctx = new PrimitiveDataTypeContext(_localctx);
enterOuterAlt(_localctx, 1);
{
- setState(604);
+ setState(673);
identifier();
}
}
@@ -4322,30 +4951,30 @@ class SqlBaseParser extends Parser {
public final QualifiedNameContext qualifiedName() throws RecognitionException {
QualifiedNameContext _localctx = new QualifiedNameContext(_ctx, getState());
- enterRule(_localctx, 62, RULE_qualifiedName);
+ enterRule(_localctx, 78, RULE_qualifiedName);
try {
int _alt;
enterOuterAlt(_localctx, 1);
{
- setState(611);
+ setState(680);
_errHandler.sync(this);
- _alt = getInterpreter().adaptivePredict(_input,86,_ctx);
+ _alt = getInterpreter().adaptivePredict(_input,91,_ctx);
while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) {
if ( _alt==1 ) {
{
{
- setState(606);
+ setState(675);
identifier();
- setState(607);
+ setState(676);
match(DOT);
}
}
}
- setState(613);
+ setState(682);
_errHandler.sync(this);
- _alt = getInterpreter().adaptivePredict(_input,86,_ctx);
+ _alt = getInterpreter().adaptivePredict(_input,91,_ctx);
}
- setState(614);
+ setState(683);
identifier();
}
}
@@ -4388,15 +5017,15 @@ class SqlBaseParser extends Parser {
public final IdentifierContext identifier() throws RecognitionException {
IdentifierContext _localctx = new IdentifierContext(_ctx, getState());
- enterRule(_localctx, 64, RULE_identifier);
+ enterRule(_localctx, 80, RULE_identifier);
try {
- setState(618);
+ setState(687);
switch (_input.LA(1)) {
case QUOTED_IDENTIFIER:
case BACKQUOTED_IDENTIFIER:
enterOuterAlt(_localctx, 1);
{
- setState(616);
+ setState(685);
quoteIdentifier();
}
break;
@@ -4429,7 +5058,7 @@ class SqlBaseParser extends Parser {
case DIGIT_IDENTIFIER:
enterOuterAlt(_localctx, 2);
{
- setState(617);
+ setState(686);
unquoteIdentifier();
}
break;
@@ -4479,46 +5108,46 @@ class SqlBaseParser extends Parser {
public final TableIdentifierContext tableIdentifier() throws RecognitionException {
TableIdentifierContext _localctx = new TableIdentifierContext(_ctx, getState());
- enterRule(_localctx, 66, RULE_tableIdentifier);
+ enterRule(_localctx, 82, RULE_tableIdentifier);
int _la;
try {
- setState(632);
+ setState(701);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,90,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,95,_ctx) ) {
case 1:
enterOuterAlt(_localctx, 1);
{
- setState(623);
+ setState(692);
_la = _input.LA(1);
if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) {
{
- setState(620);
+ setState(689);
((TableIdentifierContext)_localctx).catalog = identifier();
- setState(621);
+ setState(690);
match(T__3);
}
}
- setState(625);
+ setState(694);
match(TABLE_IDENTIFIER);
}
break;
case 2:
enterOuterAlt(_localctx, 2);
{
- setState(629);
+ setState(698);
_errHandler.sync(this);
- switch ( getInterpreter().adaptivePredict(_input,89,_ctx) ) {
+ switch ( getInterpreter().adaptivePredict(_input,94,_ctx) ) {
case 1:
{
- setState(626);
+ setState(695);
((TableIdentifierContext)_localctx).catalog = identifier();
- setState(627);
+ setState(696);
match(T__3);
}
break;
}
- setState(631);
+ setState(700);
((TableIdentifierContext)_localctx).name = identifier();
}
break;
@@ -4583,15 +5212,15 @@ class SqlBaseParser extends Parser {
public final QuoteIdentifierContext quoteIdentifier() throws RecognitionException {
QuoteIdentifierContext _localctx = new QuoteIdentifierContext(_ctx, getState());
- enterRule(_localctx, 68, RULE_quoteIdentifier);
+ enterRule(_localctx, 84, RULE_quoteIdentifier);
try {
- setState(636);
+ setState(705);
switch (_input.LA(1)) {
case QUOTED_IDENTIFIER:
_localctx = new QuotedIdentifierContext(_localctx);
enterOuterAlt(_localctx, 1);
{
- setState(634);
+ setState(703);
match(QUOTED_IDENTIFIER);
}
break;
@@ -4599,7 +5228,7 @@ class SqlBaseParser extends Parser {
_localctx = new BackQuotedIdentifierContext(_localctx);
enterOuterAlt(_localctx, 2);
{
- setState(635);
+ setState(704);
match(BACKQUOTED_IDENTIFIER);
}
break;
@@ -4669,15 +5298,15 @@ class SqlBaseParser extends Parser {
public final UnquoteIdentifierContext unquoteIdentifier() throws RecognitionException {
UnquoteIdentifierContext _localctx = new UnquoteIdentifierContext(_ctx, getState());
- enterRule(_localctx, 70, RULE_unquoteIdentifier);
+ enterRule(_localctx, 86, RULE_unquoteIdentifier);
try {
- setState(641);
+ setState(710);
switch (_input.LA(1)) {
case IDENTIFIER:
_localctx = new UnquotedIdentifierContext(_localctx);
enterOuterAlt(_localctx, 1);
{
- setState(638);
+ setState(707);
match(IDENTIFIER);
}
break;
@@ -4709,7 +5338,7 @@ class SqlBaseParser extends Parser {
_localctx = new UnquotedIdentifierContext(_localctx);
enterOuterAlt(_localctx, 2);
{
- setState(639);
+ setState(708);
nonReserved();
}
break;
@@ -4717,7 +5346,7 @@ class SqlBaseParser extends Parser {
_localctx = new DigitIdentifierContext(_localctx);
enterOuterAlt(_localctx, 3);
{
- setState(640);
+ setState(709);
match(DIGIT_IDENTIFIER);
}
break;
@@ -4784,15 +5413,15 @@ class SqlBaseParser extends Parser {
public final NumberContext number() throws RecognitionException {
NumberContext _localctx = new NumberContext(_ctx, getState());
- enterRule(_localctx, 72, RULE_number);
+ enterRule(_localctx, 88, RULE_number);
try {
- setState(645);
+ setState(714);
switch (_input.LA(1)) {
case DECIMAL_VALUE:
_localctx = new DecimalLiteralContext(_localctx);
enterOuterAlt(_localctx, 1);
{
- setState(643);
+ setState(712);
match(DECIMAL_VALUE);
}
break;
@@ -4800,7 +5429,7 @@ class SqlBaseParser extends Parser {
_localctx = new IntegerLiteralContext(_localctx);
enterOuterAlt(_localctx, 2);
{
- setState(644);
+ setState(713);
match(INTEGER_VALUE);
}
break;
@@ -4843,12 +5472,12 @@ class SqlBaseParser extends Parser {
public final StringContext string() throws RecognitionException {
StringContext _localctx = new StringContext(_ctx, getState());
- enterRule(_localctx, 74, RULE_string);
+ enterRule(_localctx, 90, RULE_string);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
- setState(647);
+ setState(716);
_la = _input.LA(1);
if ( !(_la==PARAM || _la==STRING) ) {
_errHandler.recoverInline(this);
@@ -4915,12 +5544,12 @@ class SqlBaseParser extends Parser {
public final NonReservedContext nonReserved() throws RecognitionException {
NonReservedContext _localctx = new NonReservedContext(_ctx, getState());
- enterRule(_localctx, 76, RULE_nonReserved);
+ enterRule(_localctx, 92, RULE_nonReserved);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
- setState(649);
+ setState(718);
_la = _input.LA(1);
if ( !(((((_la - 6)) & ~0x3f) == 0 && ((1L << (_la - 6)) & ((1L << (ANALYZE - 6)) | (1L << (ANALYZED - 6)) | (1L << (CATALOGS - 6)) | (1L << (COLUMNS - 6)) | (1L << (DEBUG - 6)) | (1L << (EXECUTABLE - 6)) | (1L << (EXPLAIN - 6)) | (1L << (FORMAT - 6)) | (1L << (FUNCTIONS - 6)) | (1L << (GRAPHVIZ - 6)) | (1L << (MAPPED - 6)) | (1L << (OPTIMIZED - 6)) | (1L << (PARSED - 6)) | (1L << (PHYSICAL - 6)) | (1L << (PLAN - 6)) | (1L << (RLIKE - 6)) | (1L << (QUERY - 6)) | (1L << (SCHEMAS - 6)) | (1L << (SHOW - 6)) | (1L << (SYS - 6)) | (1L << (TABLES - 6)) | (1L << (TEXT - 6)) | (1L << (TYPE - 6)) | (1L << (TYPES - 6)) | (1L << (VERIFY - 6)))) != 0)) ) {
_errHandler.recoverInline(this);
@@ -4942,9 +5571,9 @@ class SqlBaseParser extends Parser {
public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) {
switch (ruleIndex) {
- case 21:
+ case 22:
return booleanExpression_sempred((BooleanExpressionContext)_localctx, predIndex);
- case 25:
+ case 27:
return valueExpression_sempred((ValueExpressionContext)_localctx, predIndex);
}
return true;
@@ -4971,266 +5600,291 @@ class SqlBaseParser extends Parser {
}
public static final String _serializedATN =
- "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3d\u028e\4\2\t\2\4"+
+ "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3l\u02d3\4\2\t\2\4"+
"\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t"+
"\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+
"\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+
"\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t \4!"+
- "\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\3\2\3\2\3\2\3\3\3\3"+
- "\3\3\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4`\n\4\f\4\16\4c\13\4\3\4\5"+
- "\4f\n\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4o\n\4\f\4\16\4r\13\4\3\4\5\4u\n"+
- "\4\3\4\3\4\3\4\3\4\5\4{\n\4\3\4\5\4~\n\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3"+
- "\4\3\4\5\4\u0089\n\4\3\4\5\4\u008c\n\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4"+
- "\5\4\u0096\n\4\3\4\5\4\u0099\n\4\3\4\5\4\u009c\n\4\3\4\5\4\u009f\n\4\3"+
- "\4\3\4\3\4\3\4\7\4\u00a5\n\4\f\4\16\4\u00a8\13\4\5\4\u00aa\n\4\3\4\3\4"+
- "\3\4\3\4\5\4\u00b0\n\4\3\4\3\4\5\4\u00b4\n\4\3\4\5\4\u00b7\n\4\3\4\5\4"+
- "\u00ba\n\4\3\4\5\4\u00bd\n\4\3\4\3\4\3\4\3\4\3\4\5\4\u00c4\n\4\3\5\3\5"+
- "\3\5\3\5\7\5\u00ca\n\5\f\5\16\5\u00cd\13\5\5\5\u00cf\n\5\3\5\3\5\3\6\3"+
- "\6\3\6\3\6\3\6\3\6\7\6\u00d9\n\6\f\6\16\6\u00dc\13\6\5\6\u00de\n\6\3\6"+
- "\3\6\5\6\u00e2\n\6\3\7\3\7\3\7\3\7\3\7\5\7\u00e9\n\7\3\b\3\b\5\b\u00ed"+
- "\n\b\3\t\3\t\5\t\u00f1\n\t\3\t\3\t\3\t\7\t\u00f6\n\t\f\t\16\t\u00f9\13"+
- "\t\3\t\5\t\u00fc\n\t\3\t\3\t\5\t\u0100\n\t\3\t\3\t\3\t\5\t\u0105\n\t\3"+
- "\t\3\t\5\t\u0109\n\t\3\n\3\n\3\n\3\n\7\n\u010f\n\n\f\n\16\n\u0112\13\n"+
- "\3\13\5\13\u0115\n\13\3\13\3\13\3\13\7\13\u011a\n\13\f\13\16\13\u011d"+
- "\13\13\3\f\3\f\3\r\3\r\3\r\3\r\7\r\u0125\n\r\f\r\16\r\u0128\13\r\5\r\u012a"+
- "\n\r\3\r\3\r\5\r\u012e\n\r\3\16\3\16\3\16\3\16\3\16\3\16\3\17\3\17\3\20"+
- "\3\20\5\20\u013a\n\20\3\20\5\20\u013d\n\20\3\21\3\21\7\21\u0141\n\21\f"+
- "\21\16\21\u0144\13\21\3\22\3\22\3\22\3\22\5\22\u014a\n\22\3\22\3\22\3"+
- "\22\3\22\3\22\5\22\u0151\n\22\3\23\5\23\u0154\n\23\3\23\3\23\5\23\u0158"+
- "\n\23\3\23\3\23\5\23\u015c\n\23\3\23\3\23\5\23\u0160\n\23\5\23\u0162\n"+
- "\23\3\24\3\24\3\24\3\24\3\24\3\24\3\24\7\24\u016b\n\24\f\24\16\24\u016e"+
- "\13\24\3\24\3\24\5\24\u0172\n\24\3\25\3\25\5\25\u0176\n\25\3\25\5\25\u0179"+
- "\n\25\3\25\3\25\3\25\3\25\5\25\u017f\n\25\3\25\5\25\u0182\n\25\3\25\3"+
- "\25\3\25\3\25\5\25\u0188\n\25\3\25\5\25\u018b\n\25\5\25\u018d\n\25\3\26"+
- "\3\26\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27"+
- "\7\27\u019e\n\27\f\27\16\27\u01a1\13\27\3\27\3\27\3\27\3\27\3\27\3\27"+
- "\3\27\3\27\3\27\7\27\u01ac\n\27\f\27\16\27\u01af\13\27\3\27\3\27\3\27"+
- "\3\27\3\27\3\27\3\27\3\27\3\27\7\27\u01ba\n\27\f\27\16\27\u01bd\13\27"+
- "\3\27\3\27\3\27\5\27\u01c2\n\27\3\27\3\27\3\27\3\27\3\27\3\27\7\27\u01ca"+
- "\n\27\f\27\16\27\u01cd\13\27\3\30\3\30\5\30\u01d1\n\30\3\31\5\31\u01d4"+
- "\n\31\3\31\3\31\3\31\3\31\3\31\3\31\5\31\u01dc\n\31\3\31\3\31\3\31\3\31"+
- "\3\31\7\31\u01e3\n\31\f\31\16\31\u01e6\13\31\3\31\3\31\3\31\5\31\u01eb"+
- "\n\31\3\31\3\31\3\31\3\31\3\31\3\31\5\31\u01f3\n\31\3\31\3\31\3\31\5\31"+
- "\u01f8\n\31\3\31\3\31\3\31\3\31\5\31\u01fe\n\31\3\31\5\31\u0201\n\31\3"+
- "\32\3\32\3\32\5\32\u0206\n\32\3\33\3\33\3\33\3\33\5\33\u020c\n\33\3\33"+
- "\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\7\33\u0218\n\33\f\33\16"+
- "\33\u021b\13\33\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34"+
- "\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\5\34\u0230\n\34\3\34\3\34\3\34"+
- "\3\34\5\34\u0236\n\34\3\34\3\34\3\34\7\34\u023b\n\34\f\34\16\34\u023e"+
- "\13\34\5\34\u0240\n\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3"+
- "\34\3\34\3\34\5\34\u024e\n\34\3\35\3\35\3\35\3\35\6\35\u0254\n\35\r\35"+
- "\16\35\u0255\3\35\5\35\u0259\n\35\3\36\3\36\3\37\3\37\3 \3 \3!\3!\3!\7"+
- "!\u0264\n!\f!\16!\u0267\13!\3!\3!\3\"\3\"\5\"\u026d\n\"\3#\3#\3#\5#\u0272"+
- "\n#\3#\3#\3#\3#\5#\u0278\n#\3#\5#\u027b\n#\3$\3$\5$\u027f\n$\3%\3%\3%"+
- "\5%\u0284\n%\3&\3&\5&\u0288\n&\3\'\3\'\3(\3(\3(\2\4,\64)\2\4\6\b\n\f\16"+
- "\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:<>@BDFHJLN\2\20\b\2\7\7"+
- "\t\t\31\31,,\62\62\66\66\4\2\"\"BB\4\2\t\t\62\62\4\2\37\37%%\3\2\25\26"+
- "\4\2\7\7YY\4\2\r\r\25\25\4\2\7\7\27\27\3\2PQ\3\2RT\3\2JO\4\2\35\35CC\3"+
- "\2WX\20\2\b\t\22\24\31\31\33\33\36\36!\",,\62\62\668:<>?ABDEGG\u02e8\2"+
- "P\3\2\2\2\4S\3\2\2\2\6\u00c3\3\2\2\2\b\u00ce\3\2\2\2\n\u00d2\3\2\2\2\f"+
- "\u00e8\3\2\2\2\16\u00ea\3\2\2\2\20\u00ee\3\2\2\2\22\u010a\3\2\2\2\24\u0114"+
- "\3\2\2\2\26\u011e\3\2\2\2\30\u012d\3\2\2\2\32\u012f\3\2\2\2\34\u0135\3"+
- "\2\2\2\36\u0137\3\2\2\2 \u013e\3\2\2\2\"\u0150\3\2\2\2$\u0161\3\2\2\2"+
- "&\u0171\3\2\2\2(\u018c\3\2\2\2*\u018e\3\2\2\2,\u01c1\3\2\2\2.\u01ce\3"+
- "\2\2\2\60\u0200\3\2\2\2\62\u0202\3\2\2\2\64\u020b\3\2\2\2\66\u024d\3\2"+
- "\2\28\u0258\3\2\2\2:\u025a\3\2\2\2<\u025c\3\2\2\2>\u025e\3\2\2\2@\u0265"+
- "\3\2\2\2B\u026c\3\2\2\2D\u027a\3\2\2\2F\u027e\3\2\2\2H\u0283\3\2\2\2J"+
- "\u0287\3\2\2\2L\u0289\3\2\2\2N\u028b\3\2\2\2PQ\5\6\4\2QR\7\2\2\3R\3\3"+
- "\2\2\2ST\5*\26\2TU\7\2\2\3U\5\3\2\2\2V\u00c4\5\b\5\2We\7\33\2\2Xa\7\3"+
- "\2\2YZ\78\2\2Z`\t\2\2\2[\\\7\36\2\2\\`\t\3\2\2]^\7G\2\2^`\5<\37\2_Y\3"+
- "\2\2\2_[\3\2\2\2_]\3\2\2\2`c\3\2\2\2a_\3\2\2\2ab\3\2\2\2bd\3\2\2\2ca\3"+
- "\2\2\2df\7\4\2\2eX\3\2\2\2ef\3\2\2\2fg\3\2\2\2g\u00c4\5\6\4\2ht\7\24\2"+
- "\2ip\7\3\2\2jk\78\2\2ko\t\4\2\2lm\7\36\2\2mo\t\3\2\2nj\3\2\2\2nl\3\2\2"+
- "\2or\3\2\2\2pn\3\2\2\2pq\3\2\2\2qs\3\2\2\2rp\3\2\2\2su\7\4\2\2ti\3\2\2"+
- "\2tu\3\2\2\2uv\3\2\2\2v\u00c4\5\6\4\2wx\7>\2\2x}\7A\2\2y{\7*\2\2zy\3\2"+
- "\2\2z{\3\2\2\2{|\3\2\2\2|~\5\62\32\2}z\3\2\2\2}~\3\2\2\2~\u00c4\3\2\2"+
- "\2\177\u0080\7>\2\2\u0080\u0081\7\23\2\2\u0081\u0082\t\5\2\2\u0082\u00c4"+
- "\5D#\2\u0083\u0084\t\6\2\2\u0084\u00c4\5D#\2\u0085\u0086\7>\2\2\u0086"+
- "\u008b\7!\2\2\u0087\u0089\7*\2\2\u0088\u0087\3\2\2\2\u0088\u0089\3\2\2"+
- "\2\u0089\u008a\3\2\2\2\u008a\u008c\5\62\32\2\u008b\u0088\3\2\2\2\u008b"+
- "\u008c\3\2\2\2\u008c\u00c4\3\2\2\2\u008d\u008e\7>\2\2\u008e\u00c4\7<\2"+
- "\2\u008f\u0090\7?\2\2\u0090\u00c4\7\22\2\2\u0091\u0092\7?\2\2\u0092\u0098"+
- "\7A\2\2\u0093\u0095\7\21\2\2\u0094\u0096\7*\2\2\u0095\u0094\3\2\2\2\u0095"+
- "\u0096\3\2\2\2\u0096\u0097\3\2\2\2\u0097\u0099\5\62\32\2\u0098\u0093\3"+
- "\2\2\2\u0098\u0099\3\2\2\2\u0099\u009e\3\2\2\2\u009a\u009c\7*\2\2\u009b"+
- "\u009a\3\2\2\2\u009b\u009c\3\2\2\2\u009c\u009d\3\2\2\2\u009d\u009f\5\62"+
- "\32\2\u009e\u009b\3\2\2\2\u009e\u009f\3\2\2\2\u009f\u00a9\3\2\2\2\u00a0"+
- "\u00a1\7D\2\2\u00a1\u00a6\5L\'\2\u00a2\u00a3\7\5\2\2\u00a3\u00a5\5L\'"+
- "\2\u00a4\u00a2\3\2\2\2\u00a5\u00a8\3\2\2\2\u00a6\u00a4\3\2\2\2\u00a6\u00a7"+
- "\3\2\2\2\u00a7\u00aa\3\2\2\2\u00a8\u00a6\3\2\2\2\u00a9\u00a0\3\2\2\2\u00a9"+
- "\u00aa\3\2\2\2\u00aa\u00c4\3\2\2\2\u00ab\u00ac\7?\2\2\u00ac\u00af\7\23"+
- "\2\2\u00ad\u00ae\7\21\2\2\u00ae\u00b0\5L\'\2\u00af\u00ad\3\2\2\2\u00af"+
- "\u00b0\3\2\2\2\u00b0\u00b6\3\2\2\2\u00b1\u00b3\7@\2\2\u00b2\u00b4\7*\2"+
- "\2\u00b3\u00b2\3\2\2\2\u00b3\u00b4\3\2\2\2\u00b4\u00b5\3\2\2\2\u00b5\u00b7"+
- "\5\62\32\2\u00b6\u00b1\3\2\2\2\u00b6\u00b7\3\2\2\2\u00b7\u00bc\3\2\2\2"+
- "\u00b8\u00ba\7*\2\2\u00b9\u00b8\3\2\2\2\u00b9\u00ba\3\2\2\2\u00ba\u00bb"+
- "\3\2\2\2\u00bb\u00bd\5\62\32\2\u00bc\u00b9\3\2\2\2\u00bc\u00bd\3\2\2\2"+
- "\u00bd\u00c4\3\2\2\2\u00be\u00bf\7?\2\2\u00bf\u00c4\7E\2\2\u00c0\u00c1"+
- "\7?\2\2\u00c1\u00c2\7@\2\2\u00c2\u00c4\7E\2\2\u00c3V\3\2\2\2\u00c3W\3"+
- "\2\2\2\u00c3h\3\2\2\2\u00c3w\3\2\2\2\u00c3\177\3\2\2\2\u00c3\u0083\3\2"+
- "\2\2\u00c3\u0085\3\2\2\2\u00c3\u008d\3\2\2\2\u00c3\u008f\3\2\2\2\u00c3"+
- "\u0091\3\2\2\2\u00c3\u00ab\3\2\2\2\u00c3\u00be\3\2\2\2\u00c3\u00c0\3\2"+
- "\2\2\u00c4\7\3\2\2\2\u00c5\u00c6\7I\2\2\u00c6\u00cb\5\32\16\2\u00c7\u00c8"+
- "\7\5\2\2\u00c8\u00ca\5\32\16\2\u00c9\u00c7\3\2\2\2\u00ca\u00cd\3\2\2\2"+
- "\u00cb\u00c9\3\2\2\2\u00cb\u00cc\3\2\2\2\u00cc\u00cf\3\2\2\2\u00cd\u00cb"+
- "\3\2\2\2\u00ce\u00c5\3\2\2\2\u00ce\u00cf\3\2\2\2\u00cf\u00d0\3\2\2\2\u00d0"+
- "\u00d1\5\n\6\2\u00d1\t\3\2\2\2\u00d2\u00dd\5\f\7\2\u00d3\u00d4\7\64\2"+
- "\2\u00d4\u00d5\7\17\2\2\u00d5\u00da\5\16\b\2\u00d6\u00d7\7\5\2\2\u00d7"+
- "\u00d9\5\16\b\2\u00d8\u00d6\3\2\2\2\u00d9\u00dc\3\2\2\2\u00da\u00d8\3"+
- "\2\2\2\u00da\u00db\3\2\2\2\u00db\u00de\3\2\2\2\u00dc\u00da\3\2\2\2\u00dd"+
- "\u00d3\3\2\2\2\u00dd\u00de\3\2\2\2\u00de\u00e1\3\2\2\2\u00df\u00e0\7+"+
- "\2\2\u00e0\u00e2\t\7\2\2\u00e1\u00df\3\2\2\2\u00e1\u00e2\3\2\2\2\u00e2"+
- "\13\3\2\2\2\u00e3\u00e9\5\20\t\2\u00e4\u00e5\7\3\2\2\u00e5\u00e6\5\n\6"+
- "\2\u00e6\u00e7\7\4\2\2\u00e7\u00e9\3\2\2\2\u00e8\u00e3\3\2\2\2\u00e8\u00e4"+
- "\3\2\2\2\u00e9\r\3\2\2\2\u00ea\u00ec\5*\26\2\u00eb\u00ed\t\b\2\2\u00ec"+
- "\u00eb\3\2\2\2\u00ec\u00ed\3\2\2\2\u00ed\17\3\2\2\2\u00ee\u00f0\7=\2\2"+
- "\u00ef\u00f1\5\34\17\2\u00f0\u00ef\3\2\2\2\u00f0\u00f1\3\2\2\2\u00f1\u00f2"+
- "\3\2\2\2\u00f2\u00f7\5\36\20\2\u00f3\u00f4\7\5\2\2\u00f4\u00f6\5\36\20"+
- "\2\u00f5\u00f3\3\2\2\2\u00f6\u00f9\3\2\2\2\u00f7\u00f5\3\2\2\2\u00f7\u00f8"+
- "\3\2\2\2\u00f8\u00fb\3\2\2\2\u00f9\u00f7\3\2\2\2\u00fa\u00fc\5\22\n\2"+
- "\u00fb\u00fa\3\2\2\2\u00fb\u00fc\3\2\2\2\u00fc\u00ff\3\2\2\2\u00fd\u00fe"+
- "\7H\2\2\u00fe\u0100\5,\27\2\u00ff\u00fd\3\2\2\2\u00ff\u0100\3\2\2\2\u0100"+
- "\u0104\3\2\2\2\u0101\u0102\7#\2\2\u0102\u0103\7\17\2\2\u0103\u0105\5\24"+
- "\13\2\u0104\u0101\3\2\2\2\u0104\u0105\3\2\2\2\u0105\u0108\3\2\2\2\u0106"+
- "\u0107\7$\2\2\u0107\u0109\5,\27\2\u0108\u0106\3\2\2\2\u0108\u0109\3\2"+
- "\2\2\u0109\21\3\2\2\2\u010a\u010b\7\37\2\2\u010b\u0110\5 \21\2\u010c\u010d"+
- "\7\5\2\2\u010d\u010f\5 \21\2\u010e\u010c\3\2\2\2\u010f\u0112\3\2\2\2\u0110"+
- "\u010e\3\2\2\2\u0110\u0111\3\2\2\2\u0111\23\3\2\2\2\u0112\u0110\3\2\2"+
- "\2\u0113\u0115\5\34\17\2\u0114\u0113\3\2\2\2\u0114\u0115\3\2\2\2\u0115"+
- "\u0116\3\2\2\2\u0116\u011b\5\26\f\2\u0117\u0118\7\5\2\2\u0118\u011a\5"+
- "\26\f\2\u0119\u0117\3\2\2\2\u011a\u011d\3\2\2\2\u011b\u0119\3\2\2\2\u011b"+
- "\u011c\3\2\2\2\u011c\25\3\2\2\2\u011d\u011b\3\2\2\2\u011e\u011f\5\30\r"+
- "\2\u011f\27\3\2\2\2\u0120\u0129\7\3\2\2\u0121\u0126\5*\26\2\u0122\u0123"+
- "\7\5\2\2\u0123\u0125\5*\26\2\u0124\u0122\3\2\2\2\u0125\u0128\3\2\2\2\u0126"+
- "\u0124\3\2\2\2\u0126\u0127\3\2\2\2\u0127\u012a\3\2\2\2\u0128\u0126\3\2"+
- "\2\2\u0129\u0121\3\2\2\2\u0129\u012a\3\2\2\2\u012a\u012b\3\2\2\2\u012b"+
- "\u012e\7\4\2\2\u012c\u012e\5*\26\2\u012d\u0120\3\2\2\2\u012d\u012c\3\2"+
- "\2\2\u012e\31\3\2\2\2\u012f\u0130\5B\"\2\u0130\u0131\7\f\2\2\u0131\u0132"+
- "\7\3\2\2\u0132\u0133\5\n\6\2\u0133\u0134\7\4\2\2\u0134\33\3\2\2\2\u0135"+
- "\u0136\t\t\2\2\u0136\35\3\2\2\2\u0137\u013c\5*\26\2\u0138\u013a\7\f\2"+
- "\2\u0139\u0138\3\2\2\2\u0139\u013a\3\2\2\2\u013a\u013b\3\2\2\2\u013b\u013d"+
- "\5B\"\2\u013c\u0139\3\2\2\2\u013c\u013d\3\2\2\2\u013d\37\3\2\2\2\u013e"+
- "\u0142\5(\25\2\u013f\u0141\5\"\22\2\u0140\u013f\3\2\2\2\u0141\u0144\3"+
- "\2\2\2\u0142\u0140\3\2\2\2\u0142\u0143\3\2\2\2\u0143!\3\2\2\2\u0144\u0142"+
- "\3\2\2\2\u0145\u0146\5$\23\2\u0146\u0147\7(\2\2\u0147\u0149\5(\25\2\u0148"+
- "\u014a\5&\24\2\u0149\u0148\3\2\2\2\u0149\u014a\3\2\2\2\u014a\u0151\3\2"+
- "\2\2\u014b\u014c\7.\2\2\u014c\u014d\5$\23\2\u014d\u014e\7(\2\2\u014e\u014f"+
- "\5(\25\2\u014f\u0151\3\2\2\2\u0150\u0145\3\2\2\2\u0150\u014b\3\2\2\2\u0151"+
- "#\3\2\2\2\u0152\u0154\7&\2\2\u0153\u0152\3\2\2\2\u0153\u0154\3\2\2\2\u0154"+
- "\u0162\3\2\2\2\u0155\u0157\7)\2\2\u0156\u0158\7\65\2\2\u0157\u0156\3\2"+
- "\2\2\u0157\u0158\3\2\2\2\u0158\u0162\3\2\2\2\u0159\u015b\79\2\2\u015a"+
- "\u015c\7\65\2\2\u015b\u015a\3\2\2\2\u015b\u015c\3\2\2\2\u015c\u0162\3"+
- "\2\2\2\u015d\u015f\7 \2\2\u015e\u0160\7\65\2\2\u015f\u015e\3\2\2\2\u015f"+
- "\u0160\3\2\2\2\u0160\u0162\3\2\2\2\u0161\u0153\3\2\2\2\u0161\u0155\3\2"+
- "\2\2\u0161\u0159\3\2\2\2\u0161\u015d\3\2\2\2\u0162%\3\2\2\2\u0163\u0164"+
- "\7\61\2\2\u0164\u0172\5,\27\2\u0165\u0166\7F\2\2\u0166\u0167\7\3\2\2\u0167"+
- "\u016c\5B\"\2\u0168\u0169\7\5\2\2\u0169\u016b\5B\"\2\u016a\u0168\3\2\2"+
- "\2\u016b\u016e\3\2\2\2\u016c\u016a\3\2\2\2\u016c\u016d\3\2\2\2\u016d\u016f"+
- "\3\2\2\2\u016e\u016c\3\2\2\2\u016f\u0170\7\4\2\2\u0170\u0172\3\2\2\2\u0171"+
- "\u0163\3\2\2\2\u0171\u0165\3\2\2\2\u0172\'\3\2\2\2\u0173\u0178\5D#\2\u0174"+
- "\u0176\7\f\2\2\u0175\u0174\3\2\2\2\u0175\u0176\3\2\2\2\u0176\u0177\3\2"+
- "\2\2\u0177\u0179\5@!\2\u0178\u0175\3\2\2\2\u0178\u0179\3\2\2\2\u0179\u018d"+
- "\3\2\2\2\u017a\u017b\7\3\2\2\u017b\u017c\5\n\6\2\u017c\u0181\7\4\2\2\u017d"+
- "\u017f\7\f\2\2\u017e\u017d\3\2\2\2\u017e\u017f\3\2\2\2\u017f\u0180\3\2"+
- "\2\2\u0180\u0182\5@!\2\u0181\u017e\3\2\2\2\u0181\u0182\3\2\2\2\u0182\u018d"+
- "\3\2\2\2\u0183\u0184\7\3\2\2\u0184\u0185\5 \21\2\u0185\u018a\7\4\2\2\u0186"+
- "\u0188\7\f\2\2\u0187\u0186\3\2\2\2\u0187\u0188\3\2\2\2\u0188\u0189\3\2"+
- "\2\2\u0189\u018b\5@!\2\u018a\u0187\3\2\2\2\u018a\u018b\3\2\2\2\u018b\u018d"+
- "\3\2\2\2\u018c\u0173\3\2\2\2\u018c\u017a\3\2\2\2\u018c\u0183\3\2\2\2\u018d"+
- ")\3\2\2\2\u018e\u018f\5,\27\2\u018f+\3\2\2\2\u0190\u0191\b\27\1\2\u0191"+
- "\u0192\7/\2\2\u0192\u01c2\5,\27\n\u0193\u0194\7\32\2\2\u0194\u0195\7\3"+
- "\2\2\u0195\u0196\5\b\5\2\u0196\u0197\7\4\2\2\u0197\u01c2\3\2\2\2\u0198"+
- "\u0199\7;\2\2\u0199\u019a\7\3\2\2\u019a\u019f\5L\'\2\u019b\u019c\7\5\2"+
- "\2\u019c\u019e\5L\'\2\u019d\u019b\3\2\2\2\u019e\u01a1\3\2\2\2\u019f\u019d"+
- "\3\2\2\2\u019f\u01a0\3\2\2\2\u01a0\u01a2\3\2\2\2\u01a1\u019f\3\2\2\2\u01a2"+
- "\u01a3\7\4\2\2\u01a3\u01c2\3\2\2\2\u01a4\u01a5\7-\2\2\u01a5\u01a6\7\3"+
- "\2\2\u01a6\u01a7\5@!\2\u01a7\u01a8\7\5\2\2\u01a8\u01ad\5L\'\2\u01a9\u01aa"+
- "\7\5\2\2\u01aa\u01ac\5L\'\2\u01ab\u01a9\3\2\2\2\u01ac\u01af\3\2\2\2\u01ad"+
- "\u01ab\3\2\2\2\u01ad\u01ae\3\2\2\2\u01ae\u01b0\3\2\2\2\u01af\u01ad\3\2"+
- "\2\2\u01b0\u01b1\7\4\2\2\u01b1\u01c2\3\2\2\2\u01b2\u01b3\7-\2\2\u01b3"+
- "\u01b4\7\3\2\2\u01b4\u01b5\5L\'\2\u01b5\u01b6\7\5\2\2\u01b6\u01bb\5L\'"+
- "\2\u01b7\u01b8\7\5\2\2\u01b8\u01ba\5L\'\2\u01b9\u01b7\3\2\2\2\u01ba\u01bd"+
- "\3\2\2\2\u01bb\u01b9\3\2\2\2\u01bb\u01bc\3\2\2\2\u01bc\u01be\3\2\2\2\u01bd"+
- "\u01bb\3\2\2\2\u01be\u01bf\7\4\2\2\u01bf\u01c2\3\2\2\2\u01c0\u01c2\5."+
- "\30\2\u01c1\u0190\3\2\2\2\u01c1\u0193\3\2\2\2\u01c1\u0198\3\2\2\2\u01c1"+
- "\u01a4\3\2\2\2\u01c1\u01b2\3\2\2\2\u01c1\u01c0\3\2\2\2\u01c2\u01cb\3\2"+
- "\2\2\u01c3\u01c4\f\4\2\2\u01c4\u01c5\7\n\2\2\u01c5\u01ca\5,\27\5\u01c6"+
- "\u01c7\f\3\2\2\u01c7\u01c8\7\63\2\2\u01c8\u01ca\5,\27\4\u01c9\u01c3\3"+
- "\2\2\2\u01c9\u01c6\3\2\2\2\u01ca\u01cd\3\2\2\2\u01cb\u01c9\3\2\2\2\u01cb"+
- "\u01cc\3\2\2\2\u01cc-\3\2\2\2\u01cd\u01cb\3\2\2\2\u01ce\u01d0\5\64\33"+
- "\2\u01cf\u01d1\5\60\31\2\u01d0\u01cf\3\2\2\2\u01d0\u01d1\3\2\2\2\u01d1"+
- "/\3\2\2\2\u01d2\u01d4\7/\2\2\u01d3\u01d2\3\2\2\2\u01d3\u01d4\3\2\2\2\u01d4"+
- "\u01d5\3\2\2\2\u01d5\u01d6\7\16\2\2\u01d6\u01d7\5\64\33\2\u01d7\u01d8"+
- "\7\n\2\2\u01d8\u01d9\5\64\33\2\u01d9\u0201\3\2\2\2\u01da\u01dc\7/\2\2"+
- "\u01db\u01da\3\2\2\2\u01db\u01dc\3\2\2\2\u01dc\u01dd\3\2\2\2\u01dd\u01de"+
- "\7%\2\2\u01de\u01df\7\3\2\2\u01df\u01e4\5*\26\2\u01e0\u01e1\7\5\2\2\u01e1"+
- "\u01e3\5*\26\2\u01e2\u01e0\3\2\2\2\u01e3\u01e6\3\2\2\2\u01e4\u01e2\3\2"+
- "\2\2\u01e4\u01e5\3\2\2\2\u01e5\u01e7\3\2\2\2\u01e6\u01e4\3\2\2\2\u01e7"+
- "\u01e8\7\4\2\2\u01e8\u0201\3\2\2\2\u01e9\u01eb\7/\2\2\u01ea\u01e9\3\2"+
- "\2\2\u01ea\u01eb\3\2\2\2\u01eb\u01ec\3\2\2\2\u01ec\u01ed\7%\2\2\u01ed"+
- "\u01ee\7\3\2\2\u01ee\u01ef\5\b\5\2\u01ef\u01f0\7\4\2\2\u01f0\u0201\3\2"+
- "\2\2\u01f1\u01f3\7/\2\2\u01f2\u01f1\3\2\2\2\u01f2\u01f3\3\2\2\2\u01f3"+
- "\u01f4\3\2\2\2\u01f4\u01f5\7*\2\2\u01f5\u0201\5\62\32\2\u01f6\u01f8\7"+
- "/\2\2\u01f7\u01f6\3\2\2\2\u01f7\u01f8\3\2\2\2\u01f8\u01f9\3\2\2\2\u01f9"+
- "\u01fa\7:\2\2\u01fa\u0201\5L\'\2\u01fb\u01fd\7\'\2\2\u01fc\u01fe\7/\2"+
- "\2\u01fd\u01fc\3\2\2\2\u01fd\u01fe\3\2\2\2\u01fe\u01ff\3\2\2\2\u01ff\u0201"+
- "\7\60\2\2\u0200\u01d3\3\2\2\2\u0200\u01db\3\2\2\2\u0200\u01ea\3\2\2\2"+
- "\u0200\u01f2\3\2\2\2\u0200\u01f7\3\2\2\2\u0200\u01fb\3\2\2\2\u0201\61"+
- "\3\2\2\2\u0202\u0205\5L\'\2\u0203\u0204\7\30\2\2\u0204\u0206\5L\'\2\u0205"+
- "\u0203\3\2\2\2\u0205\u0206\3\2\2\2\u0206\63\3\2\2\2\u0207\u0208\b\33\1"+
- "\2\u0208\u020c\5\66\34\2\u0209\u020a\t\n\2\2\u020a\u020c\5\64\33\6\u020b"+
- "\u0207\3\2\2\2\u020b\u0209\3\2\2\2\u020c\u0219\3\2\2\2\u020d\u020e\f\5"+
- "\2\2\u020e\u020f\t\13\2\2\u020f\u0218\5\64\33\6\u0210\u0211\f\4\2\2\u0211"+
- "\u0212\t\n\2\2\u0212\u0218\5\64\33\5\u0213\u0214\f\3\2\2\u0214\u0215\5"+
- ":\36\2\u0215\u0216\5\64\33\4\u0216\u0218\3\2\2\2\u0217\u020d\3\2\2\2\u0217"+
- "\u0210\3\2\2\2\u0217\u0213\3\2\2\2\u0218\u021b\3\2\2\2\u0219\u0217\3\2"+
- "\2\2\u0219\u021a\3\2\2\2\u021a\65\3\2\2\2\u021b\u0219\3\2\2\2\u021c\u021d"+
- "\7\20\2\2\u021d\u021e\7\3\2\2\u021e\u021f\5*\26\2\u021f\u0220\7\f\2\2"+
- "\u0220\u0221\5> \2\u0221\u0222\7\4\2\2\u0222\u024e\3\2\2\2\u0223\u0224"+
- "\7\34\2\2\u0224\u0225\7\3\2\2\u0225\u0226\5B\"\2\u0226\u0227\7\37\2\2"+
- "\u0227\u0228\5\64\33\2\u0228\u0229\7\4\2\2\u0229\u024e\3\2\2\2\u022a\u024e"+
- "\58\35\2\u022b\u024e\7R\2\2\u022c\u022d\5@!\2\u022d\u022e\7V\2\2\u022e"+
- "\u0230\3\2\2\2\u022f\u022c\3\2\2\2\u022f\u0230\3\2\2\2\u0230\u0231\3\2"+
- "\2\2\u0231\u024e\7R\2\2\u0232\u0233\5B\"\2\u0233\u023f\7\3\2\2\u0234\u0236"+
- "\5\34\17\2\u0235\u0234\3\2\2\2\u0235\u0236\3\2\2\2\u0236\u0237\3\2\2\2"+
- "\u0237\u023c\5*\26\2\u0238\u0239\7\5\2\2\u0239\u023b\5*\26\2\u023a\u0238"+
- "\3\2\2\2\u023b\u023e\3\2\2\2\u023c\u023a\3\2\2\2\u023c\u023d\3\2\2\2\u023d"+
- "\u0240\3\2\2\2\u023e\u023c\3\2\2\2\u023f\u0235\3\2\2\2\u023f\u0240\3\2"+
- "\2\2\u0240\u0241\3\2\2\2\u0241\u0242\7\4\2\2\u0242\u024e\3\2\2\2\u0243"+
- "\u0244\7\3\2\2\u0244\u0245\5\b\5\2\u0245\u0246\7\4\2\2\u0246\u024e\3\2"+
- "\2\2\u0247\u024e\5B\"\2\u0248\u024e\5@!\2\u0249\u024a\7\3\2\2\u024a\u024b"+
- "\5*\26\2\u024b\u024c\7\4\2\2\u024c\u024e\3\2\2\2\u024d\u021c\3\2\2\2\u024d"+
- "\u0223\3\2\2\2\u024d\u022a\3\2\2\2\u024d\u022b\3\2\2\2\u024d\u022f\3\2"+
- "\2\2\u024d\u0232\3\2\2\2\u024d\u0243\3\2\2\2\u024d\u0247\3\2\2\2\u024d"+
- "\u0248\3\2\2\2\u024d\u0249\3\2\2\2\u024e\67\3\2\2\2\u024f\u0259\7\60\2"+
- "\2\u0250\u0259\5J&\2\u0251\u0259\5<\37\2\u0252\u0254\7X\2\2\u0253\u0252"+
- "\3\2\2\2\u0254\u0255\3\2\2\2\u0255\u0253\3\2\2\2\u0255\u0256\3\2\2\2\u0256"+
- "\u0259\3\2\2\2\u0257\u0259\7W\2\2\u0258\u024f\3\2\2\2\u0258\u0250\3\2"+
- "\2\2\u0258\u0251\3\2\2\2\u0258\u0253\3\2\2\2\u0258\u0257\3\2\2\2\u0259"+
- "9\3\2\2\2\u025a\u025b\t\f\2\2\u025b;\3\2\2\2\u025c\u025d\t\r\2\2\u025d"+
- "=\3\2\2\2\u025e\u025f\5B\"\2\u025f?\3\2\2\2\u0260\u0261\5B\"\2\u0261\u0262"+
- "\7V\2\2\u0262\u0264\3\2\2\2\u0263\u0260\3\2\2\2\u0264\u0267\3\2\2\2\u0265"+
- "\u0263\3\2\2\2\u0265\u0266\3\2\2\2\u0266\u0268\3\2\2\2\u0267\u0265\3\2"+
- "\2\2\u0268\u0269\5B\"\2\u0269A\3\2\2\2\u026a\u026d\5F$\2\u026b\u026d\5"+
- "H%\2\u026c\u026a\3\2\2\2\u026c\u026b\3\2\2\2\u026dC\3\2\2\2\u026e\u026f"+
- "\5B\"\2\u026f\u0270\7\6\2\2\u0270\u0272\3\2\2\2\u0271\u026e\3\2\2\2\u0271"+
- "\u0272\3\2\2\2\u0272\u0273\3\2\2\2\u0273\u027b\7]\2\2\u0274\u0275\5B\""+
- "\2\u0275\u0276\7\6\2\2\u0276\u0278\3\2\2\2\u0277\u0274\3\2\2\2\u0277\u0278"+
- "\3\2\2\2\u0278\u0279\3\2\2\2\u0279\u027b\5B\"\2\u027a\u0271\3\2\2\2\u027a"+
- "\u0277\3\2\2\2\u027bE\3\2\2\2\u027c\u027f\7^\2\2\u027d\u027f\7_\2\2\u027e"+
- "\u027c\3\2\2\2\u027e\u027d\3\2\2\2\u027fG\3\2\2\2\u0280\u0284\7[\2\2\u0281"+
- "\u0284\5N(\2\u0282\u0284\7\\\2\2\u0283\u0280\3\2\2\2\u0283\u0281\3\2\2"+
- "\2\u0283\u0282\3\2\2\2\u0284I\3\2\2\2\u0285\u0288\7Z\2\2\u0286\u0288\7"+
- "Y\2\2\u0287\u0285\3\2\2\2\u0287\u0286\3\2\2\2\u0288K\3\2\2\2\u0289\u028a"+
- "\t\16\2\2\u028aM\3\2\2\2\u028b\u028c\t\17\2\2\u028cO\3\2\2\2`_aenptz}"+
- "\u0088\u008b\u0095\u0098\u009b\u009e\u00a6\u00a9\u00af\u00b3\u00b6\u00b9"+
- "\u00bc\u00c3\u00cb\u00ce\u00da\u00dd\u00e1\u00e8\u00ec\u00f0\u00f7\u00fb"+
- "\u00ff\u0104\u0108\u0110\u0114\u011b\u0126\u0129\u012d\u0139\u013c\u0142"+
- "\u0149\u0150\u0153\u0157\u015b\u015f\u0161\u016c\u0171\u0175\u0178\u017e"+
- "\u0181\u0187\u018a\u018c\u019f\u01ad\u01bb\u01c1\u01c9\u01cb\u01d0\u01d3"+
- "\u01db\u01e4\u01ea\u01f2\u01f7\u01fd\u0200\u0205\u020b\u0217\u0219\u022f"+
- "\u0235\u023c\u023f\u024d\u0255\u0258\u0265\u026c\u0271\u0277\u027a\u027e"+
- "\u0283\u0287";
+ "\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4"+
+ ",\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\3\2\3\2\3\2\3\3\3\3\3\3\3\4\3\4\3\4\3"+
+ "\4\3\4\3\4\3\4\3\4\3\4\7\4p\n\4\f\4\16\4s\13\4\3\4\5\4v\n\4\3\4\3\4\3"+
+ "\4\3\4\3\4\3\4\3\4\7\4\177\n\4\f\4\16\4\u0082\13\4\3\4\5\4\u0085\n\4\3"+
+ "\4\3\4\3\4\3\4\5\4\u008b\n\4\3\4\5\4\u008e\n\4\3\4\3\4\3\4\3\4\3\4\3\4"+
+ "\3\4\3\4\3\4\5\4\u0099\n\4\3\4\5\4\u009c\n\4\3\4\3\4\3\4\3\4\3\4\3\4\3"+
+ "\4\3\4\5\4\u00a6\n\4\3\4\5\4\u00a9\n\4\3\4\5\4\u00ac\n\4\3\4\5\4\u00af"+
+ "\n\4\3\4\3\4\3\4\3\4\7\4\u00b5\n\4\f\4\16\4\u00b8\13\4\5\4\u00ba\n\4\3"+
+ "\4\3\4\3\4\3\4\5\4\u00c0\n\4\3\4\3\4\5\4\u00c4\n\4\3\4\5\4\u00c7\n\4\3"+
+ "\4\5\4\u00ca\n\4\3\4\5\4\u00cd\n\4\3\4\3\4\3\4\3\4\3\4\5\4\u00d4\n\4\3"+
+ "\5\3\5\3\5\3\5\7\5\u00da\n\5\f\5\16\5\u00dd\13\5\5\5\u00df\n\5\3\5\3\5"+
+ "\3\6\3\6\3\6\3\6\3\6\3\6\7\6\u00e9\n\6\f\6\16\6\u00ec\13\6\5\6\u00ee\n"+
+ "\6\3\6\5\6\u00f1\n\6\3\7\3\7\3\7\3\7\3\7\5\7\u00f8\n\7\3\b\3\b\3\b\3\b"+
+ "\3\b\5\b\u00ff\n\b\3\t\3\t\5\t\u0103\n\t\3\n\3\n\5\n\u0107\n\n\3\n\3\n"+
+ "\3\n\7\n\u010c\n\n\f\n\16\n\u010f\13\n\3\n\5\n\u0112\n\n\3\n\3\n\5\n\u0116"+
+ "\n\n\3\n\3\n\3\n\5\n\u011b\n\n\3\n\3\n\5\n\u011f\n\n\3\13\3\13\3\13\3"+
+ "\13\7\13\u0125\n\13\f\13\16\13\u0128\13\13\3\f\5\f\u012b\n\f\3\f\3\f\3"+
+ "\f\7\f\u0130\n\f\f\f\16\f\u0133\13\f\3\r\3\r\3\16\3\16\3\16\3\16\7\16"+
+ "\u013b\n\16\f\16\16\16\u013e\13\16\5\16\u0140\n\16\3\16\3\16\5\16\u0144"+
+ "\n\16\3\17\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\21\3\21\5\21\u0150\n\21"+
+ "\3\21\5\21\u0153\n\21\3\22\3\22\7\22\u0157\n\22\f\22\16\22\u015a\13\22"+
+ "\3\23\3\23\3\23\3\23\5\23\u0160\n\23\3\23\3\23\3\23\3\23\3\23\5\23\u0167"+
+ "\n\23\3\24\5\24\u016a\n\24\3\24\3\24\5\24\u016e\n\24\3\24\3\24\5\24\u0172"+
+ "\n\24\3\24\3\24\5\24\u0176\n\24\5\24\u0178\n\24\3\25\3\25\3\25\3\25\3"+
+ "\25\3\25\3\25\7\25\u0181\n\25\f\25\16\25\u0184\13\25\3\25\3\25\5\25\u0188"+
+ "\n\25\3\26\3\26\5\26\u018c\n\26\3\26\5\26\u018f\n\26\3\26\3\26\3\26\3"+
+ "\26\5\26\u0195\n\26\3\26\5\26\u0198\n\26\3\26\3\26\3\26\3\26\5\26\u019e"+
+ "\n\26\3\26\5\26\u01a1\n\26\5\26\u01a3\n\26\3\27\3\27\3\30\3\30\3\30\3"+
+ "\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\7\30\u01b4\n\30\f\30"+
+ "\16\30\u01b7\13\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\7\30\u01c2"+
+ "\n\30\f\30\16\30\u01c5\13\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3"+
+ "\30\7\30\u01d0\n\30\f\30\16\30\u01d3\13\30\3\30\3\30\3\30\5\30\u01d8\n"+
+ "\30\3\30\3\30\3\30\3\30\3\30\3\30\7\30\u01e0\n\30\f\30\16\30\u01e3\13"+
+ "\30\3\31\3\31\5\31\u01e7\n\31\3\32\5\32\u01ea\n\32\3\32\3\32\3\32\3\32"+
+ "\3\32\3\32\5\32\u01f2\n\32\3\32\3\32\3\32\3\32\3\32\7\32\u01f9\n\32\f"+
+ "\32\16\32\u01fc\13\32\3\32\3\32\3\32\5\32\u0201\n\32\3\32\3\32\3\32\3"+
+ "\32\3\32\3\32\5\32\u0209\n\32\3\32\3\32\3\32\5\32\u020e\n\32\3\32\3\32"+
+ "\3\32\3\32\5\32\u0214\n\32\3\32\5\32\u0217\n\32\3\33\3\33\5\33\u021b\n"+
+ "\33\3\34\3\34\3\34\3\34\3\34\3\34\5\34\u0223\n\34\3\35\3\35\3\35\3\35"+
+ "\5\35\u0229\n\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\7\35"+
+ "\u0235\n\35\f\35\16\35\u0238\13\35\3\36\3\36\3\36\3\36\3\36\3\36\3\36"+
+ "\5\36\u0241\n\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36"+
+ "\3\36\5\36\u024f\n\36\3\37\3\37\3\37\3\37\3\37\5\37\u0256\n\37\3 \3 \3"+
+ " \3 \3 \3 \3 \3!\3!\3!\3!\3!\5!\u0264\n!\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3"+
+ "#\3#\3#\3#\3#\5#\u0272\n#\3$\3$\3$\5$\u0277\n$\3$\3$\3$\7$\u027c\n$\f"+
+ "$\16$\u027f\13$\5$\u0281\n$\3$\3$\3%\3%\3%\3%\6%\u0289\n%\r%\16%\u028a"+
+ "\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\5%\u029e\n%\3&\3&"+
+ "\3\'\3\'\3(\3(\3)\3)\3)\7)\u02a9\n)\f)\16)\u02ac\13)\3)\3)\3*\3*\5*\u02b2"+
+ "\n*\3+\3+\3+\5+\u02b7\n+\3+\3+\3+\3+\5+\u02bd\n+\3+\5+\u02c0\n+\3,\3,"+
+ "\5,\u02c4\n,\3-\3-\3-\5-\u02c9\n-\3.\3.\5.\u02cd\n.\3/\3/\3\60\3\60\3"+
+ "\60\2\4.8\61\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64"+
+ "\668:<>@BDFHJLNPRTVXZ\\^\2\20\b\2\7\7\t\t\31\31,,\62\62\66\66\4\2\"\""+
+ "BB\4\2\t\t\62\62\4\2\37\37%%\3\2\25\26\4\2\7\7aa\4\2\r\r\25\25\4\2\7\7"+
+ "\27\27\3\2XY\3\2Z\\\3\2RW\4\2\35\35CC\3\2_`\20\2\b\t\22\24\31\31\33\33"+
+ "\36\36!\",,\62\62\668:<>?ABDEGG\u032e\2`\3\2\2\2\4c\3\2\2\2\6\u00d3\3"+
+ "\2\2\2\b\u00de\3\2\2\2\n\u00e2\3\2\2\2\f\u00f7\3\2\2\2\16\u00fe\3\2\2"+
+ "\2\20\u0100\3\2\2\2\22\u0104\3\2\2\2\24\u0120\3\2\2\2\26\u012a\3\2\2\2"+
+ "\30\u0134\3\2\2\2\32\u0143\3\2\2\2\34\u0145\3\2\2\2\36\u014b\3\2\2\2 "+
+ "\u014d\3\2\2\2\"\u0154\3\2\2\2$\u0166\3\2\2\2&\u0177\3\2\2\2(\u0187\3"+
+ "\2\2\2*\u01a2\3\2\2\2,\u01a4\3\2\2\2.\u01d7\3\2\2\2\60\u01e4\3\2\2\2\62"+
+ "\u0216\3\2\2\2\64\u0218\3\2\2\2\66\u0222\3\2\2\28\u0228\3\2\2\2:\u024e"+
+ "\3\2\2\2<\u0255\3\2\2\2>\u0257\3\2\2\2@\u0263\3\2\2\2B\u0265\3\2\2\2D"+
+ "\u0271\3\2\2\2F\u0273\3\2\2\2H\u029d\3\2\2\2J\u029f\3\2\2\2L\u02a1\3\2"+
+ "\2\2N\u02a3\3\2\2\2P\u02aa\3\2\2\2R\u02b1\3\2\2\2T\u02bf\3\2\2\2V\u02c3"+
+ "\3\2\2\2X\u02c8\3\2\2\2Z\u02cc\3\2\2\2\\\u02ce\3\2\2\2^\u02d0\3\2\2\2"+
+ "`a\5\6\4\2ab\7\2\2\3b\3\3\2\2\2cd\5,\27\2de\7\2\2\3e\5\3\2\2\2f\u00d4"+
+ "\5\b\5\2gu\7\33\2\2hq\7\3\2\2ij\78\2\2jp\t\2\2\2kl\7\36\2\2lp\t\3\2\2"+
+ "mn\7G\2\2np\5L\'\2oi\3\2\2\2ok\3\2\2\2om\3\2\2\2ps\3\2\2\2qo\3\2\2\2q"+
+ "r\3\2\2\2rt\3\2\2\2sq\3\2\2\2tv\7\4\2\2uh\3\2\2\2uv\3\2\2\2vw\3\2\2\2"+
+ "w\u00d4\5\6\4\2x\u0084\7\24\2\2y\u0080\7\3\2\2z{\78\2\2{\177\t\4\2\2|"+
+ "}\7\36\2\2}\177\t\3\2\2~z\3\2\2\2~|\3\2\2\2\177\u0082\3\2\2\2\u0080~\3"+
+ "\2\2\2\u0080\u0081\3\2\2\2\u0081\u0083\3\2\2\2\u0082\u0080\3\2\2\2\u0083"+
+ "\u0085\7\4\2\2\u0084y\3\2\2\2\u0084\u0085\3\2\2\2\u0085\u0086\3\2\2\2"+
+ "\u0086\u00d4\5\6\4\2\u0087\u0088\7>\2\2\u0088\u008d\7A\2\2\u0089\u008b"+
+ "\7*\2\2\u008a\u0089\3\2\2\2\u008a\u008b\3\2\2\2\u008b\u008c\3\2\2\2\u008c"+
+ "\u008e\5\64\33\2\u008d\u008a\3\2\2\2\u008d\u008e\3\2\2\2\u008e\u00d4\3"+
+ "\2\2\2\u008f\u0090\7>\2\2\u0090\u0091\7\23\2\2\u0091\u0092\t\5\2\2\u0092"+
+ "\u00d4\5T+\2\u0093\u0094\t\6\2\2\u0094\u00d4\5T+\2\u0095\u0096\7>\2\2"+
+ "\u0096\u009b\7!\2\2\u0097\u0099\7*\2\2\u0098\u0097\3\2\2\2\u0098\u0099"+
+ "\3\2\2\2\u0099\u009a\3\2\2\2\u009a\u009c\5\64\33\2\u009b\u0098\3\2\2\2"+
+ "\u009b\u009c\3\2\2\2\u009c\u00d4\3\2\2\2\u009d\u009e\7>\2\2\u009e\u00d4"+
+ "\7<\2\2\u009f\u00a0\7?\2\2\u00a0\u00d4\7\22\2\2\u00a1\u00a2\7?\2\2\u00a2"+
+ "\u00a8\7A\2\2\u00a3\u00a5\7\21\2\2\u00a4\u00a6\7*\2\2\u00a5\u00a4\3\2"+
+ "\2\2\u00a5\u00a6\3\2\2\2\u00a6\u00a7\3\2\2\2\u00a7\u00a9\5\64\33\2\u00a8"+
+ "\u00a3\3\2\2\2\u00a8\u00a9\3\2\2\2\u00a9\u00ae\3\2\2\2\u00aa\u00ac\7*"+
+ "\2\2\u00ab\u00aa\3\2\2\2\u00ab\u00ac\3\2\2\2\u00ac\u00ad\3\2\2\2\u00ad"+
+ "\u00af\5\64\33\2\u00ae\u00ab\3\2\2\2\u00ae\u00af\3\2\2\2\u00af\u00b9\3"+
+ "\2\2\2\u00b0\u00b1\7D\2\2\u00b1\u00b6\5\\/\2\u00b2\u00b3\7\5\2\2\u00b3"+
+ "\u00b5\5\\/\2\u00b4\u00b2\3\2\2\2\u00b5\u00b8\3\2\2\2\u00b6\u00b4\3\2"+
+ "\2\2\u00b6\u00b7\3\2\2\2\u00b7\u00ba\3\2\2\2\u00b8\u00b6\3\2\2\2\u00b9"+
+ "\u00b0\3\2\2\2\u00b9\u00ba\3\2\2\2\u00ba\u00d4\3\2\2\2\u00bb\u00bc\7?"+
+ "\2\2\u00bc\u00bf\7\23\2\2\u00bd\u00be\7\21\2\2\u00be\u00c0\5\\/\2\u00bf"+
+ "\u00bd\3\2\2\2\u00bf\u00c0\3\2\2\2\u00c0\u00c6\3\2\2\2\u00c1\u00c3\7@"+
+ "\2\2\u00c2\u00c4\7*\2\2\u00c3\u00c2\3\2\2\2\u00c3\u00c4\3\2\2\2\u00c4"+
+ "\u00c5\3\2\2\2\u00c5\u00c7\5\64\33\2\u00c6\u00c1\3\2\2\2\u00c6\u00c7\3"+
+ "\2\2\2\u00c7\u00cc\3\2\2\2\u00c8\u00ca\7*\2\2\u00c9\u00c8\3\2\2\2\u00c9"+
+ "\u00ca\3\2\2\2\u00ca\u00cb\3\2\2\2\u00cb\u00cd\5\64\33\2\u00cc\u00c9\3"+
+ "\2\2\2\u00cc\u00cd\3\2\2\2\u00cd\u00d4\3\2\2\2\u00ce\u00cf\7?\2\2\u00cf"+
+ "\u00d4\7E\2\2\u00d0\u00d1\7?\2\2\u00d1\u00d2\7@\2\2\u00d2\u00d4\7E\2\2"+
+ "\u00d3f\3\2\2\2\u00d3g\3\2\2\2\u00d3x\3\2\2\2\u00d3\u0087\3\2\2\2\u00d3"+
+ "\u008f\3\2\2\2\u00d3\u0093\3\2\2\2\u00d3\u0095\3\2\2\2\u00d3\u009d\3\2"+
+ "\2\2\u00d3\u009f\3\2\2\2\u00d3\u00a1\3\2\2\2\u00d3\u00bb\3\2\2\2\u00d3"+
+ "\u00ce\3\2\2\2\u00d3\u00d0\3\2\2\2\u00d4\7\3\2\2\2\u00d5\u00d6\7I\2\2"+
+ "\u00d6\u00db\5\34\17\2\u00d7\u00d8\7\5\2\2\u00d8\u00da\5\34\17\2\u00d9"+
+ "\u00d7\3\2\2\2\u00da\u00dd\3\2\2\2\u00db\u00d9\3\2\2\2\u00db\u00dc\3\2"+
+ "\2\2\u00dc\u00df\3\2\2\2\u00dd\u00db\3\2\2\2\u00de\u00d5\3\2\2\2\u00de"+
+ "\u00df\3\2\2\2\u00df\u00e0\3\2\2\2\u00e0\u00e1\5\n\6\2\u00e1\t\3\2\2\2"+
+ "\u00e2\u00ed\5\16\b\2\u00e3\u00e4\7\64\2\2\u00e4\u00e5\7\17\2\2\u00e5"+
+ "\u00ea\5\20\t\2\u00e6\u00e7\7\5\2\2\u00e7\u00e9\5\20\t\2\u00e8\u00e6\3"+
+ "\2\2\2\u00e9\u00ec\3\2\2\2\u00ea\u00e8\3\2\2\2\u00ea\u00eb\3\2\2\2\u00eb"+
+ "\u00ee\3\2\2\2\u00ec\u00ea\3\2\2\2\u00ed\u00e3\3\2\2\2\u00ed\u00ee\3\2"+
+ "\2\2\u00ee\u00f0\3\2\2\2\u00ef\u00f1\5\f\7\2\u00f0\u00ef\3\2\2\2\u00f0"+
+ "\u00f1\3\2\2\2\u00f1\13\3\2\2\2\u00f2\u00f3\7+\2\2\u00f3\u00f8\t\7\2\2"+
+ "\u00f4\u00f5\7L\2\2\u00f5\u00f6\t\7\2\2\u00f6\u00f8\7Q\2\2\u00f7\u00f2"+
+ "\3\2\2\2\u00f7\u00f4\3\2\2\2\u00f8\r\3\2\2\2\u00f9\u00ff\5\22\n\2\u00fa"+
+ "\u00fb\7\3\2\2\u00fb\u00fc\5\n\6\2\u00fc\u00fd\7\4\2\2\u00fd\u00ff\3\2"+
+ "\2\2\u00fe\u00f9\3\2\2\2\u00fe\u00fa\3\2\2\2\u00ff\17\3\2\2\2\u0100\u0102"+
+ "\5,\27\2\u0101\u0103\t\b\2\2\u0102\u0101\3\2\2\2\u0102\u0103\3\2\2\2\u0103"+
+ "\21\3\2\2\2\u0104\u0106\7=\2\2\u0105\u0107\5\36\20\2\u0106\u0105\3\2\2"+
+ "\2\u0106\u0107\3\2\2\2\u0107\u0108\3\2\2\2\u0108\u010d\5 \21\2\u0109\u010a"+
+ "\7\5\2\2\u010a\u010c\5 \21\2\u010b\u0109\3\2\2\2\u010c\u010f\3\2\2\2\u010d"+
+ "\u010b\3\2\2\2\u010d\u010e\3\2\2\2\u010e\u0111\3\2\2\2\u010f\u010d\3\2"+
+ "\2\2\u0110\u0112\5\24\13\2\u0111\u0110\3\2\2\2\u0111\u0112\3\2\2\2\u0112"+
+ "\u0115\3\2\2\2\u0113\u0114\7H\2\2\u0114\u0116\5.\30\2\u0115\u0113\3\2"+
+ "\2\2\u0115\u0116\3\2\2\2\u0116\u011a\3\2\2\2\u0117\u0118\7#\2\2\u0118"+
+ "\u0119\7\17\2\2\u0119\u011b\5\26\f\2\u011a\u0117\3\2\2\2\u011a\u011b\3"+
+ "\2\2\2\u011b\u011e\3\2\2\2\u011c\u011d\7$\2\2\u011d\u011f\5.\30\2\u011e"+
+ "\u011c\3\2\2\2\u011e\u011f\3\2\2\2\u011f\23\3\2\2\2\u0120\u0121\7\37\2"+
+ "\2\u0121\u0126\5\"\22\2\u0122\u0123\7\5\2\2\u0123\u0125\5\"\22\2\u0124"+
+ "\u0122\3\2\2\2\u0125\u0128\3\2\2\2\u0126\u0124\3\2\2\2\u0126\u0127\3\2"+
+ "\2\2\u0127\25\3\2\2\2\u0128\u0126\3\2\2\2\u0129\u012b\5\36\20\2\u012a"+
+ "\u0129\3\2\2\2\u012a\u012b\3\2\2\2\u012b\u012c\3\2\2\2\u012c\u0131\5\30"+
+ "\r\2\u012d\u012e\7\5\2\2\u012e\u0130\5\30\r\2\u012f\u012d\3\2\2\2\u0130"+
+ "\u0133\3\2\2\2\u0131\u012f\3\2\2\2\u0131\u0132\3\2\2\2\u0132\27\3\2\2"+
+ "\2\u0133\u0131\3\2\2\2\u0134\u0135\5\32\16\2\u0135\31\3\2\2\2\u0136\u013f"+
+ "\7\3\2\2\u0137\u013c\5,\27\2\u0138\u0139\7\5\2\2\u0139\u013b\5,\27\2\u013a"+
+ "\u0138\3\2\2\2\u013b\u013e\3\2\2\2\u013c\u013a\3\2\2\2\u013c\u013d\3\2"+
+ "\2\2\u013d\u0140\3\2\2\2\u013e\u013c\3\2\2\2\u013f\u0137\3\2\2\2\u013f"+
+ "\u0140\3\2\2\2\u0140\u0141\3\2\2\2\u0141\u0144\7\4\2\2\u0142\u0144\5,"+
+ "\27\2\u0143\u0136\3\2\2\2\u0143\u0142\3\2\2\2\u0144\33\3\2\2\2\u0145\u0146"+
+ "\5R*\2\u0146\u0147\7\f\2\2\u0147\u0148\7\3\2\2\u0148\u0149\5\n\6\2\u0149"+
+ "\u014a\7\4\2\2\u014a\35\3\2\2\2\u014b\u014c\t\t\2\2\u014c\37\3\2\2\2\u014d"+
+ "\u0152\5,\27\2\u014e\u0150\7\f\2\2\u014f\u014e\3\2\2\2\u014f\u0150\3\2"+
+ "\2\2\u0150\u0151\3\2\2\2\u0151\u0153\5R*\2\u0152\u014f\3\2\2\2\u0152\u0153"+
+ "\3\2\2\2\u0153!\3\2\2\2\u0154\u0158\5*\26\2\u0155\u0157\5$\23\2\u0156"+
+ "\u0155\3\2\2\2\u0157\u015a\3\2\2\2\u0158\u0156\3\2\2\2\u0158\u0159\3\2"+
+ "\2\2\u0159#\3\2\2\2\u015a\u0158\3\2\2\2\u015b\u015c\5&\24\2\u015c\u015d"+
+ "\7(\2\2\u015d\u015f\5*\26\2\u015e\u0160\5(\25\2\u015f\u015e\3\2\2\2\u015f"+
+ "\u0160\3\2\2\2\u0160\u0167\3\2\2\2\u0161\u0162\7.\2\2\u0162\u0163\5&\24"+
+ "\2\u0163\u0164\7(\2\2\u0164\u0165\5*\26\2\u0165\u0167\3\2\2\2\u0166\u015b"+
+ "\3\2\2\2\u0166\u0161\3\2\2\2\u0167%\3\2\2\2\u0168\u016a\7&\2\2\u0169\u0168"+
+ "\3\2\2\2\u0169\u016a\3\2\2\2\u016a\u0178\3\2\2\2\u016b\u016d\7)\2\2\u016c"+
+ "\u016e\7\65\2\2\u016d\u016c\3\2\2\2\u016d\u016e\3\2\2\2\u016e\u0178\3"+
+ "\2\2\2\u016f\u0171\79\2\2\u0170\u0172\7\65\2\2\u0171\u0170\3\2\2\2\u0171"+
+ "\u0172\3\2\2\2\u0172\u0178\3\2\2\2\u0173\u0175\7 \2\2\u0174\u0176\7\65"+
+ "\2\2\u0175\u0174\3\2\2\2\u0175\u0176\3\2\2\2\u0176\u0178\3\2\2\2\u0177"+
+ "\u0169\3\2\2\2\u0177\u016b\3\2\2\2\u0177\u016f\3\2\2\2\u0177\u0173\3\2"+
+ "\2\2\u0178\'\3\2\2\2\u0179\u017a\7\61\2\2\u017a\u0188\5.\30\2\u017b\u017c"+
+ "\7F\2\2\u017c\u017d\7\3\2\2\u017d\u0182\5R*\2\u017e\u017f\7\5\2\2\u017f"+
+ "\u0181\5R*\2\u0180\u017e\3\2\2\2\u0181\u0184\3\2\2\2\u0182\u0180\3\2\2"+
+ "\2\u0182\u0183\3\2\2\2\u0183\u0185\3\2\2\2\u0184\u0182\3\2\2\2\u0185\u0186"+
+ "\7\4\2\2\u0186\u0188\3\2\2\2\u0187\u0179\3\2\2\2\u0187\u017b\3\2\2\2\u0188"+
+ ")\3\2\2\2\u0189\u018e\5T+\2\u018a\u018c\7\f\2\2\u018b\u018a\3\2\2\2\u018b"+
+ "\u018c\3\2\2\2\u018c\u018d\3\2\2\2\u018d\u018f\5P)\2\u018e\u018b\3\2\2"+
+ "\2\u018e\u018f\3\2\2\2\u018f\u01a3\3\2\2\2\u0190\u0191\7\3\2\2\u0191\u0192"+
+ "\5\n\6\2\u0192\u0197\7\4\2\2\u0193\u0195\7\f\2\2\u0194\u0193\3\2\2\2\u0194"+
+ "\u0195\3\2\2\2\u0195\u0196\3\2\2\2\u0196\u0198\5P)\2\u0197\u0194\3\2\2"+
+ "\2\u0197\u0198\3\2\2\2\u0198\u01a3\3\2\2\2\u0199\u019a\7\3\2\2\u019a\u019b"+
+ "\5\"\22\2\u019b\u01a0\7\4\2\2\u019c\u019e\7\f\2\2\u019d\u019c\3\2\2\2"+
+ "\u019d\u019e\3\2\2\2\u019e\u019f\3\2\2\2\u019f\u01a1\5P)\2\u01a0\u019d"+
+ "\3\2\2\2\u01a0\u01a1\3\2\2\2\u01a1\u01a3\3\2\2\2\u01a2\u0189\3\2\2\2\u01a2"+
+ "\u0190\3\2\2\2\u01a2\u0199\3\2\2\2\u01a3+\3\2\2\2\u01a4\u01a5\5.\30\2"+
+ "\u01a5-\3\2\2\2\u01a6\u01a7\b\30\1\2\u01a7\u01a8\7/\2\2\u01a8\u01d8\5"+
+ ".\30\n\u01a9\u01aa\7\32\2\2\u01aa\u01ab\7\3\2\2\u01ab\u01ac\5\b\5\2\u01ac"+
+ "\u01ad\7\4\2\2\u01ad\u01d8\3\2\2\2\u01ae\u01af\7;\2\2\u01af\u01b0\7\3"+
+ "\2\2\u01b0\u01b5\5\\/\2\u01b1\u01b2\7\5\2\2\u01b2\u01b4\5\\/\2\u01b3\u01b1"+
+ "\3\2\2\2\u01b4\u01b7\3\2\2\2\u01b5\u01b3\3\2\2\2\u01b5\u01b6\3\2\2\2\u01b6"+
+ "\u01b8\3\2\2\2\u01b7\u01b5\3\2\2\2\u01b8\u01b9\7\4\2\2\u01b9\u01d8\3\2"+
+ "\2\2\u01ba\u01bb\7-\2\2\u01bb\u01bc\7\3\2\2\u01bc\u01bd\5P)\2\u01bd\u01be"+
+ "\7\5\2\2\u01be\u01c3\5\\/\2\u01bf\u01c0\7\5\2\2\u01c0\u01c2\5\\/\2\u01c1"+
+ "\u01bf\3\2\2\2\u01c2\u01c5\3\2\2\2\u01c3\u01c1\3\2\2\2\u01c3\u01c4\3\2"+
+ "\2\2\u01c4\u01c6\3\2\2\2\u01c5\u01c3\3\2\2\2\u01c6\u01c7\7\4\2\2\u01c7"+
+ "\u01d8\3\2\2\2\u01c8\u01c9\7-\2\2\u01c9\u01ca\7\3\2\2\u01ca\u01cb\5\\"+
+ "/\2\u01cb\u01cc\7\5\2\2\u01cc\u01d1\5\\/\2\u01cd\u01ce\7\5\2\2\u01ce\u01d0"+
+ "\5\\/\2\u01cf\u01cd\3\2\2\2\u01d0\u01d3\3\2\2\2\u01d1\u01cf\3\2\2\2\u01d1"+
+ "\u01d2\3\2\2\2\u01d2\u01d4\3\2\2\2\u01d3\u01d1\3\2\2\2\u01d4\u01d5\7\4"+
+ "\2\2\u01d5\u01d8\3\2\2\2\u01d6\u01d8\5\60\31\2\u01d7\u01a6\3\2\2\2\u01d7"+
+ "\u01a9\3\2\2\2\u01d7\u01ae\3\2\2\2\u01d7\u01ba\3\2\2\2\u01d7\u01c8\3\2"+
+ "\2\2\u01d7\u01d6\3\2\2\2\u01d8\u01e1\3\2\2\2\u01d9\u01da\f\4\2\2\u01da"+
+ "\u01db\7\n\2\2\u01db\u01e0\5.\30\5\u01dc\u01dd\f\3\2\2\u01dd\u01de\7\63"+
+ "\2\2\u01de\u01e0\5.\30\4\u01df\u01d9\3\2\2\2\u01df\u01dc\3\2\2\2\u01e0"+
+ "\u01e3\3\2\2\2\u01e1\u01df\3\2\2\2\u01e1\u01e2\3\2\2\2\u01e2/\3\2\2\2"+
+ "\u01e3\u01e1\3\2\2\2\u01e4\u01e6\58\35\2\u01e5\u01e7\5\62\32\2\u01e6\u01e5"+
+ "\3\2\2\2\u01e6\u01e7\3\2\2\2\u01e7\61\3\2\2\2\u01e8\u01ea\7/\2\2\u01e9"+
+ "\u01e8\3\2\2\2\u01e9\u01ea\3\2\2\2\u01ea\u01eb\3\2\2\2\u01eb\u01ec\7\16"+
+ "\2\2\u01ec\u01ed\58\35\2\u01ed\u01ee\7\n\2\2\u01ee\u01ef\58\35\2\u01ef"+
+ "\u0217\3\2\2\2\u01f0\u01f2\7/\2\2\u01f1\u01f0\3\2\2\2\u01f1\u01f2\3\2"+
+ "\2\2\u01f2\u01f3\3\2\2\2\u01f3\u01f4\7%\2\2\u01f4\u01f5\7\3\2\2\u01f5"+
+ "\u01fa\5,\27\2\u01f6\u01f7\7\5\2\2\u01f7\u01f9\5,\27\2\u01f8\u01f6\3\2"+
+ "\2\2\u01f9\u01fc\3\2\2\2\u01fa\u01f8\3\2\2\2\u01fa\u01fb\3\2\2\2\u01fb"+
+ "\u01fd\3\2\2\2\u01fc\u01fa\3\2\2\2\u01fd\u01fe\7\4\2\2\u01fe\u0217\3\2"+
+ "\2\2\u01ff\u0201\7/\2\2\u0200\u01ff\3\2\2\2\u0200\u0201\3\2\2\2\u0201"+
+ "\u0202\3\2\2\2\u0202\u0203\7%\2\2\u0203\u0204\7\3\2\2\u0204\u0205\5\b"+
+ "\5\2\u0205\u0206\7\4\2\2\u0206\u0217\3\2\2\2\u0207\u0209\7/\2\2\u0208"+
+ "\u0207\3\2\2\2\u0208\u0209\3\2\2\2\u0209\u020a\3\2\2\2\u020a\u020b\7*"+
+ "\2\2\u020b\u0217\5\64\33\2\u020c\u020e\7/\2\2\u020d\u020c\3\2\2\2\u020d"+
+ "\u020e\3\2\2\2\u020e\u020f\3\2\2\2\u020f\u0210\7:\2\2\u0210\u0217\5\\"+
+ "/\2\u0211\u0213\7\'\2\2\u0212\u0214\7/\2\2\u0213\u0212\3\2\2\2\u0213\u0214"+
+ "\3\2\2\2\u0214\u0215\3\2\2\2\u0215\u0217\7\60\2\2\u0216\u01e9\3\2\2\2"+
+ "\u0216\u01f1\3\2\2\2\u0216\u0200\3\2\2\2\u0216\u0208\3\2\2\2\u0216\u020d"+
+ "\3\2\2\2\u0216\u0211\3\2\2\2\u0217\63\3\2\2\2\u0218\u021a\5\\/\2\u0219"+
+ "\u021b\5\66\34\2\u021a\u0219\3\2\2\2\u021a\u021b\3\2\2\2\u021b\65\3\2"+
+ "\2\2\u021c\u021d\7\30\2\2\u021d\u0223\5\\/\2\u021e\u021f\7J\2\2\u021f"+
+ "\u0220\5\\/\2\u0220\u0221\7Q\2\2\u0221\u0223\3\2\2\2\u0222\u021c\3\2\2"+
+ "\2\u0222\u021e\3\2\2\2\u0223\67\3\2\2\2\u0224\u0225\b\35\1\2\u0225\u0229"+
+ "\5:\36\2\u0226\u0227\t\n\2\2\u0227\u0229\58\35\6\u0228\u0224\3\2\2\2\u0228"+
+ "\u0226\3\2\2\2\u0229\u0236\3\2\2\2\u022a\u022b\f\5\2\2\u022b\u022c\t\13"+
+ "\2\2\u022c\u0235\58\35\6\u022d\u022e\f\4\2\2\u022e\u022f\t\n\2\2\u022f"+
+ "\u0235\58\35\5\u0230\u0231\f\3\2\2\u0231\u0232\5J&\2\u0232\u0233\58\35"+
+ "\4\u0233\u0235\3\2\2\2\u0234\u022a\3\2\2\2\u0234\u022d\3\2\2\2\u0234\u0230"+
+ "\3\2\2\2\u0235\u0238\3\2\2\2\u0236\u0234\3\2\2\2\u0236\u0237\3\2\2\2\u0237"+
+ "9\3\2\2\2\u0238\u0236\3\2\2\2\u0239\u024f\5<\37\2\u023a\u024f\5@!\2\u023b"+
+ "\u024f\5H%\2\u023c\u024f\7Z\2\2\u023d\u023e\5P)\2\u023e\u023f\7^\2\2\u023f"+
+ "\u0241\3\2\2\2\u0240\u023d\3\2\2\2\u0240\u0241\3\2\2\2\u0241\u0242\3\2"+
+ "\2\2\u0242\u024f\7Z\2\2\u0243\u024f\5D#\2\u0244\u0245\7\3\2\2\u0245\u0246"+
+ "\5\b\5\2\u0246\u0247\7\4\2\2\u0247\u024f\3\2\2\2\u0248\u024f\5R*\2\u0249"+
+ "\u024f\5P)\2\u024a\u024b\7\3\2\2\u024b\u024c\5,\27\2\u024c\u024d\7\4\2"+
+ "\2\u024d\u024f\3\2\2\2\u024e\u0239\3\2\2\2\u024e\u023a\3\2\2\2\u024e\u023b"+
+ "\3\2\2\2\u024e\u023c\3\2\2\2\u024e\u0240\3\2\2\2\u024e\u0243\3\2\2\2\u024e"+
+ "\u0244\3\2\2\2\u024e\u0248\3\2\2\2\u024e\u0249\3\2\2\2\u024e\u024a\3\2"+
+ "\2\2\u024f;\3\2\2\2\u0250\u0256\5> \2\u0251\u0252\7K\2\2\u0252\u0253\5"+
+ "> \2\u0253\u0254\7Q\2\2\u0254\u0256\3\2\2\2\u0255\u0250\3\2\2\2\u0255"+
+ "\u0251\3\2\2\2\u0256=\3\2\2\2\u0257\u0258\7\20\2\2\u0258\u0259\7\3\2\2"+
+ "\u0259\u025a\5,\27\2\u025a\u025b\7\f\2\2\u025b\u025c\5N(\2\u025c\u025d"+
+ "\7\4\2\2\u025d?\3\2\2\2\u025e\u0264\5B\"\2\u025f\u0260\7K\2\2\u0260\u0261"+
+ "\5B\"\2\u0261\u0262\7Q\2\2\u0262\u0264\3\2\2\2\u0263\u025e\3\2\2\2\u0263"+
+ "\u025f\3\2\2\2\u0264A\3\2\2\2\u0265\u0266\7\34\2\2\u0266\u0267\7\3\2\2"+
+ "\u0267\u0268\5R*\2\u0268\u0269\7\37\2\2\u0269\u026a\58\35\2\u026a\u026b"+
+ "\7\4\2\2\u026bC\3\2\2\2\u026c\u0272\5F$\2\u026d\u026e\7K\2\2\u026e\u026f"+
+ "\5F$\2\u026f\u0270\7Q\2\2\u0270\u0272\3\2\2\2\u0271\u026c\3\2\2\2\u0271"+
+ "\u026d\3\2\2\2\u0272E\3\2\2\2\u0273\u0274\5R*\2\u0274\u0280\7\3\2\2\u0275"+
+ "\u0277\5\36\20\2\u0276\u0275\3\2\2\2\u0276\u0277\3\2\2\2\u0277\u0278\3"+
+ "\2\2\2\u0278\u027d\5,\27\2\u0279\u027a\7\5\2\2\u027a\u027c\5,\27\2\u027b"+
+ "\u0279\3\2\2\2\u027c\u027f\3\2\2\2\u027d\u027b\3\2\2\2\u027d\u027e\3\2"+
+ "\2\2\u027e\u0281\3\2\2\2\u027f\u027d\3\2\2\2\u0280\u0276\3\2\2\2\u0280"+
+ "\u0281\3\2\2\2\u0281\u0282\3\2\2\2\u0282\u0283\7\4\2\2\u0283G\3\2\2\2"+
+ "\u0284\u029e\7\60\2\2\u0285\u029e\5Z.\2\u0286\u029e\5L\'\2\u0287\u0289"+
+ "\7`\2\2\u0288\u0287\3\2\2\2\u0289\u028a\3\2\2\2\u028a\u0288\3\2\2\2\u028a"+
+ "\u028b\3\2\2\2\u028b\u029e\3\2\2\2\u028c\u029e\7_\2\2\u028d\u028e\7M\2"+
+ "\2\u028e\u028f\5\\/\2\u028f\u0290\7Q\2\2\u0290\u029e\3\2\2\2\u0291\u0292"+
+ "\7N\2\2\u0292\u0293\5\\/\2\u0293\u0294\7Q\2\2\u0294\u029e\3\2\2\2\u0295"+
+ "\u0296\7O\2\2\u0296\u0297\5\\/\2\u0297\u0298\7Q\2\2\u0298\u029e\3\2\2"+
+ "\2\u0299\u029a\7P\2\2\u029a\u029b\5\\/\2\u029b\u029c\7Q\2\2\u029c\u029e"+
+ "\3\2\2\2\u029d\u0284\3\2\2\2\u029d\u0285\3\2\2\2\u029d\u0286\3\2\2\2\u029d"+
+ "\u0288\3\2\2\2\u029d\u028c\3\2\2\2\u029d\u028d\3\2\2\2\u029d\u0291\3\2"+
+ "\2\2\u029d\u0295\3\2\2\2\u029d\u0299\3\2\2\2\u029eI\3\2\2\2\u029f\u02a0"+
+ "\t\f\2\2\u02a0K\3\2\2\2\u02a1\u02a2\t\r\2\2\u02a2M\3\2\2\2\u02a3\u02a4"+
+ "\5R*\2\u02a4O\3\2\2\2\u02a5\u02a6\5R*\2\u02a6\u02a7\7^\2\2\u02a7\u02a9"+
+ "\3\2\2\2\u02a8\u02a5\3\2\2\2\u02a9\u02ac\3\2\2\2\u02aa\u02a8\3\2\2\2\u02aa"+
+ "\u02ab\3\2\2\2\u02ab\u02ad\3\2\2\2\u02ac\u02aa\3\2\2\2\u02ad\u02ae\5R"+
+ "*\2\u02aeQ\3\2\2\2\u02af\u02b2\5V,\2\u02b0\u02b2\5X-\2\u02b1\u02af\3\2"+
+ "\2\2\u02b1\u02b0\3\2\2\2\u02b2S\3\2\2\2\u02b3\u02b4\5R*\2\u02b4\u02b5"+
+ "\7\6\2\2\u02b5\u02b7\3\2\2\2\u02b6\u02b3\3\2\2\2\u02b6\u02b7\3\2\2\2\u02b7"+
+ "\u02b8\3\2\2\2\u02b8\u02c0\7e\2\2\u02b9\u02ba\5R*\2\u02ba\u02bb\7\6\2"+
+ "\2\u02bb\u02bd\3\2\2\2\u02bc\u02b9\3\2\2\2\u02bc\u02bd\3\2\2\2\u02bd\u02be"+
+ "\3\2\2\2\u02be\u02c0\5R*\2\u02bf\u02b6\3\2\2\2\u02bf\u02bc\3\2\2\2\u02c0"+
+ "U\3\2\2\2\u02c1\u02c4\7f\2\2\u02c2\u02c4\7g\2\2\u02c3\u02c1\3\2\2\2\u02c3"+
+ "\u02c2\3\2\2\2\u02c4W\3\2\2\2\u02c5\u02c9\7c\2\2\u02c6\u02c9\5^\60\2\u02c7"+
+ "\u02c9\7d\2\2\u02c8\u02c5\3\2\2\2\u02c8\u02c6\3\2\2\2\u02c8\u02c7\3\2"+
+ "\2\2\u02c9Y\3\2\2\2\u02ca\u02cd\7b\2\2\u02cb\u02cd\7a\2\2\u02cc\u02ca"+
+ "\3\2\2\2\u02cc\u02cb\3\2\2\2\u02cd[\3\2\2\2\u02ce\u02cf\t\16\2\2\u02cf"+
+ "]\3\2\2\2\u02d0\u02d1\t\17\2\2\u02d1_\3\2\2\2eoqu~\u0080\u0084\u008a\u008d"+
+ "\u0098\u009b\u00a5\u00a8\u00ab\u00ae\u00b6\u00b9\u00bf\u00c3\u00c6\u00c9"+
+ "\u00cc\u00d3\u00db\u00de\u00ea\u00ed\u00f0\u00f7\u00fe\u0102\u0106\u010d"+
+ "\u0111\u0115\u011a\u011e\u0126\u012a\u0131\u013c\u013f\u0143\u014f\u0152"+
+ "\u0158\u015f\u0166\u0169\u016d\u0171\u0175\u0177\u0182\u0187\u018b\u018e"+
+ "\u0194\u0197\u019d\u01a0\u01a2\u01b5\u01c3\u01d1\u01d7\u01df\u01e1\u01e6"+
+ "\u01e9\u01f1\u01fa\u0200\u0208\u020d\u0213\u0216\u021a\u0222\u0228\u0234"+
+ "\u0236\u0240\u024e\u0255\u0263\u0271\u0276\u027d\u0280\u028a\u029d\u02aa"+
+ "\u02b1\u02b6\u02bc\u02bf\u02c3\u02c8\u02cc";
public static final ATN _ATN =
new ATNDeserializer().deserialize(_serializedATN.toCharArray());
static {
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java
index 35ce6cd0029..6745b3fa89b 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java
@@ -1,8 +1,3 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
// ANTLR GENERATED CODE: DO NOT EDIT
package org.elasticsearch.xpack.sql.parser;
import org.antlr.v4.runtime.tree.ParseTreeVisitor;
@@ -123,6 +118,12 @@ interface SqlBaseVisitor extends ParseTreeVisitor {
* @return the visitor result
*/
T visitQueryNoWith(SqlBaseParser.QueryNoWithContext ctx);
+ /**
+ * Visit a parse tree produced by {@link SqlBaseParser#limitClause}.
+ * @param ctx the parse tree
+ * @return the visitor result
+ */
+ T visitLimitClause(SqlBaseParser.LimitClauseContext ctx);
/**
* Visit a parse tree produced by the {@code queryPrimaryDefault}
* labeled alternative in {@link SqlBaseParser#queryTerm}.
@@ -311,6 +312,12 @@ interface SqlBaseVisitor extends ParseTreeVisitor {
* @return the visitor result
*/
T visitPattern(SqlBaseParser.PatternContext ctx);
+ /**
+ * Visit a parse tree produced by {@link SqlBaseParser#patternEscape}.
+ * @param ctx the parse tree
+ * @return the visitor result
+ */
+ T visitPatternEscape(SqlBaseParser.PatternEscapeContext ctx);
/**
* Visit a parse tree produced by the {@code valueExpressionDefault}
* labeled alternative in {@link SqlBaseParser#valueExpression}.
@@ -368,12 +375,12 @@ interface SqlBaseVisitor extends ParseTreeVisitor {
*/
T visitStar(SqlBaseParser.StarContext ctx);
/**
- * Visit a parse tree produced by the {@code functionCall}
+ * Visit a parse tree produced by the {@code function}
* labeled alternative in {@link SqlBaseParser#primaryExpression}.
* @param ctx the parse tree
* @return the visitor result
*/
- T visitFunctionCall(SqlBaseParser.FunctionCallContext ctx);
+ T visitFunction(SqlBaseParser.FunctionContext ctx);
/**
* Visit a parse tree produced by the {@code subqueryExpression}
* labeled alternative in {@link SqlBaseParser#primaryExpression}.
@@ -402,6 +409,42 @@ interface SqlBaseVisitor extends ParseTreeVisitor {
* @return the visitor result
*/
T visitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx);
+ /**
+ * Visit a parse tree produced by {@link SqlBaseParser#castExpression}.
+ * @param ctx the parse tree
+ * @return the visitor result
+ */
+ T visitCastExpression(SqlBaseParser.CastExpressionContext ctx);
+ /**
+ * Visit a parse tree produced by {@link SqlBaseParser#castTemplate}.
+ * @param ctx the parse tree
+ * @return the visitor result
+ */
+ T visitCastTemplate(SqlBaseParser.CastTemplateContext ctx);
+ /**
+ * Visit a parse tree produced by {@link SqlBaseParser#extractExpression}.
+ * @param ctx the parse tree
+ * @return the visitor result
+ */
+ T visitExtractExpression(SqlBaseParser.ExtractExpressionContext ctx);
+ /**
+ * Visit a parse tree produced by {@link SqlBaseParser#extractTemplate}.
+ * @param ctx the parse tree
+ * @return the visitor result
+ */
+ T visitExtractTemplate(SqlBaseParser.ExtractTemplateContext ctx);
+ /**
+ * Visit a parse tree produced by {@link SqlBaseParser#functionExpression}.
+ * @param ctx the parse tree
+ * @return the visitor result
+ */
+ T visitFunctionExpression(SqlBaseParser.FunctionExpressionContext ctx);
+ /**
+ * Visit a parse tree produced by {@link SqlBaseParser#functionTemplate}.
+ * @param ctx the parse tree
+ * @return the visitor result
+ */
+ T visitFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx);
/**
* Visit a parse tree produced by the {@code nullLiteral}
* labeled alternative in {@link SqlBaseParser#constant}.
@@ -437,6 +480,34 @@ interface SqlBaseVisitor extends ParseTreeVisitor {
* @return the visitor result
*/
T visitParamLiteral(SqlBaseParser.ParamLiteralContext ctx);
+ /**
+ * Visit a parse tree produced by the {@code dateEscapedLiteral}
+ * labeled alternative in {@link SqlBaseParser#constant}.
+ * @param ctx the parse tree
+ * @return the visitor result
+ */
+ T visitDateEscapedLiteral(SqlBaseParser.DateEscapedLiteralContext ctx);
+ /**
+ * Visit a parse tree produced by the {@code timeEscapedLiteral}
+ * labeled alternative in {@link SqlBaseParser#constant}.
+ * @param ctx the parse tree
+ * @return the visitor result
+ */
+ T visitTimeEscapedLiteral(SqlBaseParser.TimeEscapedLiteralContext ctx);
+ /**
+ * Visit a parse tree produced by the {@code timestampEscapedLiteral}
+ * labeled alternative in {@link SqlBaseParser#constant}.
+ * @param ctx the parse tree
+ * @return the visitor result
+ */
+ T visitTimestampEscapedLiteral(SqlBaseParser.TimestampEscapedLiteralContext ctx);
+ /**
+ * Visit a parse tree produced by the {@code guidEscapedLiteral}
+ * labeled alternative in {@link SqlBaseParser#constant}.
+ * @param ctx the parse tree
+ * @return the visitor result
+ */
+ T visitGuidEscapedLiteral(SqlBaseParser.GuidEscapedLiteralContext ctx);
/**
* Visit a parse tree produced by {@link SqlBaseParser#comparisonOperator}.
* @param ctx the parse tree
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java
index b7fe9178f91..2824b5502a8 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java
@@ -33,10 +33,13 @@ import java.util.BitSet;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.function.BiFunction;
import java.util.function.Function;
+import static java.lang.String.format;
+
public class SqlParser {
private static final Logger log = Loggers.getLogger(SqlParser.class);
@@ -102,16 +105,30 @@ public class SqlParser {
if (DEBUG) {
debug(parser);
+ tokenStream.fill();
+
+ for (Token t : tokenStream.getTokens()) {
+ String symbolicName = SqlBaseLexer.VOCABULARY.getSymbolicName(t.getType());
+ String literalName = SqlBaseLexer.VOCABULARY.getLiteralName(t.getType());
+ log.info(format(Locale.ROOT, " %-15s '%s'",
+ symbolicName == null ? literalName : symbolicName,
+ t.getText()));
+ };
}
ParserRuleContext tree = parseFunction.apply(parser);
+ if (DEBUG) {
+ log.info("Parse tree {} " + tree.toStringTree());
+ }
+
return visitor.apply(new AstBuilder(paramTokens), tree);
}
private void debug(SqlBaseParser parser) {
+
// when debugging, use the exact prediction mode (needed for diagnostics as well)
- parser.getInterpreter().setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);
+ parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
parser.addParseListener(parser.new TraceListener());
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java
new file mode 100644
index 00000000000..11ad24582ef
--- /dev/null
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java
@@ -0,0 +1,237 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.sql.parser;
+
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
+import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.Literal;
+import org.elasticsearch.xpack.sql.expression.UnresolvedAttribute;
+import org.elasticsearch.xpack.sql.expression.function.Function;
+import org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction;
+import org.elasticsearch.xpack.sql.expression.regex.Like;
+import org.elasticsearch.xpack.sql.expression.regex.LikePattern;
+import org.elasticsearch.xpack.sql.plan.logical.Limit;
+import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
+import org.elasticsearch.xpack.sql.plan.logical.With;
+import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue;
+import org.elasticsearch.xpack.sql.type.DataType;
+import org.junit.Assert;
+
+import java.util.List;
+import java.util.Locale;
+
+import static java.lang.String.format;
+import static java.util.Arrays.asList;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.is;
+
+public class EscapedFunctionsTests extends ESTestCase {
+
+ private final SqlParser parser = new SqlParser();
+
+ private Literal dateLiteral(String date) {
+ Expression exp = parser.createExpression(format(Locale.ROOT, "{d '%s'}", date));
+ assertThat(exp, instanceOf(Expression.class));
+ return (Literal) exp;
+ }
+
+ private Literal timeLiteral(String date) {
+ Expression exp = parser.createExpression(format(Locale.ROOT, "{t '%s'}", date));
+ assertThat(exp, instanceOf(Expression.class));
+ return (Literal) exp;
+ }
+
+ private Literal timestampLiteral(String date) {
+ Expression exp = parser.createExpression(format(Locale.ROOT, "{ts '%s'}", date));
+ assertThat(exp, instanceOf(Expression.class));
+ return (Literal) exp;
+ }
+
+ private Literal guidLiteral(String date) {
+ Expression exp = parser.createExpression(format(Locale.ROOT, "{guid '%s'}", date));
+ assertThat(exp, instanceOf(Expression.class));
+ return (Literal) exp;
+ }
+
+ private Limit limit(int limit) {
+ LogicalPlan plan = parser.createStatement(format(Locale.ROOT, "SELECT * FROM emp {limit %d}", limit));
+ assertThat(plan, instanceOf(With.class));
+ With with = (With) plan;
+ Limit limitPlan = (Limit) (with.child());
+ assertThat(limitPlan.limit(), instanceOf(Literal.class));
+ return limitPlan;
+ }
+
+ private LikePattern likeEscape(String like, String character) {
+ Expression exp = parser.createExpression(format(Locale.ROOT, "exp LIKE '%s' {escape '%s'}", like, character));
+ assertThat(exp, instanceOf(Like.class));
+ return ((Like) exp).right();
+ }
+
+ private Function function(String name) {
+ Expression exp = parser.createExpression(format(Locale.ROOT, "{fn %s}", name));
+ assertThat(exp, instanceOf(Function.class));
+ return (Function) exp;
+ }
+
+ public void testFunctionNoArg() {
+ Function f = function("SCORE()");
+ assertEquals("SCORE", f.functionName());
+ }
+
+ public void testFunctionOneArg() {
+ Function f = function("ABS(foo)");
+ assertEquals("ABS", f.functionName());
+ assertEquals(1, f.arguments().size());
+ Expression arg = f.arguments().get(0);
+ assertThat(arg, instanceOf(UnresolvedAttribute.class));
+ UnresolvedAttribute ua = (UnresolvedAttribute) arg;
+ assertThat(ua.name(), is("foo"));
+ }
+
+ public void testFunctionOneArgFunction() {
+ Function f = function("ABS({fn SCORE()})");
+ assertEquals("ABS", f.functionName());
+ assertEquals(1, f.arguments().size());
+ Expression arg = f.arguments().get(0);
+ assertThat(arg, instanceOf(UnresolvedFunction.class));
+ UnresolvedFunction uf = (UnresolvedFunction) arg;
+ assertThat(uf.name(), is("SCORE"));
+ }
+
+ public void testFunctionFloorWithExtract() {
+ Function f = function("CAST({fn FLOOR({fn EXTRACT(YEAR FROM \"foo\")})} AS int)");
+ assertEquals("CAST", f.functionName());
+ assertEquals(1, f.arguments().size());
+ Expression arg = f.arguments().get(0);
+ assertThat(arg, instanceOf(UnresolvedFunction.class));
+ f = (Function) arg;
+ assertEquals("FLOOR", f.functionName());
+ assertEquals(1, f.arguments().size());
+ arg = f.arguments().get(0);
+ assertThat(arg, instanceOf(UnresolvedFunction.class));
+ UnresolvedFunction uf = (UnresolvedFunction) arg;
+ assertThat(uf.name(), is("YEAR"));
+ }
+
+ public void testFunctionWithFunctionWithArg() {
+ Function f = function("POWER(foo, {fn POWER({fn SCORE()}, {fN SCORE()})})");
+ assertEquals("POWER", f.functionName());
+ assertEquals(2, f.arguments().size());
+ Expression arg = f.arguments().get(1);
+ assertThat(arg, instanceOf(UnresolvedFunction.class));
+ UnresolvedFunction uf = (UnresolvedFunction) arg;
+ assertThat(uf.name(), is("POWER"));
+ assertEquals(2, uf.arguments().size());
+
+ List args = uf.arguments();
+ arg = args.get(0);
+ assertThat(arg, instanceOf(UnresolvedFunction.class));
+ uf = (UnresolvedFunction) arg;
+ assertThat(uf.name(), is("SCORE"));
+
+ arg = args.get(1);
+ assertThat(arg, instanceOf(UnresolvedFunction.class));
+ uf = (UnresolvedFunction) arg;
+ assertThat(uf.name(), is("SCORE"));
+ }
+
+ public void testFunctionWithFunctionWithArgAndParams() {
+ Function f = (Function) parser.createExpression("POWER(?, {fn POWER({fn ABS(?)}, {fN ABS(?)})})",
+ asList(new SqlTypedParamValue(DataType.LONG, 1),
+ new SqlTypedParamValue(DataType.LONG, 1),
+ new SqlTypedParamValue(DataType.LONG, 1)));
+
+ assertEquals("POWER", f.functionName());
+ assertEquals(2, f.arguments().size());
+ Expression arg = f.arguments().get(1);
+ assertThat(arg, instanceOf(UnresolvedFunction.class));
+ UnresolvedFunction uf = (UnresolvedFunction) arg;
+ assertThat(uf.name(), is("POWER"));
+ assertEquals(2, uf.arguments().size());
+
+ List args = uf.arguments();
+ arg = args.get(0);
+ assertThat(arg, instanceOf(UnresolvedFunction.class));
+ uf = (UnresolvedFunction) arg;
+ assertThat(uf.name(), is("ABS"));
+
+ arg = args.get(1);
+ assertThat(arg, instanceOf(UnresolvedFunction.class));
+ uf = (UnresolvedFunction) arg;
+ assertThat(uf.name(), is("ABS"));
+ }
+
+ public void testDateLiteral() {
+ Literal l = dateLiteral("2012-01-01");
+ assertThat(l.dataType(), is(DataType.DATE));
+ }
+
+ public void testDateLiteralValidation() {
+ ParsingException ex = expectThrows(ParsingException.class, () -> dateLiteral("2012-13-01"));
+ assertEquals("line 1:2: Invalid date received; Cannot parse \"2012-13-01\": Value 13 for monthOfYear must be in the range [1,12]",
+ ex.getMessage());
+ }
+
+ public void testTimeLiteralUnsupported() {
+ SqlIllegalArgumentException ex = expectThrows(SqlIllegalArgumentException.class, () -> timeLiteral("10:10:10"));
+ assertThat(ex.getMessage(), is("Time (only) literals are not supported; a date component is required as well"));
+ }
+
+ public void testTimeLiteralValidation() {
+ ParsingException ex = expectThrows(ParsingException.class, () -> timeLiteral("10:10:65"));
+ assertEquals("line 1:2: Invalid time received; Cannot parse \"10:10:65\": Value 65 for secondOfMinute must be in the range [0,59]",
+ ex.getMessage());
+ }
+
+ public void testTimestampLiteral() {
+ Literal l = timestampLiteral("2012-01-01 10:01:02.3456");
+ assertThat(l.dataType(), is(DataType.DATE));
+ }
+
+ public void testTimestampLiteralValidation() {
+ ParsingException ex = expectThrows(ParsingException.class, () -> timestampLiteral("2012-01-01T10:01:02.3456"));
+ assertEquals(
+ "line 1:2: Invalid timestamp received; Invalid format: \"2012-01-01T10:01:02.3456\" is malformed at \"T10:01:02.3456\"",
+ ex.getMessage());
+ }
+
+ public void testGUID() {
+ Literal l = guidLiteral("12345678-90ab-cdef-0123-456789abcdef");
+ assertThat(l.dataType(), is(DataType.KEYWORD));
+
+ l = guidLiteral("12345678-90AB-cdef-0123-456789ABCdef");
+ assertThat(l.dataType(), is(DataType.KEYWORD));
+ }
+
+ public void testGUIDValidationHexa() {
+ ParsingException ex = expectThrows(ParsingException.class, () -> guidLiteral("12345678-90ab-cdef-0123-456789abcdeH"));
+ assertEquals("line 1:8: Invalid GUID, expected hexadecimal at offset[35], found [H]", ex.getMessage());
+ }
+
+ public void testGUIDValidationGroups() {
+ ParsingException ex = expectThrows(ParsingException.class, () -> guidLiteral("12345678A90ab-cdef-0123-456789abcdeH"));
+ assertEquals("line 1:8: Invalid GUID, expected group separator at offset [8], found [A]", ex.getMessage());
+ }
+
+ public void testGUIDValidationLength() {
+ ParsingException ex = expectThrows(ParsingException.class, () -> guidLiteral("12345678A90"));
+ assertEquals("line 1:8: Invalid GUID, too short", ex.getMessage());
+ }
+
+
+ public void testLimit() {
+ Limit limit = limit(10);
+ Literal l = (Literal) limit.limit();
+ Assert.assertThat(l.value(), is(10));
+ }
+
+ public void testLikeEscape() {
+ LikePattern pattern = likeEscape("|%tring", "|");
+ assertThat(pattern.escape(), is('|'));
+ }
+}
\ No newline at end of file
From 0d6b47bed9e433ccb0af52770cb662577a0e3519 Mon Sep 17 00:00:00 2001
From: Tanguy Leroux
Date: Thu, 12 Jul 2018 09:21:10 +0200
Subject: [PATCH 16/17] [Test] Reactivate 3rd party tests on CI (#31919)
3rd party tests are failing because the repository-s3 is expecting some
environment variables in order to test session tokens but the CI job is
not ready yet to provide those. This pull request relaxes the constraints
on the presence of env vars so that the 3rd party tests can still be
executed on CI.
closes #31813
---
plugins/repository-s3/build.gradle | 22 ++++++++++++++++------
1 file changed, 16 insertions(+), 6 deletions(-)
diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle
index dc2140a6086..5af0a412b4c 100644
--- a/plugins/repository-s3/build.gradle
+++ b/plugins/repository-s3/build.gradle
@@ -92,23 +92,26 @@ String s3TemporaryBasePath = System.getenv("amazon_s3_base_path_temporary")
// If all these variables are missing then we are testing against the internal fixture instead, which has the following
// credentials hard-coded in.
-if (!s3PermanentAccessKey && !s3PermanentSecretKey && !s3PermanentBucket && !s3PermanentBasePath
- && !s3TemporaryAccessKey && !s3TemporarySecretKey && !s3TemporaryBucket && !s3TemporaryBasePath && !s3TemporarySessionToken) {
-
+if (!s3PermanentAccessKey && !s3PermanentSecretKey && !s3PermanentBucket && !s3PermanentBasePath) {
s3PermanentAccessKey = 's3_integration_test_permanent_access_key'
s3PermanentSecretKey = 's3_integration_test_permanent_secret_key'
s3PermanentBucket = 'permanent-bucket-test'
s3PermanentBasePath = 'integration_test'
+ useFixture = true
+
+} else if (!s3PermanentAccessKey || !s3PermanentSecretKey || !s3PermanentBucket || !s3PermanentBasePath) {
+ throw new IllegalArgumentException("not all options specified to run against external S3 service")
+}
+
+if (!s3TemporaryAccessKey && !s3TemporarySecretKey && !s3TemporaryBucket && !s3TemporaryBasePath && !s3TemporarySessionToken) {
s3TemporaryAccessKey = 's3_integration_test_temporary_access_key'
s3TemporarySecretKey = 's3_integration_test_temporary_secret_key'
s3TemporaryBucket = 'temporary-bucket-test'
s3TemporaryBasePath = 'integration_test'
s3TemporarySessionToken = 's3_integration_test_temporary_session_token'
- useFixture = true
-} else if (!s3PermanentAccessKey || !s3PermanentSecretKey || !s3PermanentBucket || !s3PermanentBasePath
- || !s3TemporaryAccessKey || !s3TemporarySecretKey || !s3TemporaryBucket || !s3TemporaryBasePath || !s3TemporarySessionToken) {
+} else if (!s3TemporaryAccessKey || !s3TemporarySecretKey || !s3TemporaryBucket || !s3TemporaryBasePath || !s3TemporarySessionToken) {
throw new IllegalArgumentException("not all options specified to run against external S3 service")
}
@@ -296,6 +299,13 @@ processTestResources {
MavenFilteringHack.filter(it, expansions)
}
+project.afterEvaluate {
+ if (useFixture == false) {
+ // 30_repository_temporary_credentials is not ready for CI yet
+ integTestRunner.systemProperty 'tests.rest.blacklist', 'repository_s3/30_repository_temporary_credentials/*'
+ }
+}
+
integTestCluster {
keystoreSetting 's3.client.integration_test_permanent.access_key', s3PermanentAccessKey
keystoreSetting 's3.client.integration_test_permanent.secret_key', s3PermanentSecretKey
From ac4e0f1b1d34d913b7d5394ba99794e06186d757 Mon Sep 17 00:00:00 2001
From: Alexander Reelsen
Date: Thu, 12 Jul 2018 09:55:17 +0200
Subject: [PATCH 17/17] Tests: Remove use of joda time in some tests (#31922)
This also extends the dateformatters test to ensure that the printers
are acting the same in java time and joda time.
---
.../ingest/common/DateProcessorTests.java | 35 +++---
.../cluster/metadata/IndexMetaData.java | 8 +-
.../metadata/MetaDataCreateIndexService.java | 5 +-
.../common/time/DateFormatters.java | 109 ++++++++++++------
.../rest/action/cat/RestIndicesAction.java | 7 +-
.../admin/indices/rollover/RolloverIT.java | 20 ++--
.../HumanReadableIndexSettingsTests.java | 9 +-
.../joda/JavaJodaTimeDuellingTests.java | 100 ++++++++++++++++
.../explain/ExplainActionIT.java | 11 +-
.../indices/IndicesRequestCacheIT.java | 12 +-
.../aggregations/bucket/MinDocCountIT.java | 12 +-
.../highlight/HighlighterSearchIT.java | 6 +-
.../search/fields/SearchFieldsIT.java | 22 ++--
.../functionscore/DecayFunctionScoreIT.java | 14 +--
.../search/query/SearchQueryIT.java | 8 +-
.../validate/SimpleValidateQueryIT.java | 20 ++--
16 files changed, 274 insertions(+), 124 deletions(-)
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java
index 8fba759aa16..43a5f9245b1 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java
@@ -24,9 +24,10 @@ import org.elasticsearch.ingest.RandomDocumentPicks;
import org.elasticsearch.ingest.TestTemplateService;
import org.elasticsearch.script.TemplateScript;
import org.elasticsearch.test.ESTestCase;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@@ -36,19 +37,21 @@ import java.util.Map;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
-import static org.joda.time.DateTimeZone.UTC;
public class DateProcessorTests extends ESTestCase {
+
private TemplateScript.Factory templatize(Locale locale) {
return new TestTemplateService.MockTemplateScript.Factory(locale.getLanguage());
}
- private TemplateScript.Factory templatize(DateTimeZone timezone) {
- return new TestTemplateService.MockTemplateScript.Factory(timezone.getID());
+ private TemplateScript.Factory templatize(ZoneId timezone) {
+ // prevent writing "UTC" as string, as joda time does not parse it
+ String id = timezone.equals(ZoneOffset.UTC) ? "UTC" : timezone.getId();
+ return new TestTemplateService.MockTemplateScript.Factory(id);
}
public void testJodaPattern() {
DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10),
- templatize(DateTimeZone.forID("Europe/Amsterdam")), templatize(Locale.ENGLISH),
+ templatize(ZoneId.of("Europe/Amsterdam")), templatize(Locale.ENGLISH),
"date_as_string", Collections.singletonList("yyyy dd MM hh:mm:ss"), "date_as_date");
Map document = new HashMap<>();
document.put("date_as_string", "2010 12 06 11:05:15");
@@ -63,7 +66,7 @@ public class DateProcessorTests extends ESTestCase {
matchFormats.add("dd/MM/yyyy");
matchFormats.add("dd-MM-yyyy");
DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10),
- templatize(DateTimeZone.forID("Europe/Amsterdam")), templatize(Locale.ENGLISH),
+ templatize(ZoneId.of("Europe/Amsterdam")), templatize(Locale.ENGLISH),
"date_as_string", matchFormats, "date_as_date");
Map document = new HashMap<>();
@@ -98,7 +101,7 @@ public class DateProcessorTests extends ESTestCase {
public void testInvalidJodaPattern() {
try {
DateProcessor processor = new DateProcessor(randomAlphaOfLength(10),
- templatize(UTC), templatize(randomLocale(random())),
+ templatize(ZoneOffset.UTC), templatize(randomLocale(random())),
"date_as_string", Collections.singletonList("invalid pattern"), "date_as_date");
Map document = new HashMap<>();
document.put("date_as_string", "2010");
@@ -112,7 +115,7 @@ public class DateProcessorTests extends ESTestCase {
public void testJodaPatternLocale() {
DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10),
- templatize(DateTimeZone.forID("Europe/Amsterdam")), templatize(Locale.ITALIAN),
+ templatize(ZoneId.of("Europe/Amsterdam")), templatize(Locale.ITALIAN),
"date_as_string", Collections.singletonList("yyyy dd MMM"), "date_as_date");
Map document = new HashMap<>();
document.put("date_as_string", "2010 12 giugno");
@@ -123,18 +126,18 @@ public class DateProcessorTests extends ESTestCase {
public void testJodaPatternDefaultYear() {
DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10),
- templatize(DateTimeZone.forID("Europe/Amsterdam")), templatize(Locale.ENGLISH),
+ templatize(ZoneId.of("Europe/Amsterdam")), templatize(Locale.ENGLISH),
"date_as_string", Collections.singletonList("dd/MM"), "date_as_date");
Map document = new HashMap<>();
document.put("date_as_string", "12/06");
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
dateProcessor.execute(ingestDocument);
assertThat(ingestDocument.getFieldValue("date_as_date", String.class),
- equalTo(DateTime.now().getYear() + "-06-12T00:00:00.000+02:00"));
+ equalTo(ZonedDateTime.now().getYear() + "-06-12T00:00:00.000+02:00"));
}
public void testTAI64N() {
- DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), templatize(DateTimeZone.forOffsetHours(2)),
+ DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), templatize(ZoneOffset.ofHours(2)),
templatize(randomLocale(random())),
"date_as_string", Collections.singletonList("TAI64N"), "date_as_date");
Map document = new HashMap<>();
@@ -146,8 +149,8 @@ public class DateProcessorTests extends ESTestCase {
}
public void testUnixMs() {
- DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), templatize(UTC), templatize(randomLocale(random())),
- "date_as_string", Collections.singletonList("UNIX_MS"), "date_as_date");
+ DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), templatize(ZoneOffset.UTC),
+ templatize(randomLocale(random())), "date_as_string", Collections.singletonList("UNIX_MS"), "date_as_date");
Map document = new HashMap<>();
document.put("date_as_string", "1000500");
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
@@ -162,7 +165,7 @@ public class DateProcessorTests extends ESTestCase {
}
public void testUnix() {
- DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), templatize(UTC),
+ DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), templatize(ZoneOffset.UTC),
templatize(randomLocale(random())),
"date_as_string", Collections.singletonList("UNIX"), "date_as_date");
Map document = new HashMap<>();
@@ -186,7 +189,7 @@ public class DateProcessorTests extends ESTestCase {
public void testInvalidLocale() {
DateProcessor processor = new DateProcessor(randomAlphaOfLength(10),
- templatize(UTC), new TestTemplateService.MockTemplateScript.Factory("invalid_locale"),
+ templatize(ZoneOffset.UTC), new TestTemplateService.MockTemplateScript.Factory("invalid_locale"),
"date_as_string", Collections.singletonList("yyyy"), "date_as_date");
Map document = new HashMap<>();
document.put("date_as_string", "2010");
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java
index d978e214fc9..90380205012 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java
@@ -23,7 +23,6 @@ import com.carrotsearch.hppc.LongArrayList;
import com.carrotsearch.hppc.cursors.IntObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
-
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.rollover.RolloverInfo;
import org.elasticsearch.action.support.ActiveShardCount;
@@ -56,10 +55,11 @@ import org.elasticsearch.index.Index;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.rest.RestStatus;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
@@ -1345,7 +1345,7 @@ public class IndexMetaData implements Diffable, ToXContentFragmen
}
Long creationDate = settings.getAsLong(SETTING_CREATION_DATE, null);
if (creationDate != null) {
- DateTime creationDateTime = new DateTime(creationDate, DateTimeZone.UTC);
+ ZonedDateTime creationDateTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(creationDate), ZoneOffset.UTC);
builder.put(SETTING_CREATION_DATE_STRING, creationDateTime.toString());
}
return builder.build();
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java
index be9db5262b0..b19d65090c6 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java
@@ -73,11 +73,10 @@ import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.InvalidIndexNameException;
import org.elasticsearch.indices.cluster.IndicesClusterStateService.AllocatedIndices.IndexRemovalReason;
import org.elasticsearch.threadpool.ThreadPool;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
import java.io.UnsupportedEncodingException;
import java.nio.file.Path;
+import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -383,7 +382,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
}
if (indexSettingsBuilder.get(SETTING_CREATION_DATE) == null) {
- indexSettingsBuilder.put(SETTING_CREATION_DATE, new DateTime(DateTimeZone.UTC).getMillis());
+ indexSettingsBuilder.put(SETTING_CREATION_DATE, Instant.now().toEpochMilli());
}
indexSettingsBuilder.put(IndexMetaData.SETTING_INDEX_PROVIDED_NAME, request.getProvidedName());
indexSettingsBuilder.put(SETTING_INDEX_UUID, UUIDs.randomBase64UUID());
diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java
index e781c979ed9..eef2ab55587 100644
--- a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java
+++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java
@@ -52,12 +52,6 @@ import static java.time.temporal.ChronoField.SECOND_OF_MINUTE;
public class DateFormatters {
- private static final DateTimeFormatter TIME_ZONE_FORMATTER = new DateTimeFormatterBuilder()
- .optionalStart().appendZoneId().optionalEnd()
- .optionalStart().appendOffset("+HHmm", "Z").optionalEnd()
- .optionalStart().appendOffset("+HH:mm", "Z").optionalEnd()
- .toFormatter(Locale.ROOT);
-
private static final DateTimeFormatter TIME_ZONE_FORMATTER_ZONE_ID = new DateTimeFormatterBuilder()
.appendZoneId()
.toFormatter(Locale.ROOT);
@@ -70,12 +64,80 @@ public class DateFormatters {
.appendOffset("+HH:mm", "Z")
.toFormatter(Locale.ROOT);
+ private static final DateTimeFormatter TIME_ZONE_FORMATTER = new DateTimeFormatterBuilder()
+ .optionalStart().appendZoneId().optionalEnd()
+ .optionalStart().appendOffset("+HHmm", "Z").optionalEnd()
+ .optionalStart().appendOffset("+HH:mm", "Z").optionalEnd()
+ .toFormatter(Locale.ROOT);
+
private static final DateTimeFormatter OPTIONAL_TIME_ZONE_FORMATTER = new DateTimeFormatterBuilder()
.optionalStart()
.append(TIME_ZONE_FORMATTER)
.optionalEnd()
.toFormatter(Locale.ROOT);
+ private static final DateTimeFormatter STRICT_YEAR_MONTH_DAY_FORMATTER = new DateTimeFormatterBuilder()
+ .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
+ .appendLiteral("-")
+ .appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE)
+ .appendLiteral('-')
+ .appendValue(DAY_OF_MONTH, 2, 2, SignStyle.NOT_NEGATIVE)
+ .toFormatter(Locale.ROOT);
+
+ private static final DateTimeFormatter STRICT_HOUR_MINUTE_SECOND_FORMATTER = new DateTimeFormatterBuilder()
+ .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
+ .appendLiteral(':')
+ .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
+ .appendLiteral(':')
+ .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
+ .toFormatter(Locale.ROOT);
+
+ private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER_1 = new DateTimeFormatterBuilder()
+ .append(STRICT_YEAR_MONTH_DAY_FORMATTER)
+ .optionalStart()
+ .appendLiteral('T')
+ .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER)
+ .optionalStart()
+ .appendFraction(MILLI_OF_SECOND, 3, 3, true)
+ .optionalEnd()
+ .optionalStart()
+ .append(TIME_ZONE_FORMATTER_WITHOUT_COLON)
+ .optionalEnd()
+ .optionalEnd()
+ .toFormatter(Locale.ROOT);
+
+ private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER_2 = new DateTimeFormatterBuilder()
+ .append(STRICT_YEAR_MONTH_DAY_FORMATTER)
+ .optionalStart()
+ .appendLiteral('T')
+ .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER)
+ .optionalStart()
+ .appendFraction(MILLI_OF_SECOND, 3, 3, true)
+ .optionalEnd()
+ .optionalStart()
+ .append(TIME_ZONE_FORMATTER_WITH_COLON)
+ .optionalEnd()
+ .optionalEnd()
+ .toFormatter(Locale.ROOT);
+
+ private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER_3 = new DateTimeFormatterBuilder()
+ .append(STRICT_YEAR_MONTH_DAY_FORMATTER)
+ .optionalStart()
+ .appendLiteral('T')
+ .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER)
+ .optionalStart()
+ .appendFraction(MILLI_OF_SECOND, 3, 3, true)
+ .optionalEnd()
+ .optionalStart()
+ .append(TIME_ZONE_FORMATTER_ZONE_ID)
+ .optionalEnd()
+ .optionalEnd()
+ .toFormatter(Locale.ROOT);
+
+ private static final CompoundDateTimeFormatter STRICT_DATE_OPTIONAL_TIME =
+ new CompoundDateTimeFormatter(STRICT_DATE_OPTIONAL_TIME_FORMATTER_1, STRICT_DATE_OPTIONAL_TIME_FORMATTER_2,
+ STRICT_DATE_OPTIONAL_TIME_FORMATTER_3);
+
private static final DateTimeFormatter BASIC_TIME_NO_MILLIS_FORMATTER = new DateTimeFormatterBuilder()
.appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
@@ -258,7 +320,8 @@ public class DateFormatters {
.append(OPTIONAL_TIME_ZONE_FORMATTER)
.toFormatter(Locale.ROOT));
- private static final CompoundDateTimeFormatter DATE_OPTIONAL_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder()
+ private static final CompoundDateTimeFormatter DATE_OPTIONAL_TIME = new CompoundDateTimeFormatter(STRICT_DATE_OPTIONAL_TIME.printer,
+ new DateTimeFormatterBuilder()
.append(DATE_FORMATTER)
.parseLenient()
.optionalStart()
@@ -560,14 +623,6 @@ public class DateFormatters {
private static final CompoundDateTimeFormatter STRICT_DATE_HOUR_MINUTE = new CompoundDateTimeFormatter(
DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm", Locale.ROOT));
- private static final DateTimeFormatter STRICT_YEAR_MONTH_DAY_FORMATTER = new DateTimeFormatterBuilder()
- .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
- .appendLiteral("-")
- .appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE)
- .appendLiteral('-')
- .appendValue(DAY_OF_MONTH, 2, 2, SignStyle.NOT_NEGATIVE)
- .toFormatter(Locale.ROOT);
-
private static final CompoundDateTimeFormatter STRICT_YEAR_MONTH_DAY = new CompoundDateTimeFormatter(STRICT_YEAR_MONTH_DAY_FORMATTER);
private static final CompoundDateTimeFormatter STRICT_YEAR_MONTH = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder()
@@ -580,14 +635,6 @@ public class DateFormatters {
.appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
.toFormatter(Locale.ROOT));
- private static final DateTimeFormatter STRICT_HOUR_MINUTE_SECOND_FORMATTER = new DateTimeFormatterBuilder()
- .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
- .appendLiteral(':')
- .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
- .appendLiteral(':')
- .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
- .toFormatter(Locale.ROOT);
-
private static final CompoundDateTimeFormatter STRICT_HOUR_MINUTE_SECOND =
new CompoundDateTimeFormatter(STRICT_HOUR_MINUTE_SECOND_FORMATTER);
@@ -601,18 +648,6 @@ public class DateFormatters {
.append(OPTIONAL_TIME_ZONE_FORMATTER)
.toFormatter(Locale.ROOT));
- private static final CompoundDateTimeFormatter STRICT_DATE_OPTIONAL_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder()
- .append(STRICT_YEAR_MONTH_DAY_FORMATTER)
- .optionalStart()
- .appendLiteral('T')
- .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER)
- .optionalStart()
- .appendFraction(MILLI_OF_SECOND, 3, 3, true)
- .optionalEnd()
- .append(OPTIONAL_TIME_ZONE_FORMATTER)
- .optionalEnd()
- .toFormatter(Locale.ROOT));
-
private static final CompoundDateTimeFormatter STRICT_ORDINAL_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter(
new DateTimeFormatterBuilder()
.appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
@@ -918,8 +953,8 @@ public class DateFormatters {
return forPattern(formats[0], locale);
} else {
Collection parsers = new LinkedHashSet<>(formats.length);
- for (int i = 0; i < formats.length; i++) {
- CompoundDateTimeFormatter dateTimeFormatter = forPattern(formats[i], locale);
+ for (String format : formats) {
+ CompoundDateTimeFormatter dateTimeFormatter = forPattern(format, locale);
try {
parsers.addAll(Arrays.asList(dateTimeFormatter.parsers));
} catch (IllegalArgumentException e) {
diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java
index 52da10a3785..3a76c7ca0c9 100644
--- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java
+++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java
@@ -45,9 +45,10 @@ import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.RestResponse;
import org.elasticsearch.rest.action.RestActionListener;
import org.elasticsearch.rest.action.RestResponseListener;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
+import java.time.Instant;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
@@ -379,7 +380,7 @@ public class RestIndicesAction extends AbstractCatAction {
table.addCell(primaryStats.getDocs() == null ? null : primaryStats.getDocs().getDeleted());
table.addCell(indexMetaData.getCreationDate());
- table.addCell(new DateTime(indexMetaData.getCreationDate(), DateTimeZone.UTC));
+ table.addCell(ZonedDateTime.ofInstant(Instant.ofEpochMilli(indexMetaData.getCreationDate()), ZoneOffset.UTC));
table.addCell(totalStats.getStore() == null ? null : totalStats.getStore().size());
table.addCell(primaryStats.getStore() == null ? null : primaryStats.getStore().size());
diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java
index 4d86dbbc51f..5379769e819 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java
@@ -25,16 +25,16 @@ import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-import org.joda.time.format.DateTimeFormat;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
@@ -197,8 +197,8 @@ public class RolloverIT extends ESIntegTestCase {
}
public void testRolloverWithDateMath() {
- DateTime now = new DateTime(DateTimeZone.UTC);
- String index = "test-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(now) + "-1";
+ ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
+ String index = "test-" + DateFormatters.forPattern("yyyy.MM.dd").format(now) + "-1";
String dateMathExp = "";
assertAcked(prepareCreate(dateMathExp).addAlias(new Alias("test_alias")).get());
ensureGreen(index);
@@ -212,14 +212,14 @@ public class RolloverIT extends ESIntegTestCase {
ensureGreen(index);
RolloverResponse response = client().admin().indices().prepareRolloverIndex("test_alias").get();
assertThat(response.getOldIndex(), equalTo(index));
- assertThat(response.getNewIndex(), equalTo("test-" + DateTimeFormat.forPattern("YYYY.MM").print(now) + "-000002"));
+ assertThat(response.getNewIndex(), equalTo("test-" + DateFormatters.forPattern("yyyy.MM").format(now) + "-000002"));
assertThat(response.isDryRun(), equalTo(false));
assertThat(response.isRolledOver(), equalTo(true));
assertThat(response.getConditionStatus().size(), equalTo(0));
response = client().admin().indices().prepareRolloverIndex("test_alias").get();
- assertThat(response.getOldIndex(), equalTo("test-" + DateTimeFormat.forPattern("YYYY.MM").print(now) + "-000002"));
- assertThat(response.getNewIndex(), equalTo("test-" + DateTimeFormat.forPattern("YYYY.MM").print(now) + "-000003"));
+ assertThat(response.getOldIndex(), equalTo("test-" + DateFormatters.forPattern("yyyy.MM").format(now) + "-000002"));
+ assertThat(response.getNewIndex(), equalTo("test-" + DateFormatters.forPattern("yyyy.MM").format(now) + "-000003"));
assertThat(response.isDryRun(), equalTo(false));
assertThat(response.isRolledOver(), equalTo(true));
assertThat(response.getConditionStatus().size(), equalTo(0));
@@ -232,8 +232,8 @@ public class RolloverIT extends ESIntegTestCase {
IndexMetaData.SETTING_INDEX_PROVIDED_NAME));
response = client().admin().indices().prepareRolloverIndex("test_alias").setNewIndexName("").get();
- assertThat(response.getOldIndex(), equalTo("test-" + DateTimeFormat.forPattern("YYYY.MM").print(now) + "-000003"));
- assertThat(response.getNewIndex(), equalTo("test-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(now) + "-000004"));
+ assertThat(response.getOldIndex(), equalTo("test-" + DateFormatters.forPattern("yyyy.MM").format(now) + "-000003"));
+ assertThat(response.getNewIndex(), equalTo("test-" + DateFormatters.forPattern("yyyy.MM.dd").format(now) + "-000004"));
assertThat(response.isDryRun(), equalTo(false));
assertThat(response.isRolledOver(), equalTo(true));
assertThat(response.getConditionStatus().size(), equalTo(0));
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/HumanReadableIndexSettingsTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/HumanReadableIndexSettingsTests.java
index 9be087e0e5d..83c615e4882 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/HumanReadableIndexSettingsTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/HumanReadableIndexSettingsTests.java
@@ -22,8 +22,10 @@ package org.elasticsearch.cluster.metadata;
import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
+
+import java.time.Instant;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import static org.elasticsearch.test.VersionUtils.randomVersion;
@@ -42,6 +44,7 @@ public class HumanReadableIndexSettingsTests extends ESTestCase {
assertEquals(versionCreated.toString(), humanSettings.get(IndexMetaData.SETTING_VERSION_CREATED_STRING, null));
assertEquals(versionUpgraded.toString(), humanSettings.get(IndexMetaData.SETTING_VERSION_UPGRADED_STRING, null));
- assertEquals(new DateTime(created, DateTimeZone.UTC).toString(), humanSettings.get(IndexMetaData.SETTING_CREATION_DATE_STRING, null));
+ ZonedDateTime creationDate = ZonedDateTime.ofInstant(Instant.ofEpochMilli(created), ZoneOffset.UTC);
+ assertEquals(creationDate.toString(), humanSettings.get(IndexMetaData.SETTING_CREATION_DATE_STRING, null));
}
}
diff --git a/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java b/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java
index 7c6f0872288..d6f733d7c1c 100644
--- a/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java
+++ b/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java
@@ -23,7 +23,9 @@ import org.elasticsearch.common.time.CompoundDateTimeFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.test.ESTestCase;
import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;
+import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
@@ -354,11 +356,109 @@ public class JavaJodaTimeDuellingTests extends ESTestCase {
assertParseException("2012-W1-1", "strict_weekyear_week_day");
}
+ public void testSamePrinterOutput() {
+ int year = randomIntBetween(1970, 2030);
+ int month = randomIntBetween(1, 12);
+ int day = randomIntBetween(1, 28);
+ int hour = randomIntBetween(0, 23);
+ int minute = randomIntBetween(0, 59);
+ int second = randomIntBetween(0, 59);
+
+ ZonedDateTime javaDate = ZonedDateTime.of(year, month, day, hour, minute, second, 0, ZoneOffset.UTC);
+ DateTime jodaDate = new DateTime(year, month, day, hour, minute, second, DateTimeZone.UTC);
+
+ assertSamePrinterOutput("basicDate", javaDate, jodaDate);
+ assertSamePrinterOutput("basicDateTime", javaDate, jodaDate);
+ assertSamePrinterOutput("basicDateTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("basicOrdinalDate", javaDate, jodaDate);
+ assertSamePrinterOutput("basicOrdinalDateTime", javaDate, jodaDate);
+ assertSamePrinterOutput("basicOrdinalDateTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("basicTime", javaDate, jodaDate);
+ assertSamePrinterOutput("basicTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("basicTTime", javaDate, jodaDate);
+ assertSamePrinterOutput("basicTTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("basicWeekDate", javaDate, jodaDate);
+ assertSamePrinterOutput("basicWeekDateTime", javaDate, jodaDate);
+ assertSamePrinterOutput("basicWeekDateTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("date", javaDate, jodaDate);
+ assertSamePrinterOutput("dateHour", javaDate, jodaDate);
+ assertSamePrinterOutput("dateHourMinute", javaDate, jodaDate);
+ assertSamePrinterOutput("dateHourMinuteSecond", javaDate, jodaDate);
+ assertSamePrinterOutput("dateHourMinuteSecondFraction", javaDate, jodaDate);
+ assertSamePrinterOutput("dateHourMinuteSecondMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("dateOptionalTime", javaDate, jodaDate);
+ assertSamePrinterOutput("dateTime", javaDate, jodaDate);
+ assertSamePrinterOutput("dateTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("hour", javaDate, jodaDate);
+ assertSamePrinterOutput("hourMinute", javaDate, jodaDate);
+ assertSamePrinterOutput("hourMinuteSecond", javaDate, jodaDate);
+ assertSamePrinterOutput("hourMinuteSecondFraction", javaDate, jodaDate);
+ assertSamePrinterOutput("hourMinuteSecondMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("ordinalDate", javaDate, jodaDate);
+ assertSamePrinterOutput("ordinalDateTime", javaDate, jodaDate);
+ assertSamePrinterOutput("ordinalDateTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("time", javaDate, jodaDate);
+ assertSamePrinterOutput("timeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("tTime", javaDate, jodaDate);
+ assertSamePrinterOutput("tTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("weekDate", javaDate, jodaDate);
+ assertSamePrinterOutput("weekDateTime", javaDate, jodaDate);
+ assertSamePrinterOutput("weekDateTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("weekyear", javaDate, jodaDate);
+ assertSamePrinterOutput("weekyearWeek", javaDate, jodaDate);
+ assertSamePrinterOutput("weekyearWeekDay", javaDate, jodaDate);
+ assertSamePrinterOutput("year", javaDate, jodaDate);
+ assertSamePrinterOutput("yearMonth", javaDate, jodaDate);
+ assertSamePrinterOutput("yearMonthDay", javaDate, jodaDate);
+ assertSamePrinterOutput("epoch_second", javaDate, jodaDate);
+ assertSamePrinterOutput("epoch_millis", javaDate, jodaDate);
+ assertSamePrinterOutput("strictBasicWeekDate", javaDate, jodaDate);
+ assertSamePrinterOutput("strictBasicWeekDateTime", javaDate, jodaDate);
+ assertSamePrinterOutput("strictBasicWeekDateTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("strictDate", javaDate, jodaDate);
+ assertSamePrinterOutput("strictDateHour", javaDate, jodaDate);
+ assertSamePrinterOutput("strictDateHourMinute", javaDate, jodaDate);
+ assertSamePrinterOutput("strictDateHourMinuteSecond", javaDate, jodaDate);
+ assertSamePrinterOutput("strictDateHourMinuteSecondFraction", javaDate, jodaDate);
+ assertSamePrinterOutput("strictDateHourMinuteSecondMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("strictDateOptionalTime", javaDate, jodaDate);
+ assertSamePrinterOutput("strictDateTime", javaDate, jodaDate);
+ assertSamePrinterOutput("strictDateTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("strictHour", javaDate, jodaDate);
+ assertSamePrinterOutput("strictHourMinute", javaDate, jodaDate);
+ assertSamePrinterOutput("strictHourMinuteSecond", javaDate, jodaDate);
+ assertSamePrinterOutput("strictHourMinuteSecondFraction", javaDate, jodaDate);
+ assertSamePrinterOutput("strictHourMinuteSecondMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("strictOrdinalDate", javaDate, jodaDate);
+ assertSamePrinterOutput("strictOrdinalDateTime", javaDate, jodaDate);
+ assertSamePrinterOutput("strictOrdinalDateTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("strictTime", javaDate, jodaDate);
+ assertSamePrinterOutput("strictTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("strictTTime", javaDate, jodaDate);
+ assertSamePrinterOutput("strictTTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("strictWeekDate", javaDate, jodaDate);
+ assertSamePrinterOutput("strictWeekDateTime", javaDate, jodaDate);
+ assertSamePrinterOutput("strictWeekDateTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("strictWeekyear", javaDate, jodaDate);
+ assertSamePrinterOutput("strictWeekyearWeek", javaDate, jodaDate);
+ assertSamePrinterOutput("strictWeekyearWeekDay", javaDate, jodaDate);
+ assertSamePrinterOutput("strictYear", javaDate, jodaDate);
+ assertSamePrinterOutput("strictYearMonth", javaDate, jodaDate);
+ assertSamePrinterOutput("strictYearMonthDay", javaDate, jodaDate);
+ }
+
public void testSeveralTimeFormats() {
assertSameDate("2018-12-12", "year_month_day||ordinal_date");
assertSameDate("2018-128", "year_month_day||ordinal_date");
}
+ private void assertSamePrinterOutput(String format, ZonedDateTime javaDate, DateTime jodaDate) {
+ assertThat(jodaDate.getMillis(), is(javaDate.toEpochSecond() * 1000));
+ String javaTimeOut = DateFormatters.forPattern(format).format(javaDate);
+ String jodaTimeOut = Joda.forPattern(format).printer().print(jodaDate);
+ assertThat(javaTimeOut, is(jodaTimeOut));
+ }
+
private void assertSameDate(String input, String format) {
FormatDateTimeFormatter jodaFormatter = Joda.forPattern(format);
DateTime jodaDateTime = jodaFormatter.parser().parseDateTime(input);
diff --git a/server/src/test/java/org/elasticsearch/explain/ExplainActionIT.java b/server/src/test/java/org/elasticsearch/explain/ExplainActionIT.java
index 6d8e1a41c5b..229cb99fbfb 100644
--- a/server/src/test/java/org/elasticsearch/explain/ExplainActionIT.java
+++ b/server/src/test/java/org/elasticsearch/explain/ExplainActionIT.java
@@ -28,12 +28,12 @@ import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.test.ESIntegTestCase;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-import org.joda.time.format.ISODateTimeFormat;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
@@ -249,8 +249,9 @@ public class ExplainActionIT extends ESIntegTestCase {
public void testExplainDateRangeInQueryString() {
createIndex("test");
- String aMonthAgo = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).minusMonths(1));
- String aMonthFromNow = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).plusMonths(1));
+ ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
+ String aMonthAgo = DateTimeFormatter.ISO_LOCAL_DATE.format(now.minusMonths(1));
+ String aMonthFromNow = DateTimeFormatter.ISO_LOCAL_DATE.format(now.plusMonths(1));
client().prepareIndex("test", "type", "1").setSource("past", aMonthAgo, "future", aMonthFromNow).get();
diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java
index 70a633f02f4..0f12305f239 100644
--- a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java
+++ b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java
@@ -30,11 +30,11 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInter
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;
import org.elasticsearch.test.ESIntegTestCase;
-import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
-import org.joda.time.chrono.ISOChronology;
-import org.joda.time.format.DateTimeFormat;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
import java.util.List;
import static org.elasticsearch.search.aggregations.AggregationBuilders.dateHistogram;
@@ -255,7 +255,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
.setSettings(settings).get());
assertAcked(client.admin().indices().prepareCreate("index-3").addMapping("type", "d", "type=date")
.setSettings(settings).get());
- DateTime now = new DateTime(ISOChronology.getInstanceUTC());
+ ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
indexRandom(true, client.prepareIndex("index-1", "type", "1").setSource("d", now),
client.prepareIndex("index-1", "type", "2").setSource("d", now.minusDays(1)),
client.prepareIndex("index-1", "type", "3").setSource("d", now.minusDays(2)),
@@ -456,9 +456,9 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
.setSettings(settings)
.addAlias(new Alias("last_week").filter(QueryBuilders.rangeQuery("created_at").gte("now-7d/d")))
.get());
- DateTime now = new DateTime(DateTimeZone.UTC);
+ ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
client.prepareIndex("index", "type", "1").setRouting("1").setSource("created_at",
- DateTimeFormat.forPattern("YYYY-MM-dd").print(now)).get();
+ DateTimeFormatter.ISO_LOCAL_DATE.format(now)).get();
refresh();
assertThat(client.admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java
index af1104879e9..4a85c2c1453 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java
@@ -22,10 +22,10 @@ package org.elasticsearch.search.aggregations.bucket;
import com.carrotsearch.hppc.LongHashSet;
import com.carrotsearch.hppc.LongSet;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
-
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
@@ -40,10 +40,9 @@ import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.test.ESIntegTestCase;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-import org.joda.time.format.DateTimeFormat;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
@@ -123,8 +122,9 @@ public class MinDocCountIT extends AbstractTermsTestCase {
longTerm = randomInt(cardinality * 2);
} while (!longTerms.add(longTerm));
double doubleTerm = longTerm * Math.PI;
- String dateTerm = DateTimeFormat.forPattern("yyyy-MM-dd")
- .print(new DateTime(2014, 1, ((int) longTerm % 20) + 1, 0, 0, DateTimeZone.UTC));
+
+ ZonedDateTime time = ZonedDateTime.of(2014, 1, ((int) longTerm % 20) + 1, 0, 0, 0, 0, ZoneOffset.UTC);
+ String dateTerm = DateFormatters.forPattern("yyyy-MM-dd").format(time);
final int frequency = randomBoolean() ? 1 : randomIntBetween(2, 20);
for (int j = 0; j < frequency; ++j) {
indexRequests.add(client().prepareIndex("idx", "type").setSource(jsonBuilder()
diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
index 35c5a19cc2e..e5af22cd2ae 100644
--- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
+++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
@@ -63,10 +63,10 @@ import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.MockKeywordPlugin;
import org.hamcrest.Matcher;
import org.hamcrest.Matchers;
-import org.joda.time.DateTime;
-import org.joda.time.chrono.ISOChronology;
import java.io.IOException;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
@@ -2865,7 +2865,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
"field", "type=text,store=true,term_vector=with_positions_offsets")
.setSettings(Settings.builder().put("index.number_of_replicas", 0).put("index.number_of_shards", 2))
.get());
- DateTime now = new DateTime(ISOChronology.getInstanceUTC());
+ ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
indexRandom(true, client().prepareIndex("index-1", "type", "1").setSource("d", now, "field", "hello world"),
client().prepareIndex("index-1", "type", "2").setSource("d", now.minusDays(1), "field", "hello"),
client().prepareIndex("index-1", "type", "3").setSource("d", now.minusDays(2), "field", "world"));
diff --git a/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java b/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java
index ab5387b6e3f..452c00b9906 100644
--- a/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java
+++ b/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java
@@ -28,8 +28,8 @@ import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.document.DocumentField;
-import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
@@ -48,8 +48,9 @@ import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
-import org.joda.time.ReadableDateTime;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
@@ -546,6 +547,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).execute().actionGet();
+ ZonedDateTime date = ZonedDateTime.of(2012, 3, 22, 0, 0, 0, 0, ZoneOffset.UTC);
client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
.field("byte_field", (byte) 1)
.field("short_field", (short) 2)
@@ -553,7 +555,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
.field("long_field", 4L)
.field("float_field", 5.0f)
.field("double_field", 6.0d)
- .field("date_field", Joda.forPattern("dateOptionalTime").printer().print(new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC)))
+ .field("date_field", DateFormatters.forPattern("dateOptionalTime").format(date))
.field("boolean_field", true)
.field("binary_field", Base64.getEncoder().encodeToString("testing text".getBytes("UTF-8")))
.endObject()).execute().actionGet();
@@ -578,7 +580,6 @@ public class SearchFieldsIT extends ESIntegTestCase {
assertThat(fields, equalTo(newHashSet("byte_field", "short_field", "integer_field", "long_field",
"float_field", "double_field", "date_field", "boolean_field", "binary_field")));
-
SearchHit searchHit = searchResponse.getHits().getAt(0);
assertThat(searchHit.getFields().get("byte_field").getValue().toString(), equalTo("1"));
assertThat(searchHit.getFields().get("short_field").getValue().toString(), equalTo("2"));
@@ -586,7 +587,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
assertThat(searchHit.getFields().get("long_field").getValue(), equalTo((Object) 4L));
assertThat(searchHit.getFields().get("float_field").getValue(), equalTo((Object) 5.0f));
assertThat(searchHit.getFields().get("double_field").getValue(), equalTo((Object) 6.0d));
- String dateTime = Joda.forPattern("dateOptionalTime").printer().print(new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC));
+ String dateTime = DateFormatters.forPattern("dateOptionalTime").format(date);
assertThat(searchHit.getFields().get("date_field").getValue(), equalTo((Object) dateTime));
assertThat(searchHit.getFields().get("boolean_field").getValue(), equalTo((Object) Boolean.TRUE));
assertThat(searchHit.getFields().get("binary_field").getValue(), equalTo(new BytesArray("testing text" .getBytes("UTF8"))));
@@ -756,7 +757,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).execute().actionGet();
- ReadableDateTime date = new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC);
+ ZonedDateTime date = ZonedDateTime.of(2012, 3, 22, 0, 0, 0, 0, ZoneOffset.UTC);
client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
.field("text_field", "foo")
.field("keyword_field", "foo")
@@ -766,7 +767,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
.field("long_field", 4L)
.field("float_field", 5.0f)
.field("double_field", 6.0d)
- .field("date_field", Joda.forPattern("dateOptionalTime").printer().print(date))
+ .field("date_field", DateFormatters.forPattern("dateOptionalTime").format(date))
.field("boolean_field", true)
.field("binary_field", new byte[] {42, 100})
.field("ip_field", "::1")
@@ -802,7 +803,8 @@ public class SearchFieldsIT extends ESIntegTestCase {
assertThat(searchResponse.getHits().getAt(0).getFields().get("long_field").getValue(), equalTo((Object) 4L));
assertThat(searchResponse.getHits().getAt(0).getFields().get("float_field").getValue(), equalTo((Object) 5.0));
assertThat(searchResponse.getHits().getAt(0).getFields().get("double_field").getValue(), equalTo((Object) 6.0d));
- assertThat(searchResponse.getHits().getAt(0).getFields().get("date_field").getValue(), equalTo(date));
+ assertThat(searchResponse.getHits().getAt(0).getFields().get("date_field").getValue(),
+ equalTo(new DateTime(date.toInstant().toEpochMilli(), DateTimeZone.UTC)));
assertThat(searchResponse.getHits().getAt(0).getFields().get("boolean_field").getValue(), equalTo((Object) true));
assertThat(searchResponse.getHits().getAt(0).getFields().get("text_field").getValue(), equalTo("foo"));
assertThat(searchResponse.getHits().getAt(0).getFields().get("keyword_field").getValue(), equalTo("foo"));
@@ -839,7 +841,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
assertThat(searchResponse.getHits().getAt(0).getFields().get("float_field").getValue(), equalTo((Object) 5.0));
assertThat(searchResponse.getHits().getAt(0).getFields().get("double_field").getValue(), equalTo((Object) 6.0d));
assertThat(searchResponse.getHits().getAt(0).getFields().get("date_field").getValue(),
- equalTo(Joda.forPattern("dateOptionalTime").printer().print(date)));
+ equalTo(DateFormatters.forPattern("dateOptionalTime").format(date)));
assertThat(searchResponse.getHits().getAt(0).getFields().get("boolean_field").getValue(), equalTo((Object) true));
assertThat(searchResponse.getHits().getAt(0).getFields().get("text_field").getValue(), equalTo("foo"));
assertThat(searchResponse.getHits().getAt(0).getFields().get("keyword_field").getValue(), equalTo("foo"));
@@ -869,7 +871,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
assertThat(searchResponse.getHits().getAt(0).getFields().get("float_field").getValue(), equalTo("5.0"));
assertThat(searchResponse.getHits().getAt(0).getFields().get("double_field").getValue(), equalTo("6.0"));
assertThat(searchResponse.getHits().getAt(0).getFields().get("date_field").getValue(),
- equalTo(Joda.forPattern("epoch_millis").printer().print(date)));
+ equalTo(DateFormatters.forPattern("epoch_millis").format(date)));
}
public void testScriptFields() throws Exception {
diff --git a/server/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java b/server/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java
index d6acdf11cb2..a21893db392 100644
--- a/server/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java
+++ b/server/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java
@@ -43,9 +43,9 @@ import org.elasticsearch.search.SearchHits;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.VersionUtils;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -562,27 +562,27 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
}
public void testDateWithoutOrigin() throws Exception {
- DateTime dt = new DateTime(DateTimeZone.UTC);
+ ZonedDateTime dt = ZonedDateTime.now(ZoneOffset.UTC);
assertAcked(prepareCreate("test").addMapping(
"type1",
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "text")
.endObject().startObject("num1").field("type", "date").endObject().endObject().endObject().endObject()));
- DateTime docDate = dt.minusDays(1);
- String docDateString = docDate.getYear() + "-" + String.format(Locale.ROOT, "%02d", docDate.getMonthOfYear()) + "-"
+ ZonedDateTime docDate = dt.minusDays(1);
+ String docDateString = docDate.getYear() + "-" + String.format(Locale.ROOT, "%02d", docDate.getMonthValue()) + "-"
+ String.format(Locale.ROOT, "%02d", docDate.getDayOfMonth());
client().index(
indexRequest("test").type("type1").id("1")
.source(jsonBuilder().startObject().field("test", "value").field("num1", docDateString).endObject())).actionGet();
docDate = dt.minusDays(2);
- docDateString = docDate.getYear() + "-" + String.format(Locale.ROOT, "%02d", docDate.getMonthOfYear()) + "-"
+ docDateString = docDate.getYear() + "-" + String.format(Locale.ROOT, "%02d", docDate.getMonthValue()) + "-"
+ String.format(Locale.ROOT, "%02d", docDate.getDayOfMonth());
client().index(
indexRequest("test").type("type1").id("2")
.source(jsonBuilder().startObject().field("test", "value").field("num1", docDateString).endObject())).actionGet();
docDate = dt.minusDays(3);
- docDateString = docDate.getYear() + "-" + String.format(Locale.ROOT, "%02d", docDate.getMonthOfYear()) + "-"
+ docDateString = docDate.getYear() + "-" + String.format(Locale.ROOT, "%02d", docDate.getMonthValue()) + "-"
+ String.format(Locale.ROOT, "%02d", docDate.getDayOfMonth());
client().index(
indexRequest("test").type("type1").id("3")
diff --git a/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java
index eab3a6e9b48..be71867edd2 100644
--- a/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java
+++ b/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java
@@ -52,6 +52,9 @@ import org.joda.time.DateTimeZone;
import org.joda.time.format.ISODateTimeFormat;
import java.io.IOException;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
import java.util.Collection;
import java.util.Collections;
import java.util.Random;
@@ -480,8 +483,9 @@ public class SearchQueryIT extends ESIntegTestCase {
"type", "past", "type=date", "future", "type=date"
));
- String aMonthAgo = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).minusMonths(1));
- String aMonthFromNow = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).plusMonths(1));
+ ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
+ String aMonthAgo = DateTimeFormatter.ISO_LOCAL_DATE.format(now.minusMonths(1));
+ String aMonthFromNow = DateTimeFormatter.ISO_LOCAL_DATE.format(now.plusMonths(1));
client().prepareIndex("test", "type", "1").setSource("past", aMonthAgo, "future", aMonthFromNow).get();
refresh();
diff --git a/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java b/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java
index 8b3aff90e8d..cdbc2c702d8 100644
--- a/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java
+++ b/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java
@@ -35,12 +35,13 @@ import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
import org.hamcrest.Matcher;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-import org.joda.time.format.ISODateTimeFormat;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
+import java.time.temporal.ChronoUnit;
import java.util.Arrays;
import java.util.List;
@@ -124,8 +125,9 @@ public class SimpleValidateQueryIT extends ESIntegTestCase {
.put(indexSettings())
.put("index.number_of_shards", 1)));
- String aMonthAgo = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).minusMonths(1));
- String aMonthFromNow = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).plusMonths(1));
+ ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
+ String aMonthAgo = DateTimeFormatter.ISO_LOCAL_DATE.format(now.minus(1, ChronoUnit.MONTHS));
+ String aMonthFromNow = DateTimeFormatter.ISO_LOCAL_DATE.format(now.plus(1, ChronoUnit.MONTHS));
client().prepareIndex("test", "type", "1").setSource("past", aMonthAgo, "future", aMonthFromNow).get();
@@ -137,10 +139,10 @@ public class SimpleValidateQueryIT extends ESIntegTestCase {
assertNoFailures(response);
assertThat(response.getQueryExplanation().size(), equalTo(1));
assertThat(response.getQueryExplanation().get(0).getError(), nullValue());
- DateTime twoMonthsAgo = new DateTime(DateTimeZone.UTC).minusMonths(2).withTimeAtStartOfDay();
- DateTime now = new DateTime(DateTimeZone.UTC).plusDays(1).withTimeAtStartOfDay().minusMillis(1);
- assertThat(response.getQueryExplanation().get(0).getExplanation(),
- equalTo("past:[" + twoMonthsAgo.getMillis() + " TO " + now.getMillis() + "]"));
+
+ long twoMonthsAgo = now.minus(2, ChronoUnit.MONTHS).truncatedTo(ChronoUnit.DAYS).toEpochSecond() * 1000;
+ long rangeEnd = (now.plus(1, ChronoUnit.DAYS).truncatedTo(ChronoUnit.DAYS).toEpochSecond() * 1000) - 1;
+ assertThat(response.getQueryExplanation().get(0).getExplanation(), equalTo("past:[" + twoMonthsAgo + " TO " + rangeEnd + "]"));
assertThat(response.isValid(), equalTo(true));
}