Merge branch 'master' into index-lifecycle

Colin Goodheart-Smithe 2018-07-12 08:57:39 +01:00
commit 4192f387b4
76 changed files with 4810 additions and 1826 deletions

RequestConverters.java

@@ -43,6 +43,7 @@ import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
 import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
 import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
 import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest;
 import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
 import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
 import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
@@ -963,6 +964,20 @@ final class RequestConverters {
         return request;
     }
 
+    static Request snapshotsStatus(SnapshotsStatusRequest snapshotsStatusRequest) {
+        String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot")
+            .addPathPart(snapshotsStatusRequest.repository())
+            .addCommaSeparatedPathParts(snapshotsStatusRequest.snapshots())
+            .addPathPartAsIs("_status")
+            .build();
+        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
+
+        Params parameters = new Params(request);
+        parameters.withMasterTimeout(snapshotsStatusRequest.masterNodeTimeout());
+        parameters.withIgnoreUnavailable(snapshotsStatusRequest.ignoreUnavailable());
+        return request;
+    }
+
     static Request deleteSnapshot(DeleteSnapshotRequest deleteSnapshotRequest) {
         String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot")
             .addPathPart(deleteSnapshotRequest.repository())
@@ -1262,7 +1277,7 @@ final class RequestConverters {
         }
 
         Params withIndicesOptions(IndicesOptions indicesOptions) {
-            putParam("ignore_unavailable", Boolean.toString(indicesOptions.ignoreUnavailable()));
+            withIgnoreUnavailable(indicesOptions.ignoreUnavailable());
             putParam("allow_no_indices", Boolean.toString(indicesOptions.allowNoIndices()));
             String expandWildcards;
             if (indicesOptions.expandWildcardsOpen() == false && indicesOptions.expandWildcardsClosed() == false) {
@@ -1281,6 +1296,12 @@ final class RequestConverters {
             return this;
         }
 
+        Params withIgnoreUnavailable(boolean ignoreUnavailable) {
+            // Always explicitly place the ignore_unavailable value.
+            putParam("ignore_unavailable", Boolean.toString(ignoreUnavailable));
+            return this;
+        }
+
         Params withHuman(boolean human) {
             if (human) {
                 putParam("human", Boolean.toString(human));

SnapshotClient.java

@@ -30,6 +30,8 @@ import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
 import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse;
 import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
 import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse;
 import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
 import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse;
 import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
@@ -221,6 +223,35 @@ public final class SnapshotClient {
             GetSnapshotsResponse::fromXContent, listener, emptySet());
     }
 
+    /**
+     * Gets the status of requested snapshots.
+     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-snapshots.html"> Snapshot and Restore
+     * API on elastic.co</a>
+     * @param snapshotsStatusRequest the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @return the response
+     * @throws IOException in case there is a problem sending the request or parsing back the response
+     */
+    public SnapshotsStatusResponse status(SnapshotsStatusRequest snapshotsStatusRequest, RequestOptions options)
+            throws IOException {
+        return restHighLevelClient.performRequestAndParseEntity(snapshotsStatusRequest, RequestConverters::snapshotsStatus, options,
+            SnapshotsStatusResponse::fromXContent, emptySet());
+    }
+
+    /**
+     * Asynchronously gets the status of requested snapshots.
+     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-snapshots.html"> Snapshot and Restore
+     * API on elastic.co</a>
+     * @param snapshotsStatusRequest the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @param listener the listener to be notified upon request completion
+     */
+    public void statusAsync(SnapshotsStatusRequest snapshotsStatusRequest, RequestOptions options,
+                            ActionListener<SnapshotsStatusResponse> listener) {
+        restHighLevelClient.performRequestAsyncAndParseEntity(snapshotsStatusRequest, RequestConverters::snapshotsStatus, options,
+            SnapshotsStatusResponse::fromXContent, listener, emptySet());
+    }
+
     /**
      * Deletes a snapshot.
      * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-snapshots.html"> Snapshot and Restore
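
Putting the two new methods together, a caller would use them roughly as follows. This is a sketch only: the client variable and the repository and snapshot names are assumed, and error handling is omitted.

import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStatus;
import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest;
import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse;
import org.elasticsearch.client.RequestOptions;

// Assumes an existing RestHighLevelClient named `client` and a repository
// "my_repo" that contains a snapshot "my_snapshot".
SnapshotsStatusRequest request = new SnapshotsStatusRequest();
request.repository("my_repo");
request.snapshots(new String[]{"my_snapshot"});

SnapshotsStatusResponse response = client.snapshot().status(request, RequestOptions.DEFAULT);
for (SnapshotStatus snapshotStatus : response.getSnapshots()) {
    // Overall state plus per-index, per-shard statistics are available here.
    System.out.println(snapshotStatus.getSnapshot() + " -> " + snapshotStatus.getState());
}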

IndicesClientIT.java

@@ -612,7 +612,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
         createIndex(index, Settings.EMPTY);
         closeIndex(index);
         ResponseException exception = expectThrows(ResponseException.class,
-            () -> client().performRequest(HttpGet.METHOD_NAME, index + "/_search"));
+            () -> client().performRequest(new Request(HttpGet.METHOD_NAME, index + "/_search")));
         assertThat(exception.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus()));
         assertThat(exception.getMessage().contains(index), equalTo(true));
@@ -621,7 +621,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
             highLevelClient().indices()::openAsync);
         assertTrue(openIndexResponse.isAcknowledged());
 
-        Response response = client().performRequest(HttpGet.METHOD_NAME, index + "/_search");
+        Response response = client().performRequest(new Request(HttpGet.METHOD_NAME, index + "/_search"));
         assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
     }
@@ -650,7 +650,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
     public void testCloseExistingIndex() throws IOException {
         String index = "index";
         createIndex(index, Settings.EMPTY);
-        Response response = client().performRequest(HttpGet.METHOD_NAME, index + "/_search");
+        Response response = client().performRequest(new Request(HttpGet.METHOD_NAME, index + "/_search"));
         assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
 
         CloseIndexRequest closeIndexRequest = new CloseIndexRequest(index);
@@ -659,7 +659,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
         assertTrue(closeIndexResponse.isAcknowledged());
 
         ResponseException exception = expectThrows(ResponseException.class,
-            () -> client().performRequest(HttpGet.METHOD_NAME, index + "/_search"));
+            () -> client().performRequest(new Request(HttpGet.METHOD_NAME, index + "/_search")));
         assertThat(exception.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus()));
         assertThat(exception.getMessage().contains(index), equalTo(true));
     }
@@ -817,7 +817,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
         assertFalse(execute(getAliasesRequest, highLevelClient().indices()::existsAlias, highLevelClient().indices()::existsAliasAsync));
 
         createIndex("index", Settings.EMPTY);
-        client().performRequest(HttpPut.METHOD_NAME, "/index/_alias/alias");
+        client().performRequest(new Request(HttpPut.METHOD_NAME, "/index/_alias/alias"));
         assertTrue(execute(getAliasesRequest, highLevelClient().indices()::existsAlias, highLevelClient().indices()::existsAliasAsync));
 
         GetAliasesRequest getAliasesRequest2 = new GetAliasesRequest();
@@ -936,10 +936,10 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
     public void testGetAlias() throws IOException {
         {
             createIndex("index1", Settings.EMPTY);
-            client().performRequest(HttpPut.METHOD_NAME, "/index1/_alias/alias1");
+            client().performRequest(new Request(HttpPut.METHOD_NAME, "/index1/_alias/alias1"));
 
             createIndex("index2", Settings.EMPTY);
-            client().performRequest(HttpPut.METHOD_NAME, "/index2/_alias/alias2");
+            client().performRequest(new Request(HttpPut.METHOD_NAME, "/index2/_alias/alias2"));
 
             createIndex("index3", Settings.EMPTY);
         }
@@ -1075,7 +1075,7 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
             assertThat(getAliasesResponse.getError(), equalTo("alias [" + alias + "] missing"));
         }
         createIndex(index, Settings.EMPTY);
-        client().performRequest(HttpPut.METHOD_NAME, index + "/_alias/" + alias);
+        client().performRequest(new Request(HttpPut.METHOD_NAME, index + "/_alias/" + alias));
         {
             GetAliasesRequest getAliasesRequest = new GetAliasesRequest().indices(index, "non_existent_index");
             GetAliasesResponse getAliasesResponse = execute(getAliasesRequest, highLevelClient().indices()::getAlias,

PingAndInfoIT.java

@@ -39,7 +39,7 @@ public class PingAndInfoIT extends ESRestHighLevelClientTestCase {
     public void testInfo() throws IOException {
         MainResponse info = highLevelClient().info(RequestOptions.DEFAULT);
         // compare with what the low level client outputs
-        Map<String, Object> infoAsMap = entityAsMap(adminClient().performRequest(HttpGet.METHOD_NAME, "/"));
+        Map<String, Object> infoAsMap = entityAsMap(adminClient().performRequest(new Request(HttpGet.METHOD_NAME, "/")));
         assertEquals(infoAsMap.get("cluster_name"), info.getClusterName().value());
         assertEquals(infoAsMap.get("cluster_uuid"), info.getClusterUuid());

RankEvalIT.java

@@ -19,8 +19,6 @@
 package org.elasticsearch.client;
 
-import org.apache.http.entity.ContentType;
-import org.apache.http.entity.StringEntity;
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.index.query.MatchAllQueryBuilder;
@@ -37,7 +35,6 @@ import org.junit.Before;
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.stream.Collectors;
@@ -49,19 +46,17 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase {
     @Before
     public void indexDocuments() throws IOException {
-        StringEntity doc = new StringEntity("{\"text\":\"berlin\"}", ContentType.APPLICATION_JSON);
-        client().performRequest("PUT", "/index/doc/1", Collections.emptyMap(), doc);
-        doc = new StringEntity("{\"text\":\"amsterdam\"}", ContentType.APPLICATION_JSON);
-        client().performRequest("PUT", "/index/doc/2", Collections.emptyMap(), doc);
-        client().performRequest("PUT", "/index/doc/3", Collections.emptyMap(), doc);
-        client().performRequest("PUT", "/index/doc/4", Collections.emptyMap(), doc);
-        client().performRequest("PUT", "/index/doc/5", Collections.emptyMap(), doc);
-        client().performRequest("PUT", "/index/doc/6", Collections.emptyMap(), doc);
-        client().performRequest("POST", "/index/_refresh");
-        // add another index to test basic multi index support
-        client().performRequest("PUT", "/index2/doc/7", Collections.emptyMap(), doc);
-        client().performRequest("POST", "/index2/_refresh");
+        Request berlin = new Request("PUT", "/index/doc/berlin");
+        berlin.setJsonEntity("{\"text\":\"berlin\"}");
+        client().performRequest(berlin);
+        for (int i = 0; i < 6; i++) {
+            // add another index to test basic multi index support
+            String index = i == 0 ? "index2" : "index";
+            Request amsterdam = new Request("PUT", "/" + index + "/doc/amsterdam" + i);
+            amsterdam.setJsonEntity("{\"text\":\"amsterdam\"}");
+            client().performRequest(amsterdam);
+        }
+        client().performRequest(new Request("POST", "/_refresh"));
     }
 
     /**
@@ -71,10 +66,10 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase {
     public void testRankEvalRequest() throws IOException {
         SearchSourceBuilder testQuery = new SearchSourceBuilder();
         testQuery.query(new MatchAllQueryBuilder());
-        List<RatedDocument> amsterdamRatedDocs = createRelevant("index" , "2", "3", "4", "5");
-        amsterdamRatedDocs.addAll(createRelevant("index2", "7"));
+        List<RatedDocument> amsterdamRatedDocs = createRelevant("index" , "amsterdam1", "amsterdam2", "amsterdam3", "amsterdam4");
+        amsterdamRatedDocs.addAll(createRelevant("index2", "amsterdam0"));
         RatedRequest amsterdamRequest = new RatedRequest("amsterdam_query", amsterdamRatedDocs, testQuery);
-        RatedRequest berlinRequest = new RatedRequest("berlin_query", createRelevant("index", "1"), testQuery);
+        RatedRequest berlinRequest = new RatedRequest("berlin_query", createRelevant("index", "berlin"), testQuery);
         List<RatedRequest> specifications = new ArrayList<>();
         specifications.add(amsterdamRequest);
         specifications.add(berlinRequest);
@@ -94,7 +89,7 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase {
         assertEquals(7, hitsAndRatings.size());
         for (RatedSearchHit hit : hitsAndRatings) {
             String id = hit.getSearchHit().getId();
-            if (id.equals("1") || id.equals("6")) {
+            if (id.equals("berlin") || id.equals("amsterdam5")) {
                 assertFalse(hit.getRating().isPresent());
             } else {
                 assertEquals(1, hit.getRating().get().intValue());
@@ -106,7 +101,7 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase {
         assertEquals(7, hitsAndRatings.size());
         for (RatedSearchHit hit : hitsAndRatings) {
             String id = hit.getSearchHit().getId();
-            if (id.equals("1")) {
+            if (id.equals("berlin")) {
                 assertEquals(1, hit.getRating().get().intValue());
             } else {
                 assertFalse(hit.getRating().isPresent());
@@ -114,7 +109,7 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase {
         }
 
         // now try this when test2 is closed
-        client().performRequest("POST", "index2/_close", Collections.emptyMap());
+        client().performRequest(new Request("POST", "index2/_close"));
         rankEvalRequest.indicesOptions(IndicesOptions.fromParameters(null, "true", null, SearchRequest.DEFAULT_INDICES_OPTIONS));
         response = execute(rankEvalRequest, highLevelClient()::rankEval, highLevelClient()::rankEvalAsync);
     }
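
The RankEvalIT rewrite above is representative of the whole commit: every call to the deprecated performRequest(method, endpoint, params, entity) overloads is replaced by a Request object that carries the method, endpoint, query parameters, and JSON body. Reduced to a minimal sketch (the index, type, and document id are placeholders, and client() is the low-level test client from ESRestTestCase):

import org.elasticsearch.client.Request;

// Old style (removed in this commit):
//   StringEntity entity = new StringEntity("{\"text\":\"berlin\"}", ContentType.APPLICATION_JSON);
//   client().performRequest("PUT", "/index/doc/1", Collections.emptyMap(), entity);

// New style: build the Request, attach the body, then execute it.
Request doc = new Request("PUT", "/index/doc/1");
doc.setJsonEntity("{\"text\":\"berlin\"}");
client().performRequest(doc);

Besides removing the deprecated overloads, the Request object makes the body's content type explicit (setJsonEntity always sends application/json), which is why the ContentType and StringEntity imports disappear from these tests.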

RequestConvertersTests.java

@@ -43,6 +43,7 @@ import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
 import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest;
 import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
 import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest;
 import org.elasticsearch.action.admin.indices.alias.Alias;
 import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
 import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
@@ -175,6 +176,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.Matchers.hasEntry;
 import static org.hamcrest.Matchers.hasKey;
+import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;
@@ -2171,6 +2173,29 @@ public class RequestConvertersTests extends ESTestCase {
         assertNull(request.getEntity());
     }
 
+    public void testSnapshotsStatus() {
+        Map<String, String> expectedParams = new HashMap<>();
+        String repository = randomIndicesNames(1, 1)[0];
+        String[] snapshots = randomIndicesNames(1, 5);
+        StringBuilder snapshotNames = new StringBuilder(snapshots[0]);
+        for (int idx = 1; idx < snapshots.length; idx++) {
+            snapshotNames.append(",").append(snapshots[idx]);
+        }
+        boolean ignoreUnavailable = randomBoolean();
+        String endpoint = "/_snapshot/" + repository + "/" + snapshotNames.toString() + "/_status";
+
+        SnapshotsStatusRequest snapshotsStatusRequest = new SnapshotsStatusRequest(repository, snapshots);
+        setRandomMasterTimeout(snapshotsStatusRequest, expectedParams);
+        snapshotsStatusRequest.ignoreUnavailable(ignoreUnavailable);
+        expectedParams.put("ignore_unavailable", Boolean.toString(ignoreUnavailable));
+
+        Request request = RequestConverters.snapshotsStatus(snapshotsStatusRequest);
+        assertThat(request.getEndpoint(), equalTo(endpoint));
+        assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME));
+        assertThat(request.getParameters(), equalTo(expectedParams));
+        assertThat(request.getEntity(), is(nullValue()));
+    }
+
     public void testDeleteSnapshot() {
         Map<String, String> expectedParams = new HashMap<>();
         String repository = randomIndicesNames(1, 1)[0];

SearchIT.java

@@ -19,12 +19,8 @@
 package org.elasticsearch.client;
 
-import org.apache.http.HttpEntity;
 import org.apache.http.client.methods.HttpPost;
 import org.apache.http.client.methods.HttpPut;
-import org.apache.http.entity.ContentType;
-import org.apache.http.entity.StringEntity;
-import org.apache.http.nio.entity.NStringEntity;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.action.explain.ExplainRequest;
@@ -101,85 +97,106 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
     @Before
     public void indexDocuments() throws IOException {
-        StringEntity doc1 = new StringEntity("{\"type\":\"type1\", \"num\":10, \"num2\":50}", ContentType.APPLICATION_JSON);
-        client().performRequest(HttpPut.METHOD_NAME, "/index/type/1", Collections.emptyMap(), doc1);
-        StringEntity doc2 = new StringEntity("{\"type\":\"type1\", \"num\":20, \"num2\":40}", ContentType.APPLICATION_JSON);
-        client().performRequest(HttpPut.METHOD_NAME, "/index/type/2", Collections.emptyMap(), doc2);
-        StringEntity doc3 = new StringEntity("{\"type\":\"type1\", \"num\":50, \"num2\":35}", ContentType.APPLICATION_JSON);
-        client().performRequest(HttpPut.METHOD_NAME, "/index/type/3", Collections.emptyMap(), doc3);
-        StringEntity doc4 = new StringEntity("{\"type\":\"type2\", \"num\":100, \"num2\":10}", ContentType.APPLICATION_JSON);
-        client().performRequest(HttpPut.METHOD_NAME, "/index/type/4", Collections.emptyMap(), doc4);
-        StringEntity doc5 = new StringEntity("{\"type\":\"type2\", \"num\":100, \"num2\":10}", ContentType.APPLICATION_JSON);
-        client().performRequest(HttpPut.METHOD_NAME, "/index/type/5", Collections.emptyMap(), doc5);
-        client().performRequest(HttpPost.METHOD_NAME, "/index/_refresh");
-
-        StringEntity doc = new StringEntity("{\"field\":\"value1\", \"rating\": 7}", ContentType.APPLICATION_JSON);
-        client().performRequest(HttpPut.METHOD_NAME, "/index1/doc/1", Collections.emptyMap(), doc);
-        doc = new StringEntity("{\"field\":\"value2\"}", ContentType.APPLICATION_JSON);
-        client().performRequest(HttpPut.METHOD_NAME, "/index1/doc/2", Collections.emptyMap(), doc);
-        StringEntity mappings = new StringEntity(
-            "{" +
-            "  \"mappings\": {" +
-            "    \"doc\": {" +
-            "      \"properties\": {" +
-            "        \"rating\": {" +
-            "          \"type\": \"keyword\"" +
-            "        }" +
-            "      }" +
-            "    }" +
-            "  }" +
-            "}}",
-            ContentType.APPLICATION_JSON);
-        client().performRequest("PUT", "/index2", Collections.emptyMap(), mappings);
-        doc = new StringEntity("{\"field\":\"value1\", \"rating\": \"good\"}", ContentType.APPLICATION_JSON);
-        client().performRequest(HttpPut.METHOD_NAME, "/index2/doc/3", Collections.emptyMap(), doc);
-        doc = new StringEntity("{\"field\":\"value2\"}", ContentType.APPLICATION_JSON);
-        client().performRequest(HttpPut.METHOD_NAME, "/index2/doc/4", Collections.emptyMap(), doc);
-        doc = new StringEntity("{\"field\":\"value1\"}", ContentType.APPLICATION_JSON);
-        client().performRequest(HttpPut.METHOD_NAME, "/index3/doc/5", Collections.emptyMap(), doc);
-        doc = new StringEntity("{\"field\":\"value2\"}", ContentType.APPLICATION_JSON);
-        client().performRequest(HttpPut.METHOD_NAME, "/index3/doc/6", Collections.emptyMap(), doc);
-        mappings = new StringEntity(
-            "{" +
-            "  \"mappings\": {" +
-            "    \"doc\": {" +
-            "      \"properties\": {" +
-            "        \"field1\": {" +
-            "          \"type\": \"keyword\"," +
-            "          \"store\": true" +
-            "        }," +
-            "        \"field2\": {" +
-            "          \"type\": \"keyword\"," +
-            "          \"store\": true" +
-            "        }" +
-            "      }" +
-            "    }" +
-            "  }" +
-            "}}",
-            ContentType.APPLICATION_JSON);
-        client().performRequest(HttpPut.METHOD_NAME, "/index4", Collections.emptyMap(), mappings);
-        doc = new StringEntity("{\"field1\":\"value1\", \"field2\":\"value2\"}", ContentType.APPLICATION_JSON);
-        client().performRequest(HttpPut.METHOD_NAME, "/index4/doc/1", Collections.emptyMap(), doc);
-        StringEntity aliasFilter = new StringEntity(
-            "{" +
-            "  \"actions\" : [" +
-            "    {" +
-            "      \"add\" : {" +
-            "        \"index\" : \"index4\"," +
-            "        \"alias\" : \"alias4\"," +
-            "        \"filter\" : { \"term\" : { \"field2\" : \"value1\" } }" +
-            "      }" +
-            "    }" +
-            "  ]" +
-            "}",
-            ContentType.APPLICATION_JSON);
-        client().performRequest(HttpPost.METHOD_NAME, "/_aliases", Collections.emptyMap(), aliasFilter);
-
-        client().performRequest(HttpPost.METHOD_NAME, "/index1,index2,index3,index4/_refresh");
+        {
+            Request doc1 = new Request(HttpPut.METHOD_NAME, "/index/type/1");
+            doc1.setJsonEntity("{\"type\":\"type1\", \"num\":10, \"num2\":50}");
+            client().performRequest(doc1);
+            Request doc2 = new Request(HttpPut.METHOD_NAME, "/index/type/2");
+            doc2.setJsonEntity("{\"type\":\"type1\", \"num\":20, \"num2\":40}");
+            client().performRequest(doc2);
+            Request doc3 = new Request(HttpPut.METHOD_NAME, "/index/type/3");
+            doc3.setJsonEntity("{\"type\":\"type1\", \"num\":50, \"num2\":35}");
+            client().performRequest(doc3);
+            Request doc4 = new Request(HttpPut.METHOD_NAME, "/index/type/4");
+            doc4.setJsonEntity("{\"type\":\"type2\", \"num\":100, \"num2\":10}");
+            client().performRequest(doc4);
+            Request doc5 = new Request(HttpPut.METHOD_NAME, "/index/type/5");
+            doc5.setJsonEntity("{\"type\":\"type2\", \"num\":100, \"num2\":10}");
+            client().performRequest(doc5);
+        }
+        {
+            Request doc1 = new Request(HttpPut.METHOD_NAME, "/index1/doc/1");
+            doc1.setJsonEntity("{\"field\":\"value1\", \"rating\": 7}");
+            client().performRequest(doc1);
+            Request doc2 = new Request(HttpPut.METHOD_NAME, "/index1/doc/2");
+            doc2.setJsonEntity("{\"field\":\"value2\"}");
+            client().performRequest(doc2);
+        }
+        {
+            Request create = new Request("PUT", "/index2");
+            create.setJsonEntity(
+                "{" +
+                "  \"mappings\": {" +
+                "    \"doc\": {" +
+                "      \"properties\": {" +
+                "        \"rating\": {" +
+                "          \"type\": \"keyword\"" +
+                "        }" +
+                "      }" +
+                "    }" +
+                "  }" +
+                "}");
+            client().performRequest(create);
+            Request doc3 = new Request(HttpPut.METHOD_NAME, "/index2/doc/3");
+            doc3.setJsonEntity("{\"field\":\"value1\", \"rating\": \"good\"}");
+            client().performRequest(doc3);
+            Request doc4 = new Request(HttpPut.METHOD_NAME, "/index2/doc/4");
+            doc4.setJsonEntity("{\"field\":\"value2\"}");
+            client().performRequest(doc4);
+        }
+        {
+            Request doc5 = new Request(HttpPut.METHOD_NAME, "/index3/doc/5");
+            doc5.setJsonEntity("{\"field\":\"value1\"}");
+            client().performRequest(doc5);
+            Request doc6 = new Request(HttpPut.METHOD_NAME, "/index3/doc/6");
+            doc6.setJsonEntity("{\"field\":\"value2\"}");
+            client().performRequest(doc6);
+        }
+        {
+            Request create = new Request(HttpPut.METHOD_NAME, "/index4");
+            create.setJsonEntity(
+                "{" +
+                "  \"mappings\": {" +
+                "    \"doc\": {" +
+                "      \"properties\": {" +
+                "        \"field1\": {" +
+                "          \"type\": \"keyword\"," +
+                "          \"store\": true" +
+                "        }," +
+                "        \"field2\": {" +
+                "          \"type\": \"keyword\"," +
+                "          \"store\": true" +
+                "        }" +
+                "      }" +
+                "    }" +
+                "  }" +
+                "}");
+            client().performRequest(create);
+            Request doc1 = new Request(HttpPut.METHOD_NAME, "/index4/doc/1");
+            doc1.setJsonEntity("{\"field1\":\"value1\", \"field2\":\"value2\"}");
+            client().performRequest(doc1);
+
+            Request createFilteredAlias = new Request(HttpPost.METHOD_NAME, "/_aliases");
+            createFilteredAlias.setJsonEntity(
+                "{" +
+                "  \"actions\" : [" +
+                "    {" +
+                "      \"add\" : {" +
+                "        \"index\" : \"index4\"," +
+                "        \"alias\" : \"alias4\"," +
+                "        \"filter\" : { \"term\" : { \"field2\" : \"value1\" } }" +
+                "      }" +
+                "    }" +
+                "  ]" +
+                "}");
+            client().performRequest(createFilteredAlias);
+        }
+        client().performRequest(new Request(HttpPost.METHOD_NAME, "/_refresh"));
     }
 
     public void testSearchNoQuery() throws IOException {
@@ -377,7 +394,9 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
     public void testSearchWithParentJoin() throws IOException {
         final String indexName = "child_example";
-        StringEntity parentMapping = new StringEntity("{\n" +
+        Request createIndex = new Request(HttpPut.METHOD_NAME, "/" + indexName);
+        createIndex.setJsonEntity(
+            "{\n" +
             "    \"mappings\": {\n" +
             "        \"qa\" : {\n" +
             "            \"properties\" : {\n" +
@@ -388,9 +407,11 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
             "            }\n" +
             "        }\n" +
             "    }" +
-            "}", ContentType.APPLICATION_JSON);
-        client().performRequest(HttpPut.METHOD_NAME, "/" + indexName, Collections.emptyMap(), parentMapping);
-        StringEntity questionDoc = new StringEntity("{\n" +
+            "}");
+        client().performRequest(createIndex);
+        Request questionDoc = new Request(HttpPut.METHOD_NAME, "/" + indexName + "/qa/1");
+        questionDoc.setJsonEntity(
+            "{\n" +
             "    \"body\": \"<p>I have Windows 2003 server and i bought a new Windows 2008 server...\",\n" +
             "    \"title\": \"Whats the best way to file transfer my site from server to a newer one?\",\n" +
             "    \"tags\": [\n" +
@@ -399,9 +420,12 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
             "        \"file-transfer\"\n" +
             "    ],\n" +
             "    \"qa_join_field\" : \"question\"\n" +
-            "}", ContentType.APPLICATION_JSON);
-        client().performRequest(HttpPut.METHOD_NAME, "/" + indexName + "/qa/1", Collections.emptyMap(), questionDoc);
-        StringEntity answerDoc1 = new StringEntity("{\n" +
+            "}");
+        client().performRequest(questionDoc);
+        Request answerDoc1 = new Request(HttpPut.METHOD_NAME, "/" + indexName + "/qa/2");
+        answerDoc1.addParameter("routing", "1");
+        answerDoc1.setJsonEntity(
+            "{\n" +
             "    \"owner\": {\n" +
             "        \"location\": \"Norfolk, United Kingdom\",\n" +
             "        \"display_name\": \"Sam\",\n" +
@@ -413,9 +437,12 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
             "        \"parent\" : \"1\"\n" +
             "    },\n" +
             "    \"creation_date\": \"2009-05-04T13:45:37.030\"\n" +
-            "}", ContentType.APPLICATION_JSON);
-        client().performRequest(HttpPut.METHOD_NAME, "/" + indexName + "/qa/2", Collections.singletonMap("routing", "1"), answerDoc1);
-        StringEntity answerDoc2 = new StringEntity("{\n" +
+            "}");
+        client().performRequest(answerDoc1);
+        Request answerDoc2 = new Request(HttpPut.METHOD_NAME, "/" + indexName + "/qa/3");
+        answerDoc2.addParameter("routing", "1");
+        answerDoc2.setJsonEntity(
+            "{\n" +
             "    \"owner\": {\n" +
             "        \"location\": \"Norfolk, United Kingdom\",\n" +
             "        \"display_name\": \"Troll\",\n" +
@@ -427,9 +454,9 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
             "        \"parent\" : \"1\"\n" +
             "    },\n" +
             "    \"creation_date\": \"2009-05-05T13:45:37.030\"\n" +
-            "}", ContentType.APPLICATION_JSON);
-        client().performRequest(HttpPut.METHOD_NAME, "/" + indexName + "/qa/3", Collections.singletonMap("routing", "1"), answerDoc2);
-        client().performRequest(HttpPost.METHOD_NAME, "/_refresh");
+            "}");
+        client().performRequest(answerDoc2);
+        client().performRequest(new Request(HttpPost.METHOD_NAME, "/_refresh"));
 
         TermsAggregationBuilder leafTermAgg = new TermsAggregationBuilder("top-names", ValueType.STRING)
             .field("owner.display_name.keyword").size(10);
@@ -506,9 +533,10 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
     }
 
     public void testSearchWithWeirdScriptFields() throws Exception {
-        HttpEntity entity = new NStringEntity("{ \"field\":\"value\"}", ContentType.APPLICATION_JSON);
-        client().performRequest("PUT", "test/type/1", Collections.emptyMap(), entity);
-        client().performRequest("POST", "/test/_refresh");
+        Request doc = new Request("PUT", "test/type/1");
+        doc.setJsonEntity("{\"field\":\"value\"}");
+        client().performRequest(doc);
+        client().performRequest(new Request("POST", "/test/_refresh"));
         {
             SearchRequest searchRequest = new SearchRequest("test").source(SearchSourceBuilder.searchSource()
@@ -547,13 +575,13 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
     }
 
     public void testSearchScroll() throws Exception {
         for (int i = 0; i < 100; i++) {
             XContentBuilder builder = jsonBuilder().startObject().field("field", i).endObject();
-            HttpEntity entity = new NStringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON);
-            client().performRequest(HttpPut.METHOD_NAME, "test/type1/" + Integer.toString(i), Collections.emptyMap(), entity);
+            Request doc = new Request(HttpPut.METHOD_NAME, "/test/type1/" + Integer.toString(i));
+            doc.setJsonEntity(Strings.toString(builder));
+            client().performRequest(doc);
         }
-        client().performRequest(HttpPost.METHOD_NAME, "/test/_refresh");
+        client().performRequest(new Request(HttpPost.METHOD_NAME, "/test/_refresh"));
 
         SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().size(35).sort("field", SortOrder.ASC);
         SearchRequest searchRequest = new SearchRequest("test").scroll(TimeValue.timeValueMinutes(2)).source(searchSourceBuilder);
@@ -878,11 +906,11 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
         assertToXContentEquivalent(expectedSource, actualSource, XContentType.JSON);
     }
 
     public void testMultiSearchTemplate() throws Exception {
         MultiSearchTemplateRequest multiSearchTemplateRequest = new MultiSearchTemplateRequest();
 
         SearchTemplateRequest goodRequest = new SearchTemplateRequest();
         goodRequest.setRequest(new SearchRequest("index"));
         goodRequest.setScriptType(ScriptType.INLINE);
@@ -900,8 +928,8 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
         goodRequest.setExplain(true);
         goodRequest.setProfile(true);
         multiSearchTemplateRequest.add(goodRequest);
 
         SearchTemplateRequest badRequest = new SearchTemplateRequest();
         badRequest.setRequest(new SearchRequest("index"));
         badRequest.setScriptType(ScriptType.INLINE);
@@ -910,17 +938,17 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
         scriptParams.put("number", 10);
         badRequest.setScriptParams(scriptParams);
         multiSearchTemplateRequest.add(badRequest);
 
         MultiSearchTemplateResponse multiSearchTemplateResponse =
             execute(multiSearchTemplateRequest, highLevelClient()::multiSearchTemplate,
                 highLevelClient()::multiSearchTemplateAsync);
 
         Item[] responses = multiSearchTemplateResponse.getResponses();
 
         assertEquals(2, responses.length);
 
         assertNull(responses[0].getResponse().getSource());
         SearchResponse goodResponse = responses[0].getResponse().getResponse();
         assertNotNull(goodResponse);
@@ -930,18 +958,18 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
         assertThat(goodResponse.getHits().getMaxScore(), greaterThan(0f));
         SearchHit hit = goodResponse.getHits().getHits()[0];
         assertNotNull(hit.getExplanation());
         assertFalse(goodResponse.getProfileResults().isEmpty());
 
         assertNull(responses[0].getResponse().getSource());
         assertThat(responses[1].isFailure(), Matchers.is(true));
         assertNotNull(responses[1].getFailureMessage());
         assertThat(responses[1].getFailureMessage(), containsString("json_parse_exception"));
     }
 
     public void testMultiSearchTemplateAllBad() throws Exception {
         MultiSearchTemplateRequest multiSearchTemplateRequest = new MultiSearchTemplateRequest();
 
         SearchTemplateRequest badRequest1 = new SearchTemplateRequest();
         badRequest1.setRequest(new SearchRequest("index"));
         badRequest1.setScriptType(ScriptType.INLINE);
@@ -957,8 +985,8 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
         scriptParams.put("number", "BAD NUMBER");
         badRequest1.setScriptParams(scriptParams);
         multiSearchTemplateRequest.add(badRequest1);
 
         SearchTemplateRequest badRequest2 = new SearchTemplateRequest();
         badRequest2.setRequest(new SearchRequest("index"));
         badRequest2.setScriptType(ScriptType.INLINE);
@@ -967,13 +995,13 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
         scriptParams.put("number", "BAD NUMBER");
         badRequest2.setScriptParams(scriptParams);
         multiSearchTemplateRequest.add(badRequest2);
 
         // The whole HTTP request should fail if no nested search requests are valid
         ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class,
             () -> execute(multiSearchTemplateRequest, highLevelClient()::multiSearchTemplate,
                 highLevelClient()::multiSearchTemplateAsync));
 
         assertEquals(RestStatus.BAD_REQUEST, exception.status());
         assertThat(exception.getMessage(), containsString("no requests added"));
     }

SnapshotIT.java

@@ -28,6 +28,9 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
 import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse;
 import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
 import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse;
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
 import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
 import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
@@ -43,6 +46,7 @@ import java.util.stream.Collectors;
 
 import static org.hamcrest.Matchers.contains;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
 
 public class SnapshotIT extends ESRestHighLevelClientTestCase {
@@ -173,6 +177,34 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase {
             contains("test_snapshot1", "test_snapshot2"));
     }
 
+    public void testSnapshotsStatus() throws IOException {
+        String testRepository = "test";
+        String testSnapshot = "snapshot";
+        String testIndex = "test_index";
+
+        PutRepositoryResponse putRepositoryResponse = createTestRepository(testRepository, FsRepository.TYPE, "{\"location\": \".\"}");
+        assertTrue(putRepositoryResponse.isAcknowledged());
+
+        createIndex(testIndex, Settings.EMPTY);
+
+        CreateSnapshotRequest createSnapshotRequest = new CreateSnapshotRequest(testRepository, testSnapshot);
+        createSnapshotRequest.indices(testIndex);
+        createSnapshotRequest.waitForCompletion(true);
+        CreateSnapshotResponse createSnapshotResponse = createTestSnapshot(createSnapshotRequest);
+        // check that the request went ok without parsing JSON here. When using the high level client, check acknowledgement instead.
+        assertEquals(RestStatus.OK, createSnapshotResponse.status());
+
+        SnapshotsStatusRequest request = new SnapshotsStatusRequest();
+        request.repository(testRepository);
+        request.snapshots(new String[]{testSnapshot});
+        SnapshotsStatusResponse response = execute(request, highLevelClient().snapshot()::status,
+            highLevelClient().snapshot()::statusAsync);
+        assertThat(response.getSnapshots().size(), equalTo(1));
+        assertThat(response.getSnapshots().get(0).getSnapshot().getRepository(), equalTo(testRepository));
+        assertThat(response.getSnapshots().get(0).getSnapshot().getSnapshotId().getName(), equalTo(testSnapshot));
+        assertThat(response.getSnapshots().get(0).getIndices().containsKey(testIndex), is(true));
+    }
+
     public void testDeleteSnapshot() throws IOException {
         String repository = "test_repository";
         String snapshot = "test_snapshot";

CRUDDocumentationIT.java

@@ -19,8 +19,6 @@
 package org.elasticsearch.client.documentation;
 
-import org.apache.http.entity.ContentType;
-import org.apache.http.nio.entity.NStringEntity;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.DocWriteRequest;
@@ -66,7 +64,6 @@ import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 
-import java.util.Collections;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.Map;
@@ -756,7 +753,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
     public void testGet() throws Exception {
         RestHighLevelClient client = highLevelClient();
         {
-            String mappings = "{\n" +
+            Request createIndex = new Request("PUT", "/posts");
+            createIndex.setJsonEntity(
+                "{\n" +
                 " \"mappings\" : {\n" +
                 " \"doc\" : {\n" +
                 " \"properties\" : {\n" +
@@ -767,10 +766,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
                 " }\n" +
                 " }\n" +
                 " }\n" +
-                "}";
-
-            NStringEntity entity = new NStringEntity(mappings, ContentType.APPLICATION_JSON);
-            Response response = client().performRequest("PUT", "/posts", Collections.emptyMap(), entity);
+                "}");
+            Response response = client().performRequest(createIndex);
             assertEquals(200, response.getStatusLine().getStatusCode());
 
             IndexRequest indexRequest = new IndexRequest("posts", "doc", "1")
@@ -1071,21 +1068,21 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
         RestHighLevelClient client = highLevelClient();
         {
-            String mappings = "{\n" +
-                " \"mappings\" : {\n" +
-                " \"type\" : {\n" +
-                " \"properties\" : {\n" +
-                " \"foo\" : {\n" +
-                " \"type\": \"text\",\n" +
-                " \"store\": true\n" +
-                " }\n" +
-                " }\n" +
-                " }\n" +
-                " }\n" +
-                "}";
-
-            NStringEntity entity = new NStringEntity(mappings, ContentType.APPLICATION_JSON);
-            Response response = client().performRequest("PUT", "/index", Collections.emptyMap(), entity);
+            Request createIndex = new Request("PUT", "/index");
+            createIndex.setJsonEntity(
+                "{\n" +
+                " \"mappings\" : {\n" +
+                " \"type\" : {\n" +
+                " \"properties\" : {\n" +
+                " \"foo\" : {\n" +
+                " \"type\": \"text\",\n" +
+                " \"store\": true\n" +
+                " }\n" +
+                " }\n" +
+                " }\n" +
+                " }\n" +
+                "}");
+            Response response = client().performRequest(createIndex);
             assertEquals(200, response.getStatusLine().getStatusCode());
         }

SnapshotClientDocumentationIT.java

@@ -37,11 +37,16 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
 import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStats;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStatus;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse;
 import org.elasticsearch.client.ESRestHighLevelClientTestCase;
 import org.elasticsearch.client.Request;
 import org.elasticsearch.client.RequestOptions;
 import org.elasticsearch.client.Response;
 import org.elasticsearch.client.RestHighLevelClient;
+import org.elasticsearch.cluster.SnapshotsInProgress;
 import org.elasticsearch.cluster.metadata.RepositoryMetaData;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
@@ -84,8 +89,8 @@ import static org.hamcrest.Matchers.equalTo;
 public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase {
 
     private static final String repositoryName = "test_repository";
     private static final String snapshotName = "test_snapshot";
+    private static final String indexName = "test_index";
 
     public void testSnapshotCreateRepository() throws IOException {
         RestHighLevelClient client = highLevelClient();
@@ -466,6 +471,7 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase {
         RestHighLevelClient client = highLevelClient();
 
         createTestRepositories();
+        createTestIndex();
         createTestSnapshots();
 
         // tag::get-snapshots-request
@@ -543,10 +549,84 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase {
         }
     }
 
+    public void testSnapshotSnapshotsStatus() throws IOException {
+        RestHighLevelClient client = highLevelClient();
+        createTestRepositories();
+        createTestIndex();
+        createTestSnapshots();
+
+        // tag::snapshots-status-request
+        SnapshotsStatusRequest request = new SnapshotsStatusRequest();
+        // end::snapshots-status-request
+
+        // tag::snapshots-status-request-repository
+        request.repository(repositoryName); // <1>
+        // end::snapshots-status-request-repository
+        // tag::snapshots-status-request-snapshots
+        String [] snapshots = new String[] {snapshotName};
+        request.snapshots(snapshots); // <1>
+        // end::snapshots-status-request-snapshots
+        // tag::snapshots-status-request-ignoreUnavailable
+        request.ignoreUnavailable(true); // <1>
+        // end::snapshots-status-request-ignoreUnavailable
+        // tag::snapshots-status-request-masterTimeout
+        request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1>
+        request.masterNodeTimeout("1m"); // <2>
+        // end::snapshots-status-request-masterTimeout
+
+        // tag::snapshots-status-execute
+        SnapshotsStatusResponse response = client.snapshot().status(request, RequestOptions.DEFAULT);
+        // end::snapshots-status-execute
+
+        // tag::snapshots-status-response
+        List<SnapshotStatus> snapshotStatusesResponse = response.getSnapshots();
+        SnapshotStatus snapshotStatus = snapshotStatusesResponse.get(0); // <1>
+        SnapshotsInProgress.State snapshotState = snapshotStatus.getState(); // <2>
+        SnapshotStats shardStats = snapshotStatus.getIndices().get(indexName).getShards().get(0).getStats(); // <3>
+        // end::snapshots-status-response
+        assertThat(snapshotStatusesResponse.size(), equalTo(1));
+        assertThat(snapshotStatusesResponse.get(0).getSnapshot().getRepository(), equalTo(SnapshotClientDocumentationIT.repositoryName));
+        assertThat(snapshotStatusesResponse.get(0).getSnapshot().getSnapshotId().getName(), equalTo(snapshotName));
+        assertThat(snapshotState.completed(), equalTo(true));
+    }
+
+    public void testSnapshotSnapshotsStatusAsync() throws InterruptedException {
+        RestHighLevelClient client = highLevelClient();
+        {
+            SnapshotsStatusRequest request = new SnapshotsStatusRequest();
+
+            // tag::snapshots-status-execute-listener
+            ActionListener<SnapshotsStatusResponse> listener =
+                new ActionListener<SnapshotsStatusResponse>() {
+                    @Override
+                    public void onResponse(SnapshotsStatusResponse snapshotsStatusResponse) {
+                        // <1>
+                    }
+
+                    @Override
+                    public void onFailure(Exception e) {
+                        // <2>
+                    }
+                };
+            // end::snapshots-status-execute-listener
+
+            // Replace the empty listener with a blocking listener in test
+            final CountDownLatch latch = new CountDownLatch(1);
+            listener = new LatchedActionListener<>(listener, latch);
+
+            // tag::snapshots-status-execute-async
+            client.snapshot().statusAsync(request, RequestOptions.DEFAULT, listener); // <1>
+            // end::snapshots-status-execute-async
+
+            assertTrue(latch.await(30L, TimeUnit.SECONDS));
+        }
+    }
+
     public void testSnapshotDeleteSnapshot() throws IOException {
         RestHighLevelClient client = highLevelClient();
 
         createTestRepositories();
+        createTestIndex();
         createTestSnapshots();
 
         // tag::delete-snapshot-request
@@ -608,9 +688,14 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase {
         assertTrue(highLevelClient().snapshot().createRepository(request, RequestOptions.DEFAULT).isAcknowledged());
     }
 
+    private void createTestIndex() throws IOException {
+        createIndex(indexName, Settings.EMPTY);
+    }
+
     private void createTestSnapshots() throws IOException {
         Request createSnapshot = new Request("put", String.format(Locale.ROOT, "_snapshot/%s/%s", repositoryName, snapshotName));
         createSnapshot.addParameter("wait_for_completion", "true");
+        createSnapshot.setJsonEntity("{\"indices\":\"" + indexName + "\"}");
         Response response = highLevelClient().getLowLevelClient().performRequest(createSnapshot);
         // check that the request went ok without parsing JSON here. When using the high level client, check acknowledgement instead.
         assertEquals(200, response.getStatusLine().getStatusCode());

View File

@@ -76,7 +76,7 @@ public class RestClientBuilderIntegTests extends RestClientTestCase {
try {
try (RestClient client = buildRestClient()) {
try {
- client.performRequest("GET", "/");
+ client.performRequest(new Request("GET", "/"));
fail("connection should have been rejected due to SSL handshake");
} catch (Exception e) {
assertThat(e.getMessage(), containsString("General SSLEngine problem"));
@@ -85,7 +85,7 @@ public class RestClientBuilderIntegTests extends RestClientTestCase {
SSLContext.setDefault(getSslContext());
try (RestClient client = buildRestClient()) {
- Response response = client.performRequest("GET", "/");
+ Response response = client.performRequest(new Request("GET", "/"));
assertEquals(200, response.getStatusLine().getStatusCode());
}
} finally {

View File
@@ -256,35 +256,51 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
public void testEncodeParams() throws IOException {
{
- Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "this/is/the/routing"));
+ Request request = new Request("PUT", "/200");
+ request.addParameter("routing", "this/is/the/routing");
+ Response response = restClient.performRequest(request);
assertEquals(pathPrefix + "/200?routing=this%2Fis%2Fthe%2Frouting", response.getRequestLine().getUri());
}
{
- Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "this|is|the|routing"));
+ Request request = new Request("PUT", "/200");
+ request.addParameter("routing", "this|is|the|routing");
+ Response response = restClient.performRequest(request);
assertEquals(pathPrefix + "/200?routing=this%7Cis%7Cthe%7Crouting", response.getRequestLine().getUri());
}
{
- Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "routing#1"));
+ Request request = new Request("PUT", "/200");
+ request.addParameter("routing", "routing#1");
+ Response response = restClient.performRequest(request);
assertEquals(pathPrefix + "/200?routing=routing%231", response.getRequestLine().getUri());
}
{
- Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "中文"));
+ Request request = new Request("PUT", "/200");
+ request.addParameter("routing", "中文");
+ Response response = restClient.performRequest(request);
assertEquals(pathPrefix + "/200?routing=%E4%B8%AD%E6%96%87", response.getRequestLine().getUri());
}
{
- Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "foo bar"));
+ Request request = new Request("PUT", "/200");
+ request.addParameter("routing", "foo bar");
+ Response response = restClient.performRequest(request);
assertEquals(pathPrefix + "/200?routing=foo+bar", response.getRequestLine().getUri());
}
{
- Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "foo+bar"));
+ Request request = new Request("PUT", "/200");
+ request.addParameter("routing", "foo+bar");
+ Response response = restClient.performRequest(request);
assertEquals(pathPrefix + "/200?routing=foo%2Bbar", response.getRequestLine().getUri());
}
{
- Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "foo/bar"));
+ Request request = new Request("PUT", "/200");
+ request.addParameter("routing", "foo/bar");
+ Response response = restClient.performRequest(request);
assertEquals(pathPrefix + "/200?routing=foo%2Fbar", response.getRequestLine().getUri());
}
{
- Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "foo^bar"));
+ Request request = new Request("PUT", "/200");
+ request.addParameter("routing", "foo^bar");
+ Response response = restClient.performRequest(request);
assertEquals(pathPrefix + "/200?routing=foo%5Ebar", response.getRequestLine().getUri());
}
}
@@ -341,14 +357,14 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
public void testUrlWithoutLeadingSlash() throws Exception {
if (pathPrefix.length() == 0) {
try {
- restClient.performRequest("GET", "200");
+ restClient.performRequest(new Request("GET", "200"));
fail("request should have failed");
} catch (ResponseException e) {
assertEquals(404, e.getResponse().getStatusLine().getStatusCode());
}
} else {
{
- Response response = restClient.performRequest("GET", "200");
+ Response response = restClient.performRequest(new Request("GET", "200"));
//a trailing slash gets automatically added if a pathPrefix is configured
assertEquals(200, response.getStatusLine().getStatusCode());
}
@@ -357,7 +373,7 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
try (RestClient restClient = RestClient.builder(
new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort()))
.setPathPrefix(pathPrefix.substring(1)).build()) {
- Response response = restClient.performRequest("GET", "200");
+ Response response = restClient.performRequest(new Request("GET", "200"));
//a trailing slash gets automatically added if a pathPrefix is configured
assertEquals(200, response.getStatusLine().getStatusCode());
}

View File
@@ -267,7 +267,7 @@ public class RestClientDocumentation {
}
{
//tag::rest-client-response2
- Response response = restClient.performRequest("GET", "/");
+ Response response = restClient.performRequest(new Request("GET", "/"));
RequestLine requestLine = response.getRequestLine(); // <1>
HttpHost host = response.getHost(); // <2>
int statusCode = response.getStatusLine().getStatusCode(); // <3>
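All of the client-side changes above follow the same migration: the deprecated `performRequest(String method, String endpoint, ...)` overloads are replaced by a `Request` object. A minimal sketch of the full pattern, using an illustrative endpoint, parameter, and body rather than anything from this diff:
["source","java"]
--------------------------------------------------
// Hypothetical endpoint and payload, shown only to illustrate the Request API.
Request request = new Request("POST", "/my-index/_doc"); // HTTP method and endpoint
request.addParameter("refresh", "true");                 // query parameters are URL-encoded for you
request.setJsonEntity("{\"field\":\"value\"}");          // sets the body and the JSON content type
Response response = restClient.performRequest(request);  // synchronous execution
--------------------------------------------------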
View File
@@ -0,0 +1,97 @@
[[java-rest-high-snapshot-snapshots-status]]
=== Snapshots Status API
The Snapshots Status API allows you to retrieve detailed information about snapshots in progress.
[[java-rest-high-snapshot-snapshots-status-request]]
==== Snapshots Status Request
A `SnapshotsStatusRequest`:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-request]
--------------------------------------------------
==== Required Arguments
The following arguments must be provided:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-request-repository]
--------------------------------------------------
<1> Sets the repository to check for snapshot statuses
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-request-snapshots]
--------------------------------------------------
<1> The list of snapshot names to check the status of
==== Optional Arguments
The following arguments can optionally be provided:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-request-ignoreUnavailable]
--------------------------------------------------
<1> By default the request fails if any of the requested snapshots is unavailable. Setting the
`ignore_unavailable` flag to `true` instead returns the status of only those snapshots that are currently available.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-request-masterTimeout]
--------------------------------------------------
<1> Timeout to connect to the master node as a `TimeValue`
<2> Timeout to connect to the master node as a `String`
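Putting the pieces together, a fully configured request might look like the following sketch. The repository and snapshot names are placeholders, and the setter names are assumed from the corresponding request getters:
["source","java"]
--------------------------------------------------
// "my_repository" and "snapshot_1" are placeholder names.
SnapshotsStatusRequest request = new SnapshotsStatusRequest();
request.repository("my_repository");                      // required
request.snapshots(new String[]{"snapshot_1"});             // required
request.ignoreUnavailable(true);                           // optional
request.masterNodeTimeout(TimeValue.timeValueMinutes(1));  // optional
--------------------------------------------------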
[[java-rest-high-snapshot-snapshots-status-sync]]
==== Synchronous Execution
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-execute]
--------------------------------------------------
[[java-rest-high-snapshot-snapshots-status-async]]
==== Asynchronous Execution
The asynchronous execution of retrieving snapshot statuses requires both the
`SnapshotsStatusRequest` instance and an `ActionListener` instance to be
passed to the asynchronous method:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-execute-async]
--------------------------------------------------
<1> The `SnapshotsStatusRequest` to execute and the `ActionListener`
to use when the execution completes
The asynchronous method does not block and returns immediately. Once it is
completed the `ActionListener` is called back using the `onResponse` method
if the execution successfully completed or using the `onFailure` method if
it failed.
A typical listener for `SnapshotsStatusResponse` looks like:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-execute-listener]
--------------------------------------------------
<1> Called when the execution is successfully completed. The response is
provided as an argument
<2> Called in case of a failure. The raised exception is provided as an argument
[[java-rest-high-snapshot-snapshots-status-response]]
==== Snapshots Status Response
The returned `SnapshotsStatusResponse` allows you to retrieve information about the
executed operation as follows:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-response]
--------------------------------------------------
<1> The response contains a list of snapshot statuses
<2> Each status contains information about the snapshot
<3> Example of reading snapshot statistics about a specific index and shard
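As a sketch of how the nested structure might be traversed, assuming `getIndices()` returns a map of `SnapshotIndexStatus` keyed by index name and `getShards()` a map of `SnapshotIndexShardStatus` keyed by shard id:
["source","java"]
--------------------------------------------------
for (SnapshotStatus snapshotStatus : response.getSnapshots()) {
    // one entry per index contained in the snapshot
    for (Map.Entry<String, SnapshotIndexStatus> index : snapshotStatus.getIndices().entrySet()) {
        // one entry per shard of that index
        for (SnapshotIndexShardStatus shard : index.getValue().getShards().values()) {
            SnapshotStats stats = shard.getStats();
        }
    }
}
--------------------------------------------------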
View File
@@ -154,6 +154,7 @@ The Java High Level REST Client supports the following Snapshot APIs:
* <<java-rest-high-snapshot-verify-repository>>
* <<java-rest-high-snapshot-create-snapshot>>
* <<java-rest-high-snapshot-get-snapshots>>
+ * <<java-rest-high-snapshot-snapshots-status>>
* <<java-rest-high-snapshot-delete-snapshot>>
include::snapshot/get_repository.asciidoc[]
@@ -162,6 +163,7 @@ include::snapshot/delete_repository.asciidoc[]
include::snapshot/verify_repository.asciidoc[]
include::snapshot/create_snapshot.asciidoc[]
include::snapshot/get_snapshots.asciidoc[]
+ include::snapshot/snapshots_status.asciidoc[]
include::snapshot/delete_snapshot.asciidoc[]
== Tasks APIs

View File
@@ -33,7 +33,7 @@ Available expressions for interval: `year` (`1y`), `quarter` (`1q`), `month` (`1
Time values can also be specified via abbreviations supported by <<time-units,time units>> parsing.
Note that fractional time values are not supported, but you can address this by shifting to another
time unit (e.g., `1.5h` could instead be specified as `90m`). Also note that time intervals larger than
- than days do not support arbitrary values but can only be one unit large (e.g. `1y` is valid, `2y` is not).
+ days do not support arbitrary values but can only be one unit large (e.g. `1y` is valid, `2y` is not).
[source,js]
--------------------------------------------------

View File
@@ -104,6 +104,11 @@ With that out of the way, let's get started with the fun part...
== Installation
+ You can skip installation completely by using our hosted
+ Elasticsearch Service on https://www.elastic.co/cloud[Elastic Cloud], which is
+ available on AWS and GCP. You can
+ https://www.elastic.co/cloud/elasticsearch-service/signup[try out the hosted service] for free.
Elasticsearch requires at least Java 8. Specifically as of this writing, it is recommended that you use the Oracle JDK version {jdk}. Java installation varies from platform to platform so we won't go into those details here. Oracle's recommended installation documentation can be found on http://docs.oracle.com/javase/8/docs/technotes/guides/install/install_overview.html[Oracle's website]. Suffice to say, before you install Elasticsearch, please check your Java version first by running (and then install/upgrade accordingly if needed):
[source,sh]

View File
@@ -1,6 +1,11 @@
[[install-elasticsearch]]
== Installing Elasticsearch
+ Elasticsearch can be run on your own hardware or using our hosted
+ Elasticsearch Service on https://www.elastic.co/cloud[Elastic Cloud], which is
+ available on AWS and GCP. You can
+ https://www.elastic.co/cloud/elasticsearch-service/signup[try out the hosted service] for free.
Elasticsearch is provided in the following package formats:
[horizontal]
@@ -38,7 +43,7 @@ Elasticsearch on Windows. MSIs may be downloaded from the Elasticsearch website.
`docker`::
Images are available for running Elasticsearch as Docker containers. They may be
downloaded from the Elastic Docker Registry.
+
{ref}/docker.html[Install {es} with Docker]

View File
@@ -8,8 +8,6 @@ A list of all published Docker images and tags can be found in
https://www.docker.elastic.co[www.docker.elastic.co]. The source code can be found
on https://github.com/elastic/elasticsearch-docker/tree/{branch}[GitHub].
- ==== Image types
These images are free to use under the Elastic license. They contain open source
and free commercial features and access to paid commercial features.
{xpack-ref}/license-management.html[Start a 30-day trial] to try out all of the
@@ -17,9 +15,6 @@ paid commercial features. See the
https://www.elastic.co/subscriptions[Subscriptions] page for information about
Elastic license levels.
- Alternatively, you can download `-oss` images, which contain only features that
- are available under the Apache 2.0 license.
==== Pulling the image
Obtaining {es} for Docker is as simple as issuing a +docker pull+ command
@@ -34,14 +29,17 @@ endif::[]
ifeval::["{release-state}"!="unreleased"]
- Docker images can be retrieved with the following commands:
+ For example, the Docker image can be retrieved with the following command:
["source","sh",subs="attributes"]
--------------------------------------------
docker pull {docker-repo}:{version}
- docker pull {docker-repo}-oss:{version}
--------------------------------------------
+ Alternatively, you can download other Docker images that contain only features
+ that are available under the Apache 2.0 license from
+ https://www.docker.elastic.co[www.docker.elastic.co].
endif::[]
[[docker-cli-run]]

View File
@@ -32,6 +32,8 @@ import org.elasticsearch.ingest.AbstractProcessor;
import org.elasticsearch.ingest.ConfigurationUtils;
import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.Processor;
+ import org.elasticsearch.script.ScriptService;
+ import org.elasticsearch.script.TemplateScript;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
@@ -42,21 +44,22 @@ public final class DateIndexNameProcessor extends AbstractProcessor {
public static final String TYPE = "date_index_name";
private final String field;
- private final String indexNamePrefix;
- private final String dateRounding;
- private final String indexNameFormat;
+ private final TemplateScript.Factory indexNamePrefixTemplate;
+ private final TemplateScript.Factory dateRoundingTemplate;
+ private final TemplateScript.Factory indexNameFormatTemplate;
private final DateTimeZone timezone;
private final List<Function<String, DateTime>> dateFormats;
DateIndexNameProcessor(String tag, String field, List<Function<String, DateTime>> dateFormats, DateTimeZone timezone,
- String indexNamePrefix, String dateRounding, String indexNameFormat) {
+ TemplateScript.Factory indexNamePrefixTemplate, TemplateScript.Factory dateRoundingTemplate,
+ TemplateScript.Factory indexNameFormatTemplate) {
super(tag);
this.field = field;
this.timezone = timezone;
this.dateFormats = dateFormats;
- this.indexNamePrefix = indexNamePrefix;
- this.dateRounding = dateRounding;
- this.indexNameFormat = indexNameFormat;
+ this.indexNamePrefixTemplate = indexNamePrefixTemplate;
+ this.dateRoundingTemplate = dateRoundingTemplate;
+ this.indexNameFormatTemplate = indexNameFormatTemplate;
}
@Override
@@ -83,6 +86,9 @@ public final class DateIndexNameProcessor extends AbstractProcessor {
if (dateTime == null) {
throw new IllegalArgumentException("unable to parse date [" + date + "]", lastException);
}
+ String indexNamePrefix = ingestDocument.renderTemplate(indexNamePrefixTemplate);
+ String indexNameFormat = ingestDocument.renderTemplate(indexNameFormatTemplate);
+ String dateRounding = ingestDocument.renderTemplate(dateRoundingTemplate);
DateTimeFormatter formatter = DateTimeFormat.forPattern(indexNameFormat);
StringBuilder builder = new StringBuilder()
@@ -106,16 +112,16 @@ public final class DateIndexNameProcessor extends AbstractProcessor {
return field;
}
- String getIndexNamePrefix() {
-     return indexNamePrefix;
+ TemplateScript.Factory getIndexNamePrefixTemplate() {
+     return indexNamePrefixTemplate;
}
- String getDateRounding() {
-     return dateRounding;
+ TemplateScript.Factory getDateRoundingTemplate() {
+     return dateRoundingTemplate;
}
- String getIndexNameFormat() {
-     return indexNameFormat;
+ TemplateScript.Factory getIndexNameFormatTemplate() {
+     return indexNameFormatTemplate;
}
DateTimeZone getTimezone() {
@@ -128,6 +134,12 @@ public final class DateIndexNameProcessor extends AbstractProcessor {
public static final class Factory implements Processor.Factory {
+ private final ScriptService scriptService;
+ public Factory(ScriptService scriptService) {
+     this.scriptService = scriptService;
+ }
@Override
public DateIndexNameProcessor create(Map<String, Processor.Factory> registry, String tag,
Map<String, Object> config) throws Exception {
@@ -154,9 +166,16 @@ public final class DateIndexNameProcessor extends AbstractProcessor {
String field = ConfigurationUtils.readStringProperty(TYPE, tag, config, "field");
String indexNamePrefix = ConfigurationUtils.readStringProperty(TYPE, tag, config, "index_name_prefix", "");
+ TemplateScript.Factory indexNamePrefixTemplate =
+     ConfigurationUtils.compileTemplate(TYPE, tag, "index_name_prefix", indexNamePrefix, scriptService);
String dateRounding = ConfigurationUtils.readStringProperty(TYPE, tag, config, "date_rounding");
+ TemplateScript.Factory dateRoundingTemplate =
+     ConfigurationUtils.compileTemplate(TYPE, tag, "date_rounding", dateRounding, scriptService);
String indexNameFormat = ConfigurationUtils.readStringProperty(TYPE, tag, config, "index_name_format", "yyyy-MM-dd");
- return new DateIndexNameProcessor(tag, field, dateFormats, timezone, indexNamePrefix, dateRounding, indexNameFormat);
+ TemplateScript.Factory indexNameFormatTemplate =
+     ConfigurationUtils.compileTemplate(TYPE, tag, "index_name_format", indexNameFormat, scriptService);
+ return new DateIndexNameProcessor(tag, field, dateFormats, timezone, indexNamePrefixTemplate,
+     dateRoundingTemplate, indexNameFormatTemplate);
}
}
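Because the three settings are now compiled as templates, a pipeline can compute the prefix, rounding, and format per document. A minimal sketch modeled on the factory tests; `factory` is assumed to be a `DateIndexNameProcessor.Factory` built with a `ScriptService`, and the `{{service}}` reference assumes a `service` field on incoming documents:
["source","java"]
--------------------------------------------------
// Sketch only: "{{service}}" is a hypothetical mustache template.
Map<String, Object> config = new HashMap<>();
config.put("field", "timestamp");
config.put("date_rounding", "d");
config.put("index_name_prefix", "{{service}}-"); // rendered per document at ingest time
DateIndexNameProcessor processor = factory.create(null, null, config);
--------------------------------------------------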
View File
@@ -73,7 +73,7 @@ public class IngestCommonPlugin extends Plugin implements ActionPlugin, IngestPl
processors.put(GsubProcessor.TYPE, new GsubProcessor.Factory());
processors.put(FailProcessor.TYPE, new FailProcessor.Factory(parameters.scriptService));
processors.put(ForEachProcessor.TYPE, new ForEachProcessor.Factory());
- processors.put(DateIndexNameProcessor.TYPE, new DateIndexNameProcessor.Factory());
+ processors.put(DateIndexNameProcessor.TYPE, new DateIndexNameProcessor.Factory(parameters.scriptService));
processors.put(SortProcessor.TYPE, new SortProcessor.Factory());
processors.put(GrokProcessor.TYPE, new GrokProcessor.Factory(GROK_PATTERNS, createGrokThreadWatchdog(parameters)));
processors.put(ScriptProcessor.TYPE, new ScriptProcessor.Factory(parameters.scriptService));
@@ -97,12 +97,12 @@ public class IngestCommonPlugin extends Plugin implements ActionPlugin, IngestPl
Supplier<DiscoveryNodes> nodesInCluster) {
return Arrays.asList(new GrokProcessorGetAction.RestAction(settings, restController));
}
@Override
public List<Setting<?>> getSettings() {
return Arrays.asList(WATCHDOG_INTERVAL, WATCHDOG_MAX_EXECUTION_TIME);
}
private static ThreadWatchdog createGrokThreadWatchdog(Processor.Parameters parameters) {
long intervalMillis = WATCHDOG_INTERVAL.get(parameters.env.settings()).getMillis();
long maxExecutionTimeMillis = WATCHDOG_MAX_EXECUTION_TIME.get(parameters.env.settings()).getMillis();

View File
@@ -20,18 +20,20 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.ElasticsearchParseException;
+ import org.elasticsearch.ingest.TestTemplateService;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matchers;
import org.joda.time.DateTimeZone;
import java.util.Arrays;
+ import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
public class DateIndexNameFactoryTests extends ESTestCase {
public void testDefaults() throws Exception {
- DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory();
+ DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory(TestTemplateService.instance());
Map<String, Object> config = new HashMap<>();
config.put("field", "_field");
config.put("date_rounding", "y");
@@ -39,14 +41,14 @@ public class DateIndexNameFactoryTests extends ESTestCase {
DateIndexNameProcessor processor = factory.create(null, null, config);
assertThat(processor.getDateFormats().size(), Matchers.equalTo(1));
assertThat(processor.getField(), Matchers.equalTo("_field"));
- assertThat(processor.getIndexNamePrefix(), Matchers.equalTo(""));
- assertThat(processor.getDateRounding(), Matchers.equalTo("y"));
- assertThat(processor.getIndexNameFormat(), Matchers.equalTo("yyyy-MM-dd"));
+ assertThat(processor.getIndexNamePrefixTemplate().newInstance(Collections.emptyMap()).execute(), Matchers.equalTo(""));
+ assertThat(processor.getDateRoundingTemplate().newInstance(Collections.emptyMap()).execute(), Matchers.equalTo("y"));
+ assertThat(processor.getIndexNameFormatTemplate().newInstance(Collections.emptyMap()).execute(), Matchers.equalTo("yyyy-MM-dd"));
assertThat(processor.getTimezone(), Matchers.equalTo(DateTimeZone.UTC));
}
public void testSpecifyOptionalSettings() throws Exception {
- DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory();
+ DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory(TestTemplateService.instance());
Map<String, Object> config = new HashMap<>();
config.put("field", "_field");
config.put("index_name_prefix", "_prefix");
@@ -63,7 +65,7 @@ public class DateIndexNameFactoryTests extends ESTestCase {
config.put("index_name_format", "yyyyMMdd");
processor = factory.create(null, null, config);
- assertThat(processor.getIndexNameFormat(), Matchers.equalTo("yyyyMMdd"));
+ assertThat(processor.getIndexNameFormatTemplate().newInstance(Collections.emptyMap()).execute(), Matchers.equalTo("yyyyMMdd"));
config = new HashMap<>();
config.put("field", "_field");
@@ -80,11 +82,11 @@ public class DateIndexNameFactoryTests extends ESTestCase {
config.put("date_rounding", "y");
processor = factory.create(null, null, config);
- assertThat(processor.getIndexNamePrefix(), Matchers.equalTo("_prefix"));
+ assertThat(processor.getIndexNamePrefixTemplate().newInstance(Collections.emptyMap()).execute(), Matchers.equalTo("_prefix"));
}
public void testRequiredFields() throws Exception {
- DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory();
+ DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory(TestTemplateService.instance());
Map<String, Object> config = new HashMap<>();
config.put("date_rounding", "y");
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, config));
@@ -95,5 +97,4 @@ public class DateIndexNameFactoryTests extends ESTestCase {
e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, config));
assertThat(e.getMessage(), Matchers.equalTo("[date_rounding] required property is missing"));
}
}

View File
@@ -19,11 +19,14 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.ingest.IngestDocument;
+ import org.elasticsearch.ingest.TestTemplateService;
import org.elasticsearch.test.ESTestCase;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
+ import org.joda.time.format.DateTimeFormat;
import java.util.Collections;
+ import java.util.List;
import java.util.Locale;
import java.util.function.Function;
@@ -33,11 +36,8 @@ public class DateIndexNameProcessorTests extends ESTestCase {
public void testJodaPattern() throws Exception {
Function<String, DateTime> function = DateFormat.Joda.getFunction("yyyy-MM-dd'T'HH:mm:ss.SSSZ", DateTimeZone.UTC, Locale.ROOT);
- DateIndexNameProcessor processor = new DateIndexNameProcessor(
-     "_tag", "_field", Collections.singletonList(function), DateTimeZone.UTC,
-     "events-", "y", "yyyyMMdd"
- );
+ DateIndexNameProcessor processor = createProcessor("_field", Collections.singletonList(function),
+     DateTimeZone.UTC, "events-", "y", "yyyyMMdd");
IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null,
Collections.singletonMap("_field", "2016-04-25T12:24:20.101Z"));
processor.execute(document);
@@ -46,7 +46,7 @@ public class DateIndexNameProcessorTests extends ESTestCase {
public void testTAI64N() throws Exception {
Function<String, DateTime> function = DateFormat.Tai64n.getFunction(null, DateTimeZone.UTC, null);
- DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function),
+ DateIndexNameProcessor dateProcessor = createProcessor("_field", Collections.singletonList(function),
DateTimeZone.UTC, "events-", "m", "yyyyMMdd");
IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null,
Collections.singletonMap("_field", (randomBoolean() ? "@" : "") + "4000000050d506482dbdf024"));
@@ -56,7 +56,7 @@ public class DateIndexNameProcessorTests extends ESTestCase {
public void testUnixMs() throws Exception {
Function<String, DateTime> function = DateFormat.UnixMs.getFunction(null, DateTimeZone.UTC, null);
- DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function),
+ DateIndexNameProcessor dateProcessor = createProcessor("_field", Collections.singletonList(function),
DateTimeZone.UTC, "events-", "m", "yyyyMMdd");
IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null,
Collections.singletonMap("_field", "1000500"));
@@ -71,7 +71,7 @@ public class DateIndexNameProcessorTests extends ESTestCase {
public void testUnix() throws Exception {
Function<String, DateTime> function = DateFormat.Unix.getFunction(null, DateTimeZone.UTC, null);
- DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function),
+ DateIndexNameProcessor dateProcessor = createProcessor("_field", Collections.singletonList(function),
DateTimeZone.UTC, "events-", "m", "yyyyMMdd");
IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null,
Collections.singletonMap("_field", "1000.5"));
@@ -79,4 +79,33 @@ public class DateIndexNameProcessorTests extends ESTestCase {
assertThat(document.getSourceAndMetadata().get("_index"), equalTo("<events-{19700101||/m{yyyyMMdd|UTC}}>"));
}
public void testTemplatedFields() throws Exception {
String indexNamePrefix = randomAlphaOfLength(10);
String dateRounding = randomFrom("y", "M", "w", "d", "h", "m", "s");
String indexNameFormat = randomFrom("yyyy-MM-dd'T'HH:mm:ss.SSSZ", "yyyyMMdd", "MM/dd/yyyy");
String date = Integer.toString(randomInt());
Function<String, DateTime> dateTimeFunction = DateFormat.Unix.getFunction(null, DateTimeZone.UTC, null);
DateIndexNameProcessor dateProcessor = createProcessor("_field",
Collections.singletonList(dateTimeFunction), DateTimeZone.UTC, indexNamePrefix,
dateRounding, indexNameFormat);
IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null,
Collections.singletonMap("_field", date));
dateProcessor.execute(document);
assertThat(document.getSourceAndMetadata().get("_index"),
equalTo("<"+indexNamePrefix+"{"+DateTimeFormat.forPattern(indexNameFormat)
.print(dateTimeFunction.apply(date))+"||/"+dateRounding+"{"+indexNameFormat+"|UTC}}>"));
}
private DateIndexNameProcessor createProcessor(String field, List<Function<String, DateTime>> dateFormats,
DateTimeZone timezone, String indexNamePrefix, String dateRounding,
String indexNameFormat) {
return new DateIndexNameProcessor(randomAlphaOfLength(10), field, dateFormats, timezone,
new TestTemplateService.MockTemplateScript.Factory(indexNamePrefix),
new TestTemplateService.MockTemplateScript.Factory(dateRounding),
new TestTemplateService.MockTemplateScript.Factory(indexNameFormat)
);
}
}
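The expected `_index` values above use Elasticsearch's date-math index name syntax; my reading of one concrete assertion, as a commented sketch:
["source","java"]
--------------------------------------------------
// "<events-{19700101||/m{yyyyMMdd|UTC}}>" breaks down roughly as:
//   events-    the rendered index_name_prefix
//   19700101   the event date, printed with the index_name_format
//   /m         the date_rounding applied by date math when the name resolves
//   yyyyMMdd   the index_name_format used to render the rounded date
//   UTC        the time zone used for the rounding
--------------------------------------------------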
View File
@@ -24,9 +24,10 @@ import org.elasticsearch.ingest.RandomDocumentPicks;
import org.elasticsearch.ingest.TestTemplateService;
import org.elasticsearch.script.TemplateScript;
import org.elasticsearch.test.ESTestCase;
- import org.joda.time.DateTime;
- import org.joda.time.DateTimeZone;
+ import java.time.ZoneId;
+ import java.time.ZoneOffset;
+ import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@@ -36,19 +37,21 @@ import java.util.Map;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
- import static org.joda.time.DateTimeZone.UTC;
public class DateProcessorTests extends ESTestCase {
private TemplateScript.Factory templatize(Locale locale) {
return new TestTemplateService.MockTemplateScript.Factory(locale.getLanguage());
}
- private TemplateScript.Factory templatize(DateTimeZone timezone) {
-     return new TestTemplateService.MockTemplateScript.Factory(timezone.getID());
+ private TemplateScript.Factory templatize(ZoneId timezone) {
+     // prevent writing "UTC" as string, as joda time does not parse it
+     String id = timezone.equals(ZoneOffset.UTC) ? "UTC" : timezone.getId();
+     return new TestTemplateService.MockTemplateScript.Factory(id);
}
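A short aside on why that special case exists, as I understand it (this sketch assumes joda-time is on the classpath):
["source","java"]
--------------------------------------------------
String id = ZoneOffset.UTC.getId();   // "Z", not "UTC"
// org.joda.time.DateTimeZone.forID("Z") rejects the id with an
// IllegalArgumentException, while forID("UTC") parses fine, hence
// the substitution in the helper above.
--------------------------------------------------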
public void testJodaPattern() {
DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10),
- templatize(DateTimeZone.forID("Europe/Amsterdam")), templatize(Locale.ENGLISH),
+ templatize(ZoneId.of("Europe/Amsterdam")), templatize(Locale.ENGLISH),
"date_as_string", Collections.singletonList("yyyy dd MM hh:mm:ss"), "date_as_date");
Map<String, Object> document = new HashMap<>();
document.put("date_as_string", "2010 12 06 11:05:15");
@@ -63,7 +66,7 @@ public class DateProcessorTests extends ESTestCase {
matchFormats.add("dd/MM/yyyy");
matchFormats.add("dd-MM-yyyy");
DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10),
- templatize(DateTimeZone.forID("Europe/Amsterdam")), templatize(Locale.ENGLISH),
+ templatize(ZoneId.of("Europe/Amsterdam")), templatize(Locale.ENGLISH),
"date_as_string", matchFormats, "date_as_date");
Map<String, Object> document = new HashMap<>();
@@ -98,7 +101,7 @@ public class DateProcessorTests extends ESTestCase {
public void testInvalidJodaPattern() {
try {
DateProcessor processor = new DateProcessor(randomAlphaOfLength(10),
- templatize(UTC), templatize(randomLocale(random())),
+ templatize(ZoneOffset.UTC), templatize(randomLocale(random())),
"date_as_string", Collections.singletonList("invalid pattern"), "date_as_date");
Map<String, Object> document = new HashMap<>();
document.put("date_as_string", "2010");
@@ -112,7 +115,7 @@ public class DateProcessorTests extends ESTestCase {
public void testJodaPatternLocale() {
DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10),
- templatize(DateTimeZone.forID("Europe/Amsterdam")), templatize(Locale.ITALIAN),
+ templatize(ZoneId.of("Europe/Amsterdam")), templatize(Locale.ITALIAN),
"date_as_string", Collections.singletonList("yyyy dd MMM"), "date_as_date");
Map<String, Object> document = new HashMap<>();
document.put("date_as_string", "2010 12 giugno");
@@ -123,18 +126,18 @@ public class DateProcessorTests extends ESTestCase {
public void testJodaPatternDefaultYear() {
DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10),
- templatize(DateTimeZone.forID("Europe/Amsterdam")), templatize(Locale.ENGLISH),
+ templatize(ZoneId.of("Europe/Amsterdam")), templatize(Locale.ENGLISH),
"date_as_string", Collections.singletonList("dd/MM"), "date_as_date");
Map<String, Object> document = new HashMap<>();
document.put("date_as_string", "12/06");
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
dateProcessor.execute(ingestDocument);
assertThat(ingestDocument.getFieldValue("date_as_date", String.class),
- equalTo(DateTime.now().getYear() + "-06-12T00:00:00.000+02:00"));
+ equalTo(ZonedDateTime.now().getYear() + "-06-12T00:00:00.000+02:00"));
}
public void testTAI64N() {
- DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), templatize(DateTimeZone.forOffsetHours(2)),
+ DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), templatize(ZoneOffset.ofHours(2)),
templatize(randomLocale(random())),
"date_as_string", Collections.singletonList("TAI64N"), "date_as_date");
Map<String, Object> document = new HashMap<>();
@@ -146,8 +149,8 @@ public class DateProcessorTests extends ESTestCase {
}
public void testUnixMs() {
- DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), templatize(UTC), templatize(randomLocale(random())),
-     "date_as_string", Collections.singletonList("UNIX_MS"), "date_as_date");
+ DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), templatize(ZoneOffset.UTC),
+     templatize(randomLocale(random())), "date_as_string", Collections.singletonList("UNIX_MS"), "date_as_date");
Map<String, Object> document = new HashMap<>();
document.put("date_as_string", "1000500");
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
@@ -162,7 +165,7 @@ public class DateProcessorTests extends ESTestCase {
}
public void testUnix() {
- DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), templatize(UTC),
+ DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), templatize(ZoneOffset.UTC),
templatize(randomLocale(random())),
"date_as_string", Collections.singletonList("UNIX"), "date_as_date");
Map<String, Object> document = new HashMap<>();
@@ -186,7 +189,7 @@ public class DateProcessorTests extends ESTestCase {
public void testInvalidLocale() {
DateProcessor processor = new DateProcessor(randomAlphaOfLength(10),
- templatize(UTC), new TestTemplateService.MockTemplateScript.Factory("invalid_locale"),
+ templatize(ZoneOffset.UTC), new TestTemplateService.MockTemplateScript.Factory("invalid_locale"),
"date_as_string", Collections.singletonList("yyyy"), "date_as_date");
Map<String, Object> document = new HashMap<>();
document.put("date_as_string", "2010");

View File
@@ -19,19 +19,13 @@
package org.elasticsearch.index.reindex;
- import org.apache.http.entity.ContentType;
- import org.apache.http.entity.StringEntity;
- import org.elasticsearch.client.Response;
- import org.elasticsearch.common.xcontent.XContentHelper;
- import org.elasticsearch.common.xcontent.json.JsonXContent;
+ import org.elasticsearch.client.Request;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.junit.Before;
import java.io.IOException;
import java.util.Map;
- import static java.util.Collections.emptyMap;
- import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.hasEntry;
/**
@@ -50,48 +44,69 @@ public class ManyDocumentsIT extends ESRestTestCase {
bulk.append("{\"index\":{}}\n");
bulk.append("{\"test\":\"test\"}\n");
}
- client().performRequest("POST", "/test/test/_bulk", singletonMap("refresh", "true"),
-     new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON));
+ Request request = new Request("POST", "/test/test/_bulk");
+ request.addParameter("refresh", "true");
+ request.setJsonEntity(bulk.toString());
+ client().performRequest(request);
}
public void testReindex() throws IOException {
- Map<String, Object> response = toMap(client().performRequest("POST", "/_reindex", emptyMap(), new StringEntity(
-     "{\"source\":{\"index\":\"test\"}, \"dest\":{\"index\":\"des\"}}",
-     ContentType.APPLICATION_JSON)));
+ Request request = new Request("POST", "/_reindex");
+ request.setJsonEntity(
+     "{\n" +
+     "  \"source\":{\n" +
+     "    \"index\":\"test\"\n" +
+     "  },\n" +
+     "  \"dest\":{\n" +
+     "    \"index\":\"des\"\n" +
+     "  }\n" +
+     "}");
+ Map<String, Object> response = entityAsMap(client().performRequest(request));
assertThat(response, hasEntry("total", count));
assertThat(response, hasEntry("created", count));
}
public void testReindexFromRemote() throws IOException {
- Map<?, ?> nodesInfo = toMap(client().performRequest("GET", "/_nodes/http"));
+ Map<?, ?> nodesInfo = entityAsMap(client().performRequest(new Request("GET", "/_nodes/http")));
nodesInfo = (Map<?, ?>) nodesInfo.get("nodes");
Map<?, ?> nodeInfo = (Map<?, ?>) nodesInfo.values().iterator().next();
Map<?, ?> http = (Map<?, ?>) nodeInfo.get("http");
String remote = "http://" + http.get("publish_address");
- Map<String, Object> response = toMap(client().performRequest("POST", "/_reindex", emptyMap(), new StringEntity(
-     "{\"source\":{\"index\":\"test\",\"remote\":{\"host\":\"" + remote + "\"}}, \"dest\":{\"index\":\"des\"}}",
-     ContentType.APPLICATION_JSON)));
+ Request request = new Request("POST", "/_reindex");
+ request.setJsonEntity(
+     "{\n" +
+     "  \"source\":{\n" +
+     "    \"index\":\"test\",\n" +
+     "    \"remote\":{\n" +
+     "      \"host\":\"" + remote + "\"\n" +
+     "    }\n" +
+     "  },\n" +
+     "  \"dest\":{\n" +
+     "    \"index\":\"des\"\n" +
+     "  }\n" +
+     "}");
+ Map<String, Object> response = entityAsMap(client().performRequest(request));
assertThat(response, hasEntry("total", count));
assertThat(response, hasEntry("created", count));
}
public void testUpdateByQuery() throws IOException {
- Map<String, Object> response = toMap(client().performRequest("POST", "/test/_update_by_query"));
+ Map<String, Object> response = entityAsMap(client().performRequest(new Request("POST", "/test/_update_by_query")));
assertThat(response, hasEntry("total", count));
assertThat(response, hasEntry("updated", count));
}
public void testDeleteByQuery() throws IOException {
- Map<String, Object> response = toMap(client().performRequest("POST", "/test/_delete_by_query", emptyMap(), new StringEntity(
-     "{\"query\":{\"match_all\":{}}}",
-     ContentType.APPLICATION_JSON)));
+ Request request = new Request("POST", "/test/_delete_by_query");
+ request.setJsonEntity(
+     "{\n" +
+     "  \"query\":{\n" +
+     "    \"match_all\": {}\n" +
+     "  }\n" +
+     "}");
+ Map<String, Object> response = entityAsMap(client().performRequest(request));
assertThat(response, hasEntry("total", count));
assertThat(response, hasEntry("deleted", count));
}
- static Map<String, Object> toMap(Response response) throws IOException {
-     return XContentHelper.convertToMap(JsonXContent.jsonXContent, response.getEntity().getContent(), false);
- }
}
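The hand-rolled `toMap` helper is removed in favor of `entityAsMap`, which the `ESRestTestCase` base class provides for the same body-to-map conversion. A sketch of the equivalence (the endpoint is illustrative):
["source","java"]
--------------------------------------------------
// Before (removed helper):
//   Map<String, Object> body = toMap(client().performRequest("GET", "/test"));
// After (base-class helper):
Map<String, Object> body = entityAsMap(client().performRequest(new Request("GET", "/test")));
--------------------------------------------------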
View File
@@ -19,25 +19,24 @@
 package org.elasticsearch.index.reindex.remote;
 
-import org.apache.http.HttpEntity;
 import org.apache.http.HttpHost;
-import org.apache.http.entity.ContentType;
-import org.apache.http.entity.StringEntity;
 import org.apache.http.util.EntityUtils;
+import org.elasticsearch.client.Request;
 import org.elasticsearch.client.Response;
-import org.elasticsearch.client.ResponseException;
 import org.elasticsearch.client.RestClient;
 import org.elasticsearch.common.Booleans;
 import org.elasticsearch.test.rest.ESRestTestCase;
 
 import java.io.IOException;
-import java.util.Map;
-import java.util.TreeMap;
-
-import static java.util.Collections.singletonMap;
 
 import static org.hamcrest.Matchers.containsString;
 
 public class ReindexFromOldRemoteIT extends ESRestTestCase {
+    /**
+     * Number of documents to test when reindexing from an old version.
+     */
+    private static final int DOCS = 5;
+
     private void oldEsTestCase(String portPropertyName, String requestsPerSecond) throws IOException {
         boolean enabled = Booleans.parseBoolean(System.getProperty("tests.fromOld"));
         assumeTrue("test is disabled, probably because this is windows", enabled);
@@ -45,17 +44,19 @@ public class ReindexFromOldRemoteIT extends ESRestTestCase {
         int oldEsPort = Integer.parseInt(System.getProperty(portPropertyName));
         try (RestClient oldEs = RestClient.builder(new HttpHost("127.0.0.1", oldEsPort)).build()) {
             try {
-                HttpEntity entity = new StringEntity("{\"settings\":{\"number_of_shards\": 1}}", ContentType.APPLICATION_JSON);
-                oldEs.performRequest("PUT", "/test", singletonMap("refresh", "true"), entity);
+                Request createIndex = new Request("PUT", "/test");
+                createIndex.setJsonEntity("{\"settings\":{\"number_of_shards\": 1}}");
+                oldEs.performRequest(createIndex);
 
-                entity = new StringEntity("{\"test\":\"test\"}", ContentType.APPLICATION_JSON);
-                oldEs.performRequest("PUT", "/test/doc/testdoc1", singletonMap("refresh", "true"), entity);
-                oldEs.performRequest("PUT", "/test/doc/testdoc2", singletonMap("refresh", "true"), entity);
-                oldEs.performRequest("PUT", "/test/doc/testdoc3", singletonMap("refresh", "true"), entity);
-                oldEs.performRequest("PUT", "/test/doc/testdoc4", singletonMap("refresh", "true"), entity);
-                oldEs.performRequest("PUT", "/test/doc/testdoc5", singletonMap("refresh", "true"), entity);
+                for (int i = 0; i < DOCS; i++) {
+                    Request doc = new Request("PUT", "/test/doc/testdoc" + i);
+                    doc.addParameter("refresh", "true");
+                    doc.setJsonEntity("{\"test\":\"test\"}");
+                    oldEs.performRequest(doc);
+                }
 
-                entity = new StringEntity(
+                Request reindex = new Request("POST", "/_reindex");
+                reindex.setJsonEntity(
                         "{\n"
                       + "  \"source\":{\n"
                       + "    \"index\": \"test\",\n"
@@ -67,36 +68,23 @@ public class ReindexFromOldRemoteIT extends ESRestTestCase {
                       + "  \"dest\": {\n"
                       + "    \"index\": \"test\"\n"
                       + "  }\n"
-                      + "}",
-                        ContentType.APPLICATION_JSON);
-                Map<String, String> params = new TreeMap<>();
-                params.put("refresh", "true");
-                params.put("pretty", "true");
+                      + "}");
+                reindex.addParameter("refresh", "true");
+                reindex.addParameter("pretty", "true");
                 if (requestsPerSecond != null) {
-                    params.put("requests_per_second", requestsPerSecond);
+                    reindex.addParameter("requests_per_second", requestsPerSecond);
                 }
-                client().performRequest("POST", "/_reindex", params, entity);
+                client().performRequest(reindex);
 
-                Response response = client().performRequest("POST", "test/_search", singletonMap("pretty", "true"));
+                Request search = new Request("POST", "/test/_search");
+                search.addParameter("pretty", "true");
+                Response response = client().performRequest(search);
                 String result = EntityUtils.toString(response.getEntity());
-                assertThat(result, containsString("\"_id\" : \"testdoc1\""));
+                for (int i = 0; i < DOCS; i++) {
+                    assertThat(result, containsString("\"_id\" : \"testdoc" + i + "\""));
+                }
             } finally {
-                try {
-                    oldEs.performRequest("DELETE", "/test");
-                } catch (ResponseException e) {
-                    /* Try not to throw ResponseException for as it'll eat the
-                     * real exception. This is because the rest client throws
-                     * exceptions in a "funny" way that isn't compatible with
-                     * `suppressed`. In the case of 404s we'll just log something
-                     * and move on because that just means that a previous
-                     * failure caused the index not to be created. */
-                    if (e.getResponse().getStatusLine().getStatusCode() == 404) {
-                        logger.warn("old index not deleted because it doesn't exist");
-                    } else {
-                        logger.error("failed to remove old index", e);
-                        fail("failed to remove old index, see log");
-                    }
-                }
+                oldEs.performRequest(new Request("DELETE", "/test"));
             }
         }
     }
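The same migration pattern repeats across the test changes in this commit: the old (method, endpoint, params, entity) argument list collapses into a single Request object. A minimal, self-contained sketch of the new style (host, port, index, and body below are illustrative, not taken from the diff):

import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class RequestStyleExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical local node; adjust host and port for a real cluster.
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200)).build()) {
            // Method and endpoint are fixed at construction; query parameters
            // and the JSON body are attached to the same object.
            Request index = new Request("PUT", "/example/doc/1");
            index.addParameter("refresh", "true");
            index.setJsonEntity("{\"test\":\"test\"}");
            Response response = client.performRequest(index);
            System.out.println(response.getStatusLine());
        }
    }
}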
@@ -24,6 +24,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
 import org.apache.http.HttpEntity;
 import org.apache.http.entity.ContentType;
 import org.apache.http.nio.entity.NStringEntity;
+import org.elasticsearch.client.Request;
 import org.elasticsearch.client.Response;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.PathUtils;
@@ -44,7 +45,6 @@ import java.net.URL;
 import java.util.List;
 import java.util.Map;
 
-import static java.util.Collections.emptyMap;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasSize;
@@ -70,8 +70,10 @@ public class RepositoryURLClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
      **/
     @Before
     public void registerRepositories() throws IOException {
-        Response clusterSettingsResponse = client().performRequest("GET", "/_cluster/settings?include_defaults=true" +
-            "&filter_path=defaults.path.repo,defaults.repositories.url.allowed_urls");
+        Request clusterSettingsRequest = new Request("GET", "/_cluster/settings");
+        clusterSettingsRequest.addParameter("include_defaults", "true");
+        clusterSettingsRequest.addParameter("filter_path", "defaults.path.repo,defaults.repositories.url.allowed_urls");
+        Response clusterSettingsResponse = client().performRequest(clusterSettingsRequest);
         Map<String, Object> clusterSettings = entityAsMap(clusterSettingsResponse);
 
         @SuppressWarnings("unchecked")
@@ -83,13 +85,17 @@ public class RepositoryURLClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
         final URI pathRepoUri = PathUtils.get(pathRepo).toUri().normalize();
 
         // Create a FS repository using the path.repo location
-        Response createFsRepositoryResponse = client().performRequest("PUT", "_snapshot/repository-fs", emptyMap(),
-            buildRepositorySettings(FsRepository.TYPE, Settings.builder().put("location", pathRepo).build()));
+        Request createFsRepositoryRequest = new Request("PUT", "/_snapshot/repository-fs");
+        createFsRepositoryRequest.setEntity(buildRepositorySettings(FsRepository.TYPE,
+            Settings.builder().put("location", pathRepo).build()));
+        Response createFsRepositoryResponse = client().performRequest(createFsRepositoryRequest);
         assertThat(createFsRepositoryResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
 
         // Create a URL repository using the file://{path.repo} URL
-        Response createFileRepositoryResponse = client().performRequest("PUT", "_snapshot/repository-file", emptyMap(),
-            buildRepositorySettings(URLRepository.TYPE, Settings.builder().put("url", pathRepoUri.toString()).build()));
+        Request createFileRepositoryRequest = new Request("PUT", "/_snapshot/repository-file");
+        createFileRepositoryRequest.setEntity(buildRepositorySettings(URLRepository.TYPE,
+            Settings.builder().put("url", pathRepoUri.toString()).build()));
+        Response createFileRepositoryResponse = client().performRequest(createFileRepositoryRequest);
         assertThat(createFileRepositoryResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
 
         // Create a URL repository using the http://{fixture} URL
@@ -99,8 +105,10 @@ public class RepositoryURLClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
             try {
                 InetAddress inetAddress = InetAddress.getByName(new URL(allowedUrl).getHost());
                 if (inetAddress.isAnyLocalAddress() || inetAddress.isLoopbackAddress()) {
-                    Response createUrlRepositoryResponse = client().performRequest("PUT", "_snapshot/repository-url", emptyMap(),
-                        buildRepositorySettings(URLRepository.TYPE, Settings.builder().put("url", allowedUrl).build()));
+                    Request createUrlRepositoryRequest = new Request("PUT", "/_snapshot/repository-url");
+                    createUrlRepositoryRequest.setEntity(buildRepositorySettings(URLRepository.TYPE,
+                        Settings.builder().put("url", allowedUrl).build()));
+                    Response createUrlRepositoryResponse = client().performRequest(createUrlRepositoryRequest);
                     assertThat(createUrlRepositoryResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
                     break;
                 }
@@ -126,4 +134,3 @@ public class RepositoryURLClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
         }
     }
 }
-
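Request.setEntity accepts any prebuilt HttpEntity, which is what the buildRepositorySettings(...) calls above return; setJsonEntity is the shorthand for plain JSON strings. A hedged sketch with an invented repository name and settings body:

import org.apache.http.HttpEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.nio.entity.NStringEntity;
import org.elasticsearch.client.Request;

public class RepositoryRequestExample {
    public static Request createRepositoryRequest() {
        Request request = new Request("PUT", "/_snapshot/example-repo");
        // Any HttpEntity works here; the content type travels with the entity.
        HttpEntity entity = new NStringEntity(
            "{\"type\":\"fs\",\"settings\":{\"location\":\"/tmp/example-repo\"}}",
            ContentType.APPLICATION_JSON);
        request.setEntity(entity);
        return request;
    }
}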
@@ -92,23 +92,26 @@ String s3TemporaryBasePath = System.getenv("amazon_s3_base_path_temporary")
 
 // If all these variables are missing then we are testing against the internal fixture instead, which has the following
 // credentials hard-coded in.
-if (!s3PermanentAccessKey && !s3PermanentSecretKey && !s3PermanentBucket && !s3PermanentBasePath
-    && !s3TemporaryAccessKey && !s3TemporarySecretKey && !s3TemporaryBucket && !s3TemporaryBasePath && !s3TemporarySessionToken) {
+if (!s3PermanentAccessKey && !s3PermanentSecretKey && !s3PermanentBucket && !s3PermanentBasePath) {
   s3PermanentAccessKey = 's3_integration_test_permanent_access_key'
   s3PermanentSecretKey = 's3_integration_test_permanent_secret_key'
   s3PermanentBucket = 'permanent-bucket-test'
   s3PermanentBasePath = 'integration_test'
+  useFixture = true
+} else if (!s3PermanentAccessKey || !s3PermanentSecretKey || !s3PermanentBucket || !s3PermanentBasePath) {
+  throw new IllegalArgumentException("not all options specified to run against external S3 service")
+}
 
+if (!s3TemporaryAccessKey && !s3TemporarySecretKey && !s3TemporaryBucket && !s3TemporaryBasePath && !s3TemporarySessionToken) {
   s3TemporaryAccessKey = 's3_integration_test_temporary_access_key'
   s3TemporarySecretKey = 's3_integration_test_temporary_secret_key'
   s3TemporaryBucket = 'temporary-bucket-test'
   s3TemporaryBasePath = 'integration_test'
   s3TemporarySessionToken = 's3_integration_test_temporary_session_token'
-  useFixture = true
-} else if (!s3PermanentAccessKey || !s3PermanentSecretKey || !s3PermanentBucket || !s3PermanentBasePath
-    || !s3TemporaryAccessKey || !s3TemporarySecretKey || !s3TemporaryBucket || !s3TemporaryBasePath || !s3TemporarySessionToken) {
+} else if (!s3TemporaryAccessKey || !s3TemporarySecretKey || !s3TemporaryBucket || !s3TemporaryBasePath || !s3TemporarySessionToken) {
   throw new IllegalArgumentException("not all options specified to run against external S3 service")
 }
 
@@ -296,6 +299,13 @@ processTestResources {
   MavenFilteringHack.filter(it, expansions)
 }
 
+project.afterEvaluate {
+  if (useFixture == false) {
+    // 30_repository_temporary_credentials is not ready for CI yet
+    integTestRunner.systemProperty 'tests.rest.blacklist', 'repository_s3/30_repository_temporary_credentials/*'
+  }
+}
+
 integTestCluster {
   keystoreSetting 's3.client.integration_test_permanent.access_key', s3PermanentAccessKey
   keystoreSetting 's3.client.integration_test_permanent.secret_key', s3PermanentSecretKey
@@ -19,16 +19,27 @@
 package org.elasticsearch.action.admin.cluster.snapshots.status;
 
+import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.action.support.broadcast.BroadcastShardResponse;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentFragment;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentParserUtils;
+import org.elasticsearch.index.Index;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus;
 
 import java.io.IOException;
 
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
+
 public class SnapshotIndexShardStatus extends BroadcastShardResponse implements ToXContentFragment {
 
     private SnapshotIndexShardStage stage = SnapshotIndexShardStage.INIT;
@@ -80,6 +91,14 @@ public class SnapshotIndexShardStatus extends BroadcastShardResponse implements
         this.nodeId = nodeId;
     }
 
+    SnapshotIndexShardStatus(ShardId shardId, SnapshotIndexShardStage stage, SnapshotStats stats, String nodeId, String failure) {
+        super(shardId);
+        this.stage = stage;
+        this.stats = stats;
+        this.nodeId = nodeId;
+        this.failure = failure;
+    }
+
     /**
      * Returns snapshot stage
      */
@@ -143,7 +162,7 @@ public class SnapshotIndexShardStatus extends BroadcastShardResponse implements
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject(Integer.toString(getShardId().getId()));
         builder.field(Fields.STAGE, getStage());
-        stats.toXContent(builder, params);
+        builder.field(SnapshotStats.Fields.STATS, stats, params);
         if (getNodeId() != null) {
             builder.field(Fields.NODE, getNodeId());
         }
@@ -153,4 +172,72 @@ public class SnapshotIndexShardStatus extends BroadcastShardResponse implements
         builder.endObject();
         return builder;
     }
+    static final ObjectParser.NamedObjectParser<SnapshotIndexShardStatus, String> PARSER;
+    static {
+        ConstructingObjectParser<SnapshotIndexShardStatus, ShardId> innerParser = new ConstructingObjectParser<>(
+            "snapshot_index_shard_status", true,
+            (Object[] parsedObjects, ShardId shard) -> {
+                int i = 0;
+                String rawStage = (String) parsedObjects[i++];
+                String nodeId = (String) parsedObjects[i++];
+                String failure = (String) parsedObjects[i++];
+                SnapshotStats stats = (SnapshotStats) parsedObjects[i];
+
+                SnapshotIndexShardStage stage;
+                try {
+                    stage = SnapshotIndexShardStage.valueOf(rawStage);
+                } catch (IllegalArgumentException iae) {
+                    throw new ElasticsearchParseException(
"failed to parse snapshot index shard status [{}][{}], unknonwn stage [{}]",
+                        shard.getIndex().getName(), shard.getId(), rawStage);
+                }
+                return new SnapshotIndexShardStatus(shard, stage, stats, nodeId, failure);
+            }
+        );
+        innerParser.declareString(constructorArg(), new ParseField(Fields.STAGE));
+        innerParser.declareString(optionalConstructorArg(), new ParseField(Fields.NODE));
+        innerParser.declareString(optionalConstructorArg(), new ParseField(Fields.REASON));
+        innerParser.declareObject(constructorArg(), (p, c) -> SnapshotStats.fromXContent(p), new ParseField(SnapshotStats.Fields.STATS));
+        PARSER = (p, indexId, shardName) -> {
+            // Combine the index name in the context with the shard name passed in for the named object parser
+            // into a ShardId to pass as context for the inner parser.
+            int shard;
+            try {
+                shard = Integer.parseInt(shardName);
+            } catch (NumberFormatException nfe) {
+                throw new ElasticsearchParseException(
+                    "failed to parse snapshot index shard status [{}], expected numeric shard id but got [{}]", indexId, shardName);
+            }
+            ShardId shardId = new ShardId(new Index(indexId, IndexMetaData.INDEX_UUID_NA_VALUE), shard);
+            return innerParser.parse(p, shardId);
+        };
+    }
+
+    public static SnapshotIndexShardStatus fromXContent(XContentParser parser, String indexId) throws IOException {
+        XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
+        return PARSER.parse(parser, indexId, parser.currentName());
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+
+        SnapshotIndexShardStatus that = (SnapshotIndexShardStatus) o;
+
+        if (stage != that.stage) return false;
+        if (stats != null ? !stats.equals(that.stats) : that.stats != null) return false;
+        if (nodeId != null ? !nodeId.equals(that.nodeId) : that.nodeId != null) return false;
+        return failure != null ? failure.equals(that.failure) : that.failure == null;
+    }
+
+    @Override
+    public int hashCode() {
+        int result = stage != null ? stage.hashCode() : 0;
+        result = 31 * result + (stats != null ? stats.hashCode() : 0);
+        result = 31 * result + (nodeId != null ? nodeId.hashCode() : 0);
+        result = 31 * result + (failure != null ? failure.hashCode() : 0);
+        return result;
+    }
 }
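For context: the JSON consumed by this parser keys each shard status by shard number, while the index name is supplied separately, which is why fromXContent takes the index id and expects the parser to sit on a FIELD_NAME. A sketch with a hand-written payload (all values invented):

import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotIndexShardStatus;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;

public class ShardStatusParseExample {
    public static void main(String[] args) throws Exception {
        String json = "{\"0\":{\"stage\":\"DONE\",\"stats\":{"
            + "\"incremental\":{\"file_count\":1,\"size_in_bytes\":10},"
            + "\"total\":{\"file_count\":1,\"size_in_bytes\":10},"
            + "\"start_time_in_millis\":0,\"time_in_millis\":1},"
            + "\"node\":\"node-1\"}}";
        try (XContentParser parser = JsonXContent.jsonXContent.createParser(
                NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
            parser.nextToken(); // START_OBJECT
            parser.nextToken(); // FIELD_NAME, the shard number "0"
            SnapshotIndexShardStatus status = SnapshotIndexShardStatus.fromXContent(parser, "my-index");
            System.out.println(status.getStage()); // DONE
        }
    }
}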
@@ -19,17 +19,24 @@
 package org.elasticsearch.action.admin.cluster.snapshots.status;
 
-import org.elasticsearch.common.xcontent.ToXContent.Params;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentFragment;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentParserUtils;
 
 import java.io.IOException;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.Iterator;
+import java.util.List;
 import java.util.Map;
 
+import static java.util.Collections.emptyMap;
 import static java.util.Collections.unmodifiableMap;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
 
 /**
  * Represents snapshot status of all shards in the index
@@ -57,6 +64,14 @@ public class SnapshotIndexStatus implements Iterable<SnapshotIndexShardStatus>,
         this.indexShards = unmodifiableMap(indexShards);
     }
 
+    public SnapshotIndexStatus(String index, Map<Integer, SnapshotIndexShardStatus> indexShards, SnapshotShardsStats shardsStats,
+                               SnapshotStats stats) {
+        this.index = index;
+        this.indexShards = indexShards;
+        this.shardsStats = shardsStats;
+        this.stats = stats;
+    }
+
     /**
      * Returns the index name
      */
@@ -97,8 +112,8 @@ public class SnapshotIndexStatus implements Iterable<SnapshotIndexShardStatus>,
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject(getIndex());
-        shardsStats.toXContent(builder, params);
-        stats.toXContent(builder, params);
+        builder.field(SnapshotShardsStats.Fields.SHARDS_STATS, shardsStats, params);
+        builder.field(SnapshotStats.Fields.STATS, stats, params);
         builder.startObject(Fields.SHARDS);
         for (SnapshotIndexShardStatus shard : indexShards.values()) {
             shard.toXContent(builder, params);
@@ -107,4 +122,61 @@ public class SnapshotIndexStatus implements Iterable<SnapshotIndexShardStatus>,
         builder.endObject();
         return builder;
     }
+    static final ObjectParser.NamedObjectParser<SnapshotIndexStatus, Void> PARSER;
+    static {
+        ConstructingObjectParser<SnapshotIndexStatus, String> innerParser = new ConstructingObjectParser<>(
+            "snapshot_index_status", true,
+            (Object[] parsedObjects, String index) -> {
+                int i = 0;
+                SnapshotShardsStats shardsStats = ((SnapshotShardsStats) parsedObjects[i++]);
+                SnapshotStats stats = ((SnapshotStats) parsedObjects[i++]);
+                @SuppressWarnings("unchecked") List<SnapshotIndexShardStatus> shardStatuses =
+                    (List<SnapshotIndexShardStatus>) parsedObjects[i];
+
+                final Map<Integer, SnapshotIndexShardStatus> indexShards;
+                if (shardStatuses == null || shardStatuses.isEmpty()) {
+                    indexShards = emptyMap();
+                } else {
+                    indexShards = new HashMap<>(shardStatuses.size());
+                    for (SnapshotIndexShardStatus shardStatus : shardStatuses) {
+                        indexShards.put(shardStatus.getShardId().getId(), shardStatus);
+                    }
+                }
+                return new SnapshotIndexStatus(index, indexShards, shardsStats, stats);
+            });
+        innerParser.declareObject(constructorArg(), (p, c) -> SnapshotShardsStats.PARSER.apply(p, null),
+            new ParseField(SnapshotShardsStats.Fields.SHARDS_STATS));
+        innerParser.declareObject(constructorArg(), (p, c) -> SnapshotStats.fromXContent(p),
+            new ParseField(SnapshotStats.Fields.STATS));
+        innerParser.declareNamedObjects(constructorArg(), SnapshotIndexShardStatus.PARSER, new ParseField(Fields.SHARDS));
+        PARSER = ((p, c, name) -> innerParser.apply(p, name));
+    }
+
+    public static SnapshotIndexStatus fromXContent(XContentParser parser) throws IOException {
+        XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
+        return PARSER.parse(parser, null, parser.currentName());
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+
+        SnapshotIndexStatus that = (SnapshotIndexStatus) o;
+
+        if (index != null ? !index.equals(that.index) : that.index != null) return false;
+        if (indexShards != null ? !indexShards.equals(that.indexShards) : that.indexShards != null) return false;
+        if (shardsStats != null ? !shardsStats.equals(that.shardsStats) : that.shardsStats != null) return false;
+        return stats != null ? stats.equals(that.stats) : that.stats == null;
+    }
+
+    @Override
+    public int hashCode() {
+        int result = index != null ? index.hashCode() : 0;
+        result = 31 * result + (indexShards != null ? indexShards.hashCode() : 0);
+        result = 31 * result + (shardsStats != null ? shardsStats.hashCode() : 0);
+        result = 31 * result + (stats != null ? stats.hashCode() : 0);
+        return result;
+    }
 }
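The NamedObjectParser indirection is easier to see in miniature. A toy parser (class and field names invented, not part of the change) showing how declareNamedObjects hands each JSON key to the per-entry parser as its context:

import java.util.List;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

public class NamedObjectsExample {
    public static final class Entry {
        public final String name;
        public final int value;
        Entry(String name, int value) { this.name = name; this.value = value; }
    }

    public static final class Holder {
        public final List<Entry> entries;
        Holder(List<Entry> entries) { this.entries = entries; }
    }

    // Inner parser: receives the enclosing JSON key ("a", "b", ...) as context,
    // just as the shard parser above receives the shard number.
    static final ConstructingObjectParser<Entry, String> ENTRY_PARSER =
        new ConstructingObjectParser<>("entry", true, (args, name) -> new Entry(name, (int) args[0]));
    static {
        ENTRY_PARSER.declareInt(constructorArg(), new ParseField("value"));
    }

    // Outer parser: handles {"entries": {"a": {"value": 1}, "b": {"value": 2}}}.
    @SuppressWarnings("unchecked")
    static final ConstructingObjectParser<Holder, Void> HOLDER_PARSER =
        new ConstructingObjectParser<>("holder", true, args -> new Holder((List<Entry>) args[0]));
    static {
        HOLDER_PARSER.declareNamedObjects(constructorArg(),
            (p, c, name) -> ENTRY_PARSER.apply(p, name), new ParseField("entries"));
    }
}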
@@ -19,17 +19,22 @@
 package org.elasticsearch.action.admin.cluster.snapshots.status;
 
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.ToXContentFragment;
+import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
 
 import java.io.IOException;
 import java.util.Collection;
 
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+
 /**
  * Status of a snapshot shards
  */
-public class SnapshotShardsStats implements ToXContentFragment {
+public class SnapshotShardsStats implements ToXContentObject {
 
     private int initializingShards;
     private int startedShards;
@@ -63,6 +68,16 @@
         }
     }
 
+    public SnapshotShardsStats(int initializingShards, int startedShards, int finalizingShards, int doneShards, int failedShards,
+                               int totalShards) {
+        this.initializingShards = initializingShards;
+        this.startedShards = startedShards;
+        this.finalizingShards = finalizingShards;
+        this.doneShards = doneShards;
+        this.failedShards = failedShards;
+        this.totalShards = totalShards;
+    }
+
     /**
      * Number of shards with the snapshot in the initializing stage
      */
@@ -117,15 +132,68 @@
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
-        builder.startObject(Fields.SHARDS_STATS);
-        builder.field(Fields.INITIALIZING, getInitializingShards());
-        builder.field(Fields.STARTED, getStartedShards());
-        builder.field(Fields.FINALIZING, getFinalizingShards());
-        builder.field(Fields.DONE, getDoneShards());
-        builder.field(Fields.FAILED, getFailedShards());
-        builder.field(Fields.TOTAL, getTotalShards());
+        builder.startObject();
+        {
+            builder.field(Fields.INITIALIZING, getInitializingShards());
+            builder.field(Fields.STARTED, getStartedShards());
+            builder.field(Fields.FINALIZING, getFinalizingShards());
+            builder.field(Fields.DONE, getDoneShards());
+            builder.field(Fields.FAILED, getFailedShards());
+            builder.field(Fields.TOTAL, getTotalShards());
+        }
         builder.endObject();
         return builder;
     }
+    static final ConstructingObjectParser<SnapshotShardsStats, Void> PARSER = new ConstructingObjectParser<>(
+        Fields.SHARDS_STATS, true,
+        (Object[] parsedObjects) -> {
+            int i = 0;
+            int initializingShards = (int) parsedObjects[i++];
+            int startedShards = (int) parsedObjects[i++];
+            int finalizingShards = (int) parsedObjects[i++];
+            int doneShards = (int) parsedObjects[i++];
+            int failedShards = (int) parsedObjects[i++];
+            int totalShards = (int) parsedObjects[i];
+            return new SnapshotShardsStats(initializingShards, startedShards, finalizingShards, doneShards, failedShards, totalShards);
+        }
+    );
+    static {
+        PARSER.declareInt(constructorArg(), new ParseField(Fields.INITIALIZING));
+        PARSER.declareInt(constructorArg(), new ParseField(Fields.STARTED));
+        PARSER.declareInt(constructorArg(), new ParseField(Fields.FINALIZING));
+        PARSER.declareInt(constructorArg(), new ParseField(Fields.DONE));
+        PARSER.declareInt(constructorArg(), new ParseField(Fields.FAILED));
+        PARSER.declareInt(constructorArg(), new ParseField(Fields.TOTAL));
+    }
+
+    public static SnapshotShardsStats fromXContent(XContentParser parser) throws IOException {
+        return PARSER.apply(parser, null);
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+
+        SnapshotShardsStats that = (SnapshotShardsStats) o;
+
+        if (initializingShards != that.initializingShards) return false;
+        if (startedShards != that.startedShards) return false;
+        if (finalizingShards != that.finalizingShards) return false;
+        if (doneShards != that.doneShards) return false;
+        if (failedShards != that.failedShards) return false;
+        return totalShards == that.totalShards;
+    }
+
+    @Override
+    public int hashCode() {
+        int result = initializingShards;
+        result = 31 * result + startedShards;
+        result = 31 * result + finalizingShards;
+        result = 31 * result + doneShards;
+        result = 31 * result + failedShards;
+        result = 31 * result + totalShards;
+        return result;
+    }
 }
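A quick usage sketch for the new parser, with invented counts; fromXContent simply drives the ConstructingObjectParser over the flat object of six shard counters:

import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotShardsStats;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;

public class ShardsStatsParseExample {
    public static void main(String[] args) throws Exception {
        String json = "{\"initializing\":0,\"started\":1,\"finalizing\":0,\"done\":3,\"failed\":0,\"total\":4}";
        try (XContentParser parser = JsonXContent.jsonXContent.createParser(
                NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
            SnapshotShardsStats stats = SnapshotShardsStats.fromXContent(parser);
            System.out.println(stats.getDoneShards()); // 3
        }
    }
}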
@@ -26,12 +26,14 @@ import org.elasticsearch.common.io.stream.Streamable;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.ToXContentFragment;
+import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentParserUtils;
 
 import java.io.IOException;
 
-public class SnapshotStats implements Streamable, ToXContentFragment {
+public class SnapshotStats implements Streamable, ToXContentObject {
 
     private long startTime;
     private long time;
@@ -176,35 +178,132 @@
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
-        builder.startObject(Fields.STATS)
-            // incremental starts
-            .startObject(Fields.INCREMENTAL)
-                .field(Fields.FILE_COUNT, getIncrementalFileCount())
-                .humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getIncrementalSize()))
-            // incremental ends
-            .endObject();
+        builder.startObject();
+        {
+            builder.startObject(Fields.INCREMENTAL);
+            {
+                builder.field(Fields.FILE_COUNT, getIncrementalFileCount());
+                builder.humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getIncrementalSize()));
+            }
+            builder.endObject();
 
-        if (getProcessedFileCount() != getIncrementalFileCount()) {
-            // processed starts
-            builder.startObject(Fields.PROCESSED)
-                .field(Fields.FILE_COUNT, getProcessedFileCount())
-                .humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getProcessedSize()))
-            // processed ends
-                .endObject();
-        }
+            if (getProcessedFileCount() != getIncrementalFileCount()) {
+                builder.startObject(Fields.PROCESSED);
+                {
+                    builder.field(Fields.FILE_COUNT, getProcessedFileCount());
+                    builder.humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getProcessedSize()));
+                }
+                builder.endObject();
+            }
 
-        // total starts
-        builder.startObject(Fields.TOTAL)
-            .field(Fields.FILE_COUNT, getTotalFileCount())
-            .humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getTotalSize()))
-            // total ends
-            .endObject();
-        // timings stats
-        builder.field(Fields.START_TIME_IN_MILLIS, getStartTime())
-            .humanReadableField(Fields.TIME_IN_MILLIS, Fields.TIME, new TimeValue(getTime()));
+            builder.startObject(Fields.TOTAL);
+            {
+                builder.field(Fields.FILE_COUNT, getTotalFileCount());
+                builder.humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getTotalSize()));
+            }
+            builder.endObject();
 
+            // timings stats
+            builder.field(Fields.START_TIME_IN_MILLIS, getStartTime());
+            builder.humanReadableField(Fields.TIME_IN_MILLIS, Fields.TIME, new TimeValue(getTime()));
+        }
         return builder.endObject();
     }
+    public static SnapshotStats fromXContent(XContentParser parser) throws IOException {
+        // Parse this old school style instead of using the ObjectParser since there's an impedance mismatch between how the
+        // object has historically been written as JSON versus how it is structured in Java.
+        XContentParser.Token token = parser.currentToken();
+        if (token == null) {
+            token = parser.nextToken();
+        }
+        XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation);
+
+        long startTime = 0;
+        long time = 0;
+        int incrementalFileCount = 0;
+        int totalFileCount = 0;
+        int processedFileCount = 0;
+        long incrementalSize = 0;
+        long totalSize = 0;
+        long processedSize = 0;
+
+        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+            XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
+            String currentName = parser.currentName();
+            token = parser.nextToken();
+            if (currentName.equals(Fields.INCREMENTAL)) {
+                XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation);
+                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+                    XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
+                    String innerName = parser.currentName();
+                    token = parser.nextToken();
+                    if (innerName.equals(Fields.FILE_COUNT)) {
+                        XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation);
+                        incrementalFileCount = parser.intValue();
+                    } else if (innerName.equals(Fields.SIZE_IN_BYTES)) {
+                        XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation);
+                        incrementalSize = parser.longValue();
+                    } else {
+                        // Unknown sub field, skip
+                        if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.START_ARRAY) {
+                            parser.skipChildren();
+                        }
+                    }
+                }
+            } else if (currentName.equals(Fields.PROCESSED)) {
+                XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation);
+                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+                    XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
+                    String innerName = parser.currentName();
+                    token = parser.nextToken();
+                    if (innerName.equals(Fields.FILE_COUNT)) {
+                        XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation);
+                        processedFileCount = parser.intValue();
+                    } else if (innerName.equals(Fields.SIZE_IN_BYTES)) {
+                        XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation);
+                        processedSize = parser.longValue();
+                    } else {
+                        // Unknown sub field, skip
+                        if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.START_ARRAY) {
+                            parser.skipChildren();
+                        }
+                    }
+                }
+            } else if (currentName.equals(Fields.TOTAL)) {
+                XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation);
+                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+                    XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
+                    String innerName = parser.currentName();
+                    token = parser.nextToken();
+                    if (innerName.equals(Fields.FILE_COUNT)) {
+                        XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation);
+                        totalFileCount = parser.intValue();
+                    } else if (innerName.equals(Fields.SIZE_IN_BYTES)) {
+                        XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation);
+                        totalSize = parser.longValue();
+                    } else {
+                        // Unknown sub field, skip
+                        if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.START_ARRAY) {
+                            parser.skipChildren();
+                        }
+                    }
+                }
+            } else if (currentName.equals(Fields.START_TIME_IN_MILLIS)) {
+                XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation);
+                startTime = parser.longValue();
+            } else if (currentName.equals(Fields.TIME_IN_MILLIS)) {
+                XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation);
+                time = parser.longValue();
+            } else {
+                // Unknown field, skip
+                if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.START_ARRAY) {
+                    parser.skipChildren();
+                }
+            }
+        }
+        return new SnapshotStats(startTime, time, incrementalFileCount, totalFileCount, processedFileCount, incrementalSize, totalSize,
+            processedSize);
+    }
     void add(SnapshotStats stats) {
         incrementalFileCount += stats.incrementalFileCount;
         totalFileCount += stats.totalFileCount;
@@ -229,4 +328,34 @@ public class SnapshotStats implements Streamable, ToXContentFragment {
             time = endTime - startTime;
         }
     }
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+
+        SnapshotStats that = (SnapshotStats) o;
+
+        if (startTime != that.startTime) return false;
+        if (time != that.time) return false;
+        if (incrementalFileCount != that.incrementalFileCount) return false;
+        if (totalFileCount != that.totalFileCount) return false;
+        if (processedFileCount != that.processedFileCount) return false;
+        if (incrementalSize != that.incrementalSize) return false;
+        if (totalSize != that.totalSize) return false;
+        return processedSize == that.processedSize;
+    }
+
+    @Override
+    public int hashCode() {
+        int result = (int) (startTime ^ (startTime >>> 32));
+        result = 31 * result + (int) (time ^ (time >>> 32));
+        result = 31 * result + incrementalFileCount;
+        result = 31 * result + totalFileCount;
+        result = 31 * result + processedFileCount;
+        result = 31 * result + (int) (incrementalSize ^ (incrementalSize >>> 32));
+        result = 31 * result + (int) (totalSize ^ (totalSize >>> 32));
+        result = 31 * result + (int) (processedSize ^ (processedSize >>> 32));
+        return result;
+    }
 }
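A usage sketch of the hand-rolled parser, with invented numbers; the processed section is optional on the wire, mirroring the conditional in toXContent above:

import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStats;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;

public class SnapshotStatsParseExample {
    public static void main(String[] args) throws Exception {
        String json = "{\"incremental\":{\"file_count\":2,\"size_in_bytes\":200},"
            + "\"processed\":{\"file_count\":1,\"size_in_bytes\":100},"
            + "\"total\":{\"file_count\":5,\"size_in_bytes\":500},"
            + "\"start_time_in_millis\":1531382400000,\"time_in_millis\":1500}";
        try (XContentParser parser = JsonXContent.jsonXContent.createParser(
                NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
            SnapshotStats stats = SnapshotStats.fromXContent(parser);
            System.out.println(stats.getProcessedFileCount()); // 1
        }
    }
}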
@@ -20,15 +20,21 @@
 package org.elasticsearch.action.admin.cluster.snapshots.status;
 
 import org.elasticsearch.Version;
+import org.elasticsearch.cluster.SnapshotsInProgress;
 import org.elasticsearch.cluster.SnapshotsInProgress.State;
+import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.snapshots.Snapshot;
+import org.elasticsearch.snapshots.SnapshotId;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -40,7 +46,11 @@ import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
 
+import static java.util.Collections.emptyList;
+import static java.util.Collections.emptyMap;
 import static java.util.Collections.unmodifiableMap;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
 
 /**
  * Status of a snapshot
@@ -72,6 +82,18 @@ public class SnapshotStatus implements ToXContentObject, Streamable {
         updateShardStats();
     }
 
+    private SnapshotStatus(Snapshot snapshot, State state, List<SnapshotIndexShardStatus> shards,
+                           Map<String, SnapshotIndexStatus> indicesStatus, SnapshotShardsStats shardsStats,
+                           SnapshotStats stats, Boolean includeGlobalState) {
+        this.snapshot = snapshot;
+        this.state = state;
+        this.shards = shards;
+        this.indicesStatus = indicesStatus;
+        this.shardsStats = shardsStats;
+        this.stats = stats;
+        this.includeGlobalState = includeGlobalState;
+    }
+
     SnapshotStatus() {
     }
 
@@ -207,8 +229,8 @@ public class SnapshotStatus implements ToXContentObject, Streamable {
         if (includeGlobalState != null) {
             builder.field(INCLUDE_GLOBAL_STATE, includeGlobalState);
         }
-        shardsStats.toXContent(builder, params);
-        stats.toXContent(builder, params);
+        builder.field(SnapshotShardsStats.Fields.SHARDS_STATS, shardsStats, params);
+        builder.field(SnapshotStats.Fields.STATS, stats, params);
         builder.startObject(INDICES);
         for (SnapshotIndexStatus indexStatus : getIndices().values()) {
             indexStatus.toXContent(builder, params);
@@ -218,6 +240,52 @@ public class SnapshotStatus implements ToXContentObject, Streamable {
         return builder;
     }
+    static final ConstructingObjectParser<SnapshotStatus, Void> PARSER = new ConstructingObjectParser<>(
+        "snapshot_status", true,
+        (Object[] parsedObjects) -> {
+            int i = 0;
+            String name = (String) parsedObjects[i++];
+            String repository = (String) parsedObjects[i++];
+            String uuid = (String) parsedObjects[i++];
+            String rawState = (String) parsedObjects[i++];
+            Boolean includeGlobalState = (Boolean) parsedObjects[i++];
+            SnapshotStats stats = ((SnapshotStats) parsedObjects[i++]);
+            SnapshotShardsStats shardsStats = ((SnapshotShardsStats) parsedObjects[i++]);
+            @SuppressWarnings("unchecked") List<SnapshotIndexStatus> indices = ((List<SnapshotIndexStatus>) parsedObjects[i]);
+
+            Snapshot snapshot = new Snapshot(repository, new SnapshotId(name, uuid));
+            SnapshotsInProgress.State state = SnapshotsInProgress.State.valueOf(rawState);
+            Map<String, SnapshotIndexStatus> indicesStatus;
+            List<SnapshotIndexShardStatus> shards;
+            if (indices == null || indices.isEmpty()) {
+                indicesStatus = emptyMap();
+                shards = emptyList();
+            } else {
+                indicesStatus = new HashMap<>(indices.size());
+                shards = new ArrayList<>();
+                for (SnapshotIndexStatus index : indices) {
+                    indicesStatus.put(index.getIndex(), index);
+                    shards.addAll(index.getShards().values());
+                }
+            }
+            return new SnapshotStatus(snapshot, state, shards, indicesStatus, shardsStats, stats, includeGlobalState);
+        });
+    static {
+        PARSER.declareString(constructorArg(), new ParseField(SNAPSHOT));
+        PARSER.declareString(constructorArg(), new ParseField(REPOSITORY));
+        PARSER.declareString(constructorArg(), new ParseField(UUID));
+        PARSER.declareString(constructorArg(), new ParseField(STATE));
+        PARSER.declareBoolean(optionalConstructorArg(), new ParseField(INCLUDE_GLOBAL_STATE));
+        PARSER.declareField(constructorArg(), SnapshotStats::fromXContent, new ParseField(SnapshotStats.Fields.STATS),
+            ObjectParser.ValueType.OBJECT);
+        PARSER.declareObject(constructorArg(), SnapshotShardsStats.PARSER, new ParseField(SnapshotShardsStats.Fields.SHARDS_STATS));
+        PARSER.declareNamedObjects(constructorArg(), SnapshotIndexStatus.PARSER, new ParseField(INDICES));
+    }
+
+    public static SnapshotStatus fromXContent(XContentParser parser) throws IOException {
+        return PARSER.parse(parser, null);
+    }
     private void updateShardStats() {
         stats = new SnapshotStats();
         shardsStats = new SnapshotShardsStats(shards);
@@ -225,4 +293,31 @@ public class SnapshotStatus implements ToXContentObject, Streamable {
             stats.add(shard.getStats());
         }
     }
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+
+        SnapshotStatus that = (SnapshotStatus) o;
+
+        if (snapshot != null ? !snapshot.equals(that.snapshot) : that.snapshot != null) return false;
+        if (state != that.state) return false;
+        if (indicesStatus != null ? !indicesStatus.equals(that.indicesStatus) : that.indicesStatus != null)
+            return false;
+        if (shardsStats != null ? !shardsStats.equals(that.shardsStats) : that.shardsStats != null) return false;
+        if (stats != null ? !stats.equals(that.stats) : that.stats != null) return false;
+        return includeGlobalState != null ? includeGlobalState.equals(that.includeGlobalState) : that.includeGlobalState == null;
+    }
+
+    @Override
+    public int hashCode() {
+        int result = snapshot != null ? snapshot.hashCode() : 0;
+        result = 31 * result + (state != null ? state.hashCode() : 0);
+        result = 31 * result + (indicesStatus != null ? indicesStatus.hashCode() : 0);
+        result = 31 * result + (shardsStats != null ? shardsStats.hashCode() : 0);
+        result = 31 * result + (stats != null ? stats.hashCode() : 0);
+        result = 31 * result + (includeGlobalState != null ? includeGlobalState.hashCode() : 0);
+        return result;
+    }
 }
@@ -20,16 +20,21 @@
 package org.elasticsearch.action.admin.cluster.snapshots.status;
 
 import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+
 /**
  * Snapshot status response
  */
@@ -85,4 +90,33 @@ public class SnapshotsStatusResponse extends ActionResponse implements ToXContentObject {
         return builder;
     }
+    private static final ConstructingObjectParser<SnapshotsStatusResponse, Void> PARSER = new ConstructingObjectParser<>(
+        "snapshots_status_response", true,
+        (Object[] parsedObjects) -> {
+            @SuppressWarnings("unchecked") List<SnapshotStatus> snapshots = (List<SnapshotStatus>) parsedObjects[0];
+            return new SnapshotsStatusResponse(snapshots);
+        }
+    );
+    static {
+        PARSER.declareObjectArray(constructorArg(), SnapshotStatus.PARSER, new ParseField("snapshots"));
+    }
+
+    public static SnapshotsStatusResponse fromXContent(XContentParser parser) throws IOException {
+        return PARSER.parse(parser, null);
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+
+        SnapshotsStatusResponse response = (SnapshotsStatusResponse) o;
+
+        return snapshots != null ? snapshots.equals(response.snapshots) : response.snapshots == null;
+    }
+
+    @Override
+    public int hashCode() {
+        return snapshots != null ? snapshots.hashCode() : 0;
+    }
 }
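Putting the new parsers together, a self-contained sketch of parsing a whole status response on the client side; the payload below is hand-written sample data, not real server output:

import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;

public class SnapshotsStatusParseExample {
    public static void main(String[] args) throws Exception {
        String stats = "{\"incremental\":{\"file_count\":1,\"size_in_bytes\":10},"
            + "\"total\":{\"file_count\":1,\"size_in_bytes\":10},"
            + "\"start_time_in_millis\":0,\"time_in_millis\":1}";
        String shardsStats = "{\"initializing\":0,\"started\":0,\"finalizing\":0,"
            + "\"done\":1,\"failed\":0,\"total\":1}";
        // One snapshot, one index, one shard; every level nests its own
        // shards_stats and stats objects, as written by toXContent above.
        String json = "{\"snapshots\":[{"
            + "\"snapshot\":\"snap-1\",\"repository\":\"repo-1\",\"uuid\":\"abc123\","
            + "\"state\":\"SUCCESS\",\"include_global_state\":true,"
            + "\"shards_stats\":" + shardsStats + ",\"stats\":" + stats + ","
            + "\"indices\":{\"idx\":{\"shards_stats\":" + shardsStats + ","
            + "\"stats\":" + stats + ",\"shards\":{\"0\":{\"stage\":\"DONE\","
            + "\"stats\":" + stats + ",\"node\":\"node-1\"}}}}}]}";
        try (XContentParser parser = JsonXContent.jsonXContent.createParser(
                NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
            SnapshotsStatusResponse response = SnapshotsStatusResponse.fromXContent(parser);
            System.out.println(response.getSnapshots().size()); // 1
        }
    }
}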
@@ -23,7 +23,6 @@ import com.carrotsearch.hppc.LongArrayList;
 import com.carrotsearch.hppc.cursors.IntObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.rollover.RolloverInfo;
 import org.elasticsearch.action.support.ActiveShardCount;
@@ -56,10 +55,11 @@ import org.elasticsearch.index.Index;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.rest.RestStatus;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
 
 import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.EnumSet;
@@ -1345,7 +1345,7 @@ public class IndexMetaData implements Diffable<IndexMetaData>, ToXContentFragment {
         }
         Long creationDate = settings.getAsLong(SETTING_CREATION_DATE, null);
         if (creationDate != null) {
-            DateTime creationDateTime = new DateTime(creationDate, DateTimeZone.UTC);
+            ZonedDateTime creationDateTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(creationDate), ZoneOffset.UTC);
             builder.put(SETTING_CREATION_DATE_STRING, creationDateTime.toString());
         }
         return builder.build();
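The Joda-to-java.time swap is mechanical; a sketch of the equivalence with an invented epoch value:

import java.time.Instant;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;

public class CreationDateExample {
    public static void main(String[] args) {
        long creationDate = 1531382400000L; // sample epoch millis
        // Replacement for: new DateTime(creationDate, DateTimeZone.UTC)
        ZonedDateTime creationDateTime =
            ZonedDateTime.ofInstant(Instant.ofEpochMilli(creationDate), ZoneOffset.UTC);
        // Note the rendered strings differ slightly: Joda always prints
        // milliseconds, while java.time omits zero-valued fields.
        System.out.println(creationDateTime); // 2018-07-12T08:00Z
    }
}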
@@ -73,11 +73,10 @@ import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.indices.InvalidIndexNameException;
 import org.elasticsearch.indices.cluster.IndicesClusterStateService.AllocatedIndices.IndexRemovalReason;
 import org.elasticsearch.threadpool.ThreadPool;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
 
 import java.io.UnsupportedEncodingException;
 import java.nio.file.Path;
+import java.time.Instant;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -383,7 +382,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
         }
 
         if (indexSettingsBuilder.get(SETTING_CREATION_DATE) == null) {
-            indexSettingsBuilder.put(SETTING_CREATION_DATE, new DateTime(DateTimeZone.UTC).getMillis());
+            indexSettingsBuilder.put(SETTING_CREATION_DATE, Instant.now().toEpochMilli());
         }
         indexSettingsBuilder.put(IndexMetaData.SETTING_INDEX_PROVIDED_NAME, request.getProvidedName());
         indexSettingsBuilder.put(SETTING_INDEX_UUID, UUIDs.randomBase64UUID());
@@ -52,12 +52,6 @@ import static java.time.temporal.ChronoField.SECOND_OF_MINUTE;
 
 public class DateFormatters {
 
-    private static final DateTimeFormatter TIME_ZONE_FORMATTER = new DateTimeFormatterBuilder()
-        .optionalStart().appendZoneId().optionalEnd()
-        .optionalStart().appendOffset("+HHmm", "Z").optionalEnd()
-        .optionalStart().appendOffset("+HH:mm", "Z").optionalEnd()
-        .toFormatter(Locale.ROOT);
-
     private static final DateTimeFormatter TIME_ZONE_FORMATTER_ZONE_ID = new DateTimeFormatterBuilder()
         .appendZoneId()
         .toFormatter(Locale.ROOT);
@@ -70,12 +64,80 @@ public class DateFormatters {
         .appendOffset("+HH:mm", "Z")
         .toFormatter(Locale.ROOT);
 
+    private static final DateTimeFormatter TIME_ZONE_FORMATTER = new DateTimeFormatterBuilder()
+        .optionalStart().appendZoneId().optionalEnd()
+        .optionalStart().appendOffset("+HHmm", "Z").optionalEnd()
+        .optionalStart().appendOffset("+HH:mm", "Z").optionalEnd()
+        .toFormatter(Locale.ROOT);
+
     private static final DateTimeFormatter OPTIONAL_TIME_ZONE_FORMATTER = new DateTimeFormatterBuilder()
         .optionalStart()
         .append(TIME_ZONE_FORMATTER)
         .optionalEnd()
         .toFormatter(Locale.ROOT);
+    private static final DateTimeFormatter STRICT_YEAR_MONTH_DAY_FORMATTER = new DateTimeFormatterBuilder()
+        .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
+        .appendLiteral("-")
+        .appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE)
+        .appendLiteral('-')
+        .appendValue(DAY_OF_MONTH, 2, 2, SignStyle.NOT_NEGATIVE)
+        .toFormatter(Locale.ROOT);
+
+    private static final DateTimeFormatter STRICT_HOUR_MINUTE_SECOND_FORMATTER = new DateTimeFormatterBuilder()
+        .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
+        .appendLiteral(':')
+        .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
+        .appendLiteral(':')
+        .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
+        .toFormatter(Locale.ROOT);
+
+    private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER_1 = new DateTimeFormatterBuilder()
+        .append(STRICT_YEAR_MONTH_DAY_FORMATTER)
+        .optionalStart()
+        .appendLiteral('T')
+        .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER)
+        .optionalStart()
+        .appendFraction(MILLI_OF_SECOND, 3, 3, true)
+        .optionalEnd()
+        .optionalStart()
+        .append(TIME_ZONE_FORMATTER_WITHOUT_COLON)
+        .optionalEnd()
+        .optionalEnd()
+        .toFormatter(Locale.ROOT);
+
+    private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER_2 = new DateTimeFormatterBuilder()
+        .append(STRICT_YEAR_MONTH_DAY_FORMATTER)
+        .optionalStart()
+        .appendLiteral('T')
+        .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER)
+        .optionalStart()
+        .appendFraction(MILLI_OF_SECOND, 3, 3, true)
+        .optionalEnd()
+        .optionalStart()
+        .append(TIME_ZONE_FORMATTER_WITH_COLON)
+        .optionalEnd()
+        .optionalEnd()
+        .toFormatter(Locale.ROOT);
+
+    private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER_3 = new DateTimeFormatterBuilder()
+        .append(STRICT_YEAR_MONTH_DAY_FORMATTER)
+        .optionalStart()
+        .appendLiteral('T')
+        .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER)
+        .optionalStart()
+        .appendFraction(MILLI_OF_SECOND, 3, 3, true)
+        .optionalEnd()
+        .optionalStart()
+        .append(TIME_ZONE_FORMATTER_ZONE_ID)
+        .optionalEnd()
+        .optionalEnd()
+        .toFormatter(Locale.ROOT);
+
+    private static final CompoundDateTimeFormatter STRICT_DATE_OPTIONAL_TIME =
+        new CompoundDateTimeFormatter(STRICT_DATE_OPTIONAL_TIME_FORMATTER_1, STRICT_DATE_OPTIONAL_TIME_FORMATTER_2,
+            STRICT_DATE_OPTIONAL_TIME_FORMATTER_3);
+
private static final DateTimeFormatter BASIC_TIME_NO_MILLIS_FORMATTER = new DateTimeFormatterBuilder()
.appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
@@ -258,7 +320,8 @@ public class DateFormatters {
.append(OPTIONAL_TIME_ZONE_FORMATTER)
.toFormatter(Locale.ROOT));
-private static final CompoundDateTimeFormatter DATE_OPTIONAL_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder()
+private static final CompoundDateTimeFormatter DATE_OPTIONAL_TIME = new CompoundDateTimeFormatter(STRICT_DATE_OPTIONAL_TIME.printer,
+new DateTimeFormatterBuilder()
.append(DATE_FORMATTER)
.parseLenient()
.optionalStart()
@@ -560,14 +623,6 @@ public class DateFormatters {
private static final CompoundDateTimeFormatter STRICT_DATE_HOUR_MINUTE = new CompoundDateTimeFormatter(
DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm", Locale.ROOT));
-private static final DateTimeFormatter STRICT_YEAR_MONTH_DAY_FORMATTER = new DateTimeFormatterBuilder()
-.appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
-.appendLiteral("-")
-.appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE)
-.appendLiteral('-')
-.appendValue(DAY_OF_MONTH, 2, 2, SignStyle.NOT_NEGATIVE)
-.toFormatter(Locale.ROOT);
private static final CompoundDateTimeFormatter STRICT_YEAR_MONTH_DAY = new CompoundDateTimeFormatter(STRICT_YEAR_MONTH_DAY_FORMATTER);
private static final CompoundDateTimeFormatter STRICT_YEAR_MONTH = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder()
@@ -580,14 +635,6 @@ public class DateFormatters {
.appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
.toFormatter(Locale.ROOT));
-private static final DateTimeFormatter STRICT_HOUR_MINUTE_SECOND_FORMATTER = new DateTimeFormatterBuilder()
-.appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
-.appendLiteral(':')
-.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
-.appendLiteral(':')
-.appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
-.toFormatter(Locale.ROOT);
private static final CompoundDateTimeFormatter STRICT_HOUR_MINUTE_SECOND =
new CompoundDateTimeFormatter(STRICT_HOUR_MINUTE_SECOND_FORMATTER);
@@ -601,18 +648,6 @@ public class DateFormatters {
.append(OPTIONAL_TIME_ZONE_FORMATTER)
.toFormatter(Locale.ROOT));
-private static final CompoundDateTimeFormatter STRICT_DATE_OPTIONAL_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder()
-.append(STRICT_YEAR_MONTH_DAY_FORMATTER)
-.optionalStart()
-.appendLiteral('T')
-.append(STRICT_HOUR_MINUTE_SECOND_FORMATTER)
-.optionalStart()
-.appendFraction(MILLI_OF_SECOND, 3, 3, true)
-.optionalEnd()
-.append(OPTIONAL_TIME_ZONE_FORMATTER)
-.optionalEnd()
-.toFormatter(Locale.ROOT));
private static final CompoundDateTimeFormatter STRICT_ORDINAL_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter(
new DateTimeFormatterBuilder()
.appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
@@ -918,8 +953,8 @@ public class DateFormatters {
return forPattern(formats[0], locale);
} else {
Collection<DateTimeFormatter> parsers = new LinkedHashSet<>(formats.length);
-for (int i = 0; i < formats.length; i++) {
-CompoundDateTimeFormatter dateTimeFormatter = forPattern(formats[i], locale);
+for (String format : formats) {
+CompoundDateTimeFormatter dateTimeFormatter = forPattern(format, locale);
try {
parsers.addAll(Arrays.asList(dateTimeFormatter.parsers));
} catch (IllegalArgumentException e) {
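The composite formatters above rely on nested optionalStart()/optionalEnd() sections, which let a single java.time formatter accept several input shapes. A minimal standalone sketch of the same builder pattern (plain java.time, without Elasticsearch's CompoundDateTimeFormatter wrapper; field widths chosen for illustration):

import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.format.SignStyle;
import java.time.temporal.ChronoField;
import java.util.Locale;

public class OptionalTimeDemo {
    public static void main(String[] args) {
        DateTimeFormatter formatter = new DateTimeFormatterBuilder()
            .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
            .appendLiteral('-')
            .appendValue(ChronoField.MONTH_OF_YEAR, 2)
            .appendLiteral('-')
            .appendValue(ChronoField.DAY_OF_MONTH, 2)
            .optionalStart()                // the whole time part may be absent
            .appendLiteral('T')
            .appendValue(ChronoField.HOUR_OF_DAY, 2)
            .appendLiteral(':')
            .appendValue(ChronoField.MINUTE_OF_HOUR, 2)
            .appendLiteral(':')
            .appendValue(ChronoField.SECOND_OF_MINUTE, 2)
            .optionalStart()                // the fraction is optional within the time part
            .appendFraction(ChronoField.MILLI_OF_SECOND, 3, 3, true)
            .optionalEnd()
            .optionalEnd()
            .toFormatter(Locale.ROOT);
        // Both inputs parse with the same formatter:
        System.out.println(formatter.parse("2018-07-12"));
        System.out.println(formatter.parse("2018-07-12T08:57:39.123"));
    }
}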

View File

@@ -45,9 +45,10 @@ import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.RestResponse;
import org.elasticsearch.rest.action.RestActionListener;
import org.elasticsearch.rest.action.RestResponseListener;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
+import java.time.Instant;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
@@ -379,7 +380,7 @@ public class RestIndicesAction extends AbstractCatAction {
table.addCell(primaryStats.getDocs() == null ? null : primaryStats.getDocs().getDeleted());
table.addCell(indexMetaData.getCreationDate());
-table.addCell(new DateTime(indexMetaData.getCreationDate(), DateTimeZone.UTC));
+table.addCell(ZonedDateTime.ofInstant(Instant.ofEpochMilli(indexMetaData.getCreationDate()), ZoneOffset.UTC));
table.addCell(totalStats.getStore() == null ? null : totalStats.getStore().size());
table.addCell(primaryStats.getStore() == null ? null : primaryStats.getStore().size());
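The replacement above recurs throughout this change set: an epoch-millisecond long that used to be wrapped in Joda's DateTime is converted through java.time instead. A small sketch of just that conversion (the millisecond value is hypothetical):

import java.time.Instant;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;

public class CreationDateDemo {
    public static void main(String[] args) {
        long creationDate = 1531353600000L; // hypothetical index creation date in epoch millis
        // Equivalent of the removed `new DateTime(creationDate, DateTimeZone.UTC)`:
        ZonedDateTime utc = ZonedDateTime.ofInstant(Instant.ofEpochMilli(creationDate), ZoneOffset.UTC);
        System.out.println(utc); // prints 2018-07-12T00:00Z
    }
}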

View File

@@ -0,0 +1,70 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.snapshots.status;
import java.io.IOException;
import java.util.function.Predicate;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.AbstractXContentTestCase;
public class SnapshotIndexShardStatusTests extends AbstractXContentTestCase<SnapshotIndexShardStatus> {
@Override
protected SnapshotIndexShardStatus createTestInstance() {
return createForIndex(randomAlphaOfLength(10));
}
protected SnapshotIndexShardStatus createForIndex(String indexName) {
ShardId shardId = new ShardId(new Index(indexName, IndexMetaData.INDEX_UUID_NA_VALUE), randomIntBetween(0, 500));
SnapshotIndexShardStage stage = randomFrom(SnapshotIndexShardStage.values());
SnapshotStats stats = new SnapshotStatsTests().createTestInstance();
String nodeId = randomAlphaOfLength(20);
String failure = null;
if (rarely()) {
failure = randomAlphaOfLength(200);
}
return new SnapshotIndexShardStatus(shardId, stage, stats, nodeId, failure);
}
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
// Do not place random fields in the root object since its fields correspond to shard names.
return String::isEmpty;
}
@Override
protected SnapshotIndexShardStatus doParseInstance(XContentParser parser) throws IOException {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser::getTokenLocation);
SnapshotIndexShardStatus status = SnapshotIndexShardStatus.fromXContent(parser, parser.currentName());
XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser::getTokenLocation);
return status;
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
}

View File

@@ -0,0 +1,64 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.snapshots.status;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.test.AbstractXContentTestCase;
public class SnapshotIndexStatusTests extends AbstractXContentTestCase<SnapshotIndexStatus> {
@Override
protected SnapshotIndexStatus createTestInstance() {
String index = randomAlphaOfLength(10);
List<SnapshotIndexShardStatus> shardStatuses = new ArrayList<>();
SnapshotIndexShardStatusTests builder = new SnapshotIndexShardStatusTests();
for (int idx = 0; idx < randomIntBetween(0, 10); idx++) {
shardStatuses.add(builder.createForIndex(index));
}
return new SnapshotIndexStatus(index, shardStatuses);
}
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
// Do not place random fields in the root object or the shards field since their fields correspond to names.
return (s) -> s.isEmpty() || s.endsWith("shards");
}
@Override
protected SnapshotIndexStatus doParseInstance(XContentParser parser) throws IOException {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser::getTokenLocation);
SnapshotIndexStatus status = SnapshotIndexStatus.fromXContent(parser);
XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser::getTokenLocation);
return status;
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
}

View File

@@ -0,0 +1,49 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.snapshots.status;
import java.io.IOException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
public class SnapshotShardsStatsTests extends AbstractXContentTestCase<SnapshotShardsStats> {
@Override
protected SnapshotShardsStats createTestInstance() {
int initializingShards = randomInt();
int startedShards = randomInt();
int finalizingShards = randomInt();
int doneShards = randomInt();
int failedShards = randomInt();
int totalShards = randomInt();
return new SnapshotShardsStats(initializingShards, startedShards, finalizingShards, doneShards, failedShards, totalShards);
}
@Override
protected SnapshotShardsStats doParseInstance(XContentParser parser) throws IOException {
return SnapshotShardsStats.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
}

View File

@@ -0,0 +1,52 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.snapshots.status;
import java.io.IOException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
public class SnapshotStatsTests extends AbstractXContentTestCase<SnapshotStats> {
@Override
protected SnapshotStats createTestInstance() {
long startTime = randomNonNegativeLong();
long time = randomNonNegativeLong();
int incrementalFileCount = randomIntBetween(0, Integer.MAX_VALUE);
int totalFileCount = randomIntBetween(0, Integer.MAX_VALUE);
int processedFileCount = randomIntBetween(0, Integer.MAX_VALUE);
long incrementalSize = ((long)randomIntBetween(0, Integer.MAX_VALUE)) * 2;
long totalSize = ((long)randomIntBetween(0, Integer.MAX_VALUE)) * 2;
long processedSize = ((long)randomIntBetween(0, Integer.MAX_VALUE)) * 2;
return new SnapshotStats(startTime, time, incrementalFileCount, totalFileCount,
processedFileCount, incrementalSize, totalSize, processedSize);
}
@Override
protected SnapshotStats doParseInstance(XContentParser parser) throws IOException {
return SnapshotStats.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
}

View File

@@ -21,16 +21,19 @@ package org.elasticsearch.action.admin.cluster.snapshots.status;
import org.elasticsearch.cluster.SnapshotsInProgress;
import org.elasticsearch.common.UUIDs;
+import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.snapshots.Snapshot;
import org.elasticsearch.snapshots.SnapshotId;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.AbstractXContentTestCase;
+import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
+import java.util.function.Predicate;
-public class SnapshotStatusTests extends ESTestCase {
+public class SnapshotStatusTests extends AbstractXContentTestCase<SnapshotStatus> {
public void testToString() throws Exception {
@@ -146,4 +149,39 @@ public class SnapshotStatusTests extends ESTestCase {
"}";
assertEquals(expected, status.toString());
}
@Override
protected SnapshotStatus createTestInstance() {
SnapshotsInProgress.State state = randomFrom(SnapshotsInProgress.State.values());
String uuid = UUIDs.randomBase64UUID();
SnapshotId id = new SnapshotId("test-snap", uuid);
Snapshot snapshot = new Snapshot("test-repo", id);
SnapshotIndexShardStatusTests builder = new SnapshotIndexShardStatusTests();
builder.createTestInstance();
List<SnapshotIndexShardStatus> snapshotIndexShardStatuses = new ArrayList<>();
for (int idx = 0; idx < randomIntBetween(0, 10); idx++) {
SnapshotIndexShardStatus snapshotIndexShardStatus = builder.createTestInstance();
snapshotIndexShardStatuses.add(snapshotIndexShardStatus);
}
boolean includeGlobalState = randomBoolean();
return new SnapshotStatus(snapshot, state, snapshotIndexShardStatuses, includeGlobalState);
}
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
// Do not place random fields in the indices field or shards field since their fields correspond to names.
return (s) -> s.endsWith("shards") || s.endsWith("indices");
}
@Override
protected SnapshotStatus doParseInstance(XContentParser parser) throws IOException {
return SnapshotStatus.fromXContent(parser);
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
}

View File

@@ -0,0 +1,57 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.snapshots.status;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractXContentTestCase;
public class SnapshotsStatusResponseTests extends AbstractXContentTestCase<SnapshotsStatusResponse> {
@Override
protected SnapshotsStatusResponse doParseInstance(XContentParser parser) throws IOException {
return SnapshotsStatusResponse.fromXContent(parser);
}
@Override
protected Predicate<String> getRandomFieldsExcludeFilter() {
// Do not place random fields in the indices field or shards field since their fields correspond to names.
return (s) -> s.endsWith("shards") || s.endsWith("indices");
}
@Override
protected boolean supportsUnknownFields() {
return true;
}
@Override
protected SnapshotsStatusResponse createTestInstance() {
SnapshotStatusTests statusBuilder = new SnapshotStatusTests();
List<SnapshotStatus> snapshotStatuses = new ArrayList<>();
for (int idx = 0; idx < randomIntBetween(0, 5); idx++) {
snapshotStatuses.add(statusBuilder.createTestInstance());
}
return new SnapshotsStatusResponse(snapshotStatuses);
}
}

View File

@@ -25,16 +25,16 @@ import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-import org.joda.time.format.DateTimeFormat;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
@@ -197,8 +197,8 @@ public class RolloverIT extends ESIntegTestCase {
}
public void testRolloverWithDateMath() {
-DateTime now = new DateTime(DateTimeZone.UTC);
-String index = "test-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(now) + "-1";
+ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
+String index = "test-" + DateFormatters.forPattern("YYYY.MM.dd").format(now) + "-1";
String dateMathExp = "<test-{now/d}-1>";
assertAcked(prepareCreate(dateMathExp).addAlias(new Alias("test_alias")).get());
ensureGreen(index);
@@ -212,14 +212,14 @@ public class RolloverIT extends ESIntegTestCase {
ensureGreen(index);
RolloverResponse response = client().admin().indices().prepareRolloverIndex("test_alias").get();
assertThat(response.getOldIndex(), equalTo(index));
-assertThat(response.getNewIndex(), equalTo("test-" + DateTimeFormat.forPattern("YYYY.MM").print(now) + "-000002"));
+assertThat(response.getNewIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM").format(now) + "-000002"));
assertThat(response.isDryRun(), equalTo(false));
assertThat(response.isRolledOver(), equalTo(true));
assertThat(response.getConditionStatus().size(), equalTo(0));
response = client().admin().indices().prepareRolloverIndex("test_alias").get();
-assertThat(response.getOldIndex(), equalTo("test-" + DateTimeFormat.forPattern("YYYY.MM").print(now) + "-000002"));
-assertThat(response.getNewIndex(), equalTo("test-" + DateTimeFormat.forPattern("YYYY.MM").print(now) + "-000003"));
+assertThat(response.getOldIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM").format(now) + "-000002"));
+assertThat(response.getNewIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM").format(now) + "-000003"));
assertThat(response.isDryRun(), equalTo(false));
assertThat(response.isRolledOver(), equalTo(true));
assertThat(response.getConditionStatus().size(), equalTo(0));
@@ -232,8 +232,8 @@ public class RolloverIT extends ESIntegTestCase {
IndexMetaData.SETTING_INDEX_PROVIDED_NAME));
response = client().admin().indices().prepareRolloverIndex("test_alias").setNewIndexName("<test-{now/d}-000004>").get();
-assertThat(response.getOldIndex(), equalTo("test-" + DateTimeFormat.forPattern("YYYY.MM").print(now) + "-000003"));
-assertThat(response.getNewIndex(), equalTo("test-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(now) + "-000004"));
+assertThat(response.getOldIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM").format(now) + "-000003"));
+assertThat(response.getNewIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM.dd").format(now) + "-000004"));
assertThat(response.isDryRun(), equalTo(false));
assertThat(response.isRolledOver(), equalTo(true));
assertThat(response.getConditionStatus().size(), equalTo(0));
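One caveat worth noting with these replacements: the pattern strings remain Joda-style (where YYYY prints the year), and it is assumed here that DateFormatters.forPattern translates them, because in raw java.time patterns 'Y' means week-based-year while 'y' means year. A sketch using plain java.time, with the pattern letter adjusted accordingly:

import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Locale;

public class RolloverNameDemo {
    public static void main(String[] args) {
        ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
        // 'yyyy', not 'YYYY': in java.time, 'Y' is the week-based year and can
        // differ from the calendar year around the new-year boundary.
        String index = "test-" + DateTimeFormatter.ofPattern("yyyy.MM.dd", Locale.ROOT).format(now) + "-1";
        System.out.println(index); // e.g. test-2018.07.12-1
    }
}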

View File

@@ -22,8 +22,10 @@ package org.elasticsearch.cluster.metadata;
import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
+import java.time.Instant;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import static org.elasticsearch.test.VersionUtils.randomVersion;
@@ -42,6 +44,7 @@ public class HumanReadableIndexSettingsTests extends ESTestCase {
assertEquals(versionCreated.toString(), humanSettings.get(IndexMetaData.SETTING_VERSION_CREATED_STRING, null));
assertEquals(versionUpgraded.toString(), humanSettings.get(IndexMetaData.SETTING_VERSION_UPGRADED_STRING, null));
-assertEquals(new DateTime(created, DateTimeZone.UTC).toString(), humanSettings.get(IndexMetaData.SETTING_CREATION_DATE_STRING, null));
+ZonedDateTime creationDate = ZonedDateTime.ofInstant(Instant.ofEpochMilli(created), ZoneOffset.UTC);
+assertEquals(creationDate.toString(), humanSettings.get(IndexMetaData.SETTING_CREATION_DATE_STRING, null));
}
}
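A subtlety this assertion depends on: Joda's DateTime.toString() always prints a millisecond component, while ZonedDateTime.toString() omits zero fractional seconds, so the production code writing SETTING_CREATION_DATE_STRING must use the same java.time representation for the strings to match. A small illustration of the difference in output shape (timestamp value hypothetical):

import java.time.Instant;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;

public class ToStringShapeDemo {
    public static void main(String[] args) {
        ZonedDateTime t = ZonedDateTime.ofInstant(Instant.ofEpochMilli(1531353600000L), ZoneOffset.UTC);
        // ZonedDateTime drops the zero time components it can:
        System.out.println(t); // 2018-07-12T00:00Z (Joda would print 2018-07-12T00:00:00.000Z)
    }
}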

View File

@@ -23,7 +23,9 @@ import org.elasticsearch.common.time.CompoundDateTimeFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.test.ESTestCase;
import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;
+import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
@@ -354,11 +356,109 @@ public class JavaJodaTimeDuellingTests extends ESTestCase {
assertParseException("2012-W1-1", "strict_weekyear_week_day");
}
public void testSamePrinterOutput() {
int year = randomIntBetween(1970, 2030);
int month = randomIntBetween(1, 12);
int day = randomIntBetween(1, 28);
int hour = randomIntBetween(0, 23);
int minute = randomIntBetween(0, 59);
int second = randomIntBetween(0, 59);
ZonedDateTime javaDate = ZonedDateTime.of(year, month, day, hour, minute, second, 0, ZoneOffset.UTC);
DateTime jodaDate = new DateTime(year, month, day, hour, minute, second, DateTimeZone.UTC);
assertSamePrinterOutput("basicDate", javaDate, jodaDate);
assertSamePrinterOutput("basicDateTime", javaDate, jodaDate);
assertSamePrinterOutput("basicDateTimeNoMillis", javaDate, jodaDate);
assertSamePrinterOutput("basicOrdinalDate", javaDate, jodaDate);
assertSamePrinterOutput("basicOrdinalDateTime", javaDate, jodaDate);
assertSamePrinterOutput("basicOrdinalDateTimeNoMillis", javaDate, jodaDate);
assertSamePrinterOutput("basicTime", javaDate, jodaDate);
assertSamePrinterOutput("basicTimeNoMillis", javaDate, jodaDate);
assertSamePrinterOutput("basicTTime", javaDate, jodaDate);
assertSamePrinterOutput("basicTTimeNoMillis", javaDate, jodaDate);
assertSamePrinterOutput("basicWeekDate", javaDate, jodaDate);
assertSamePrinterOutput("basicWeekDateTime", javaDate, jodaDate);
assertSamePrinterOutput("basicWeekDateTimeNoMillis", javaDate, jodaDate);
assertSamePrinterOutput("date", javaDate, jodaDate);
assertSamePrinterOutput("dateHour", javaDate, jodaDate);
assertSamePrinterOutput("dateHourMinute", javaDate, jodaDate);
assertSamePrinterOutput("dateHourMinuteSecond", javaDate, jodaDate);
assertSamePrinterOutput("dateHourMinuteSecondFraction", javaDate, jodaDate);
assertSamePrinterOutput("dateHourMinuteSecondMillis", javaDate, jodaDate);
assertSamePrinterOutput("dateOptionalTime", javaDate, jodaDate);
assertSamePrinterOutput("dateTime", javaDate, jodaDate);
assertSamePrinterOutput("dateTimeNoMillis", javaDate, jodaDate);
assertSamePrinterOutput("hour", javaDate, jodaDate);
assertSamePrinterOutput("hourMinute", javaDate, jodaDate);
assertSamePrinterOutput("hourMinuteSecond", javaDate, jodaDate);
assertSamePrinterOutput("hourMinuteSecondFraction", javaDate, jodaDate);
assertSamePrinterOutput("hourMinuteSecondMillis", javaDate, jodaDate);
assertSamePrinterOutput("ordinalDate", javaDate, jodaDate);
assertSamePrinterOutput("ordinalDateTime", javaDate, jodaDate);
assertSamePrinterOutput("ordinalDateTimeNoMillis", javaDate, jodaDate);
assertSamePrinterOutput("time", javaDate, jodaDate);
assertSamePrinterOutput("timeNoMillis", javaDate, jodaDate);
assertSamePrinterOutput("tTime", javaDate, jodaDate);
assertSamePrinterOutput("tTimeNoMillis", javaDate, jodaDate);
assertSamePrinterOutput("weekDate", javaDate, jodaDate);
assertSamePrinterOutput("weekDateTime", javaDate, jodaDate);
assertSamePrinterOutput("weekDateTimeNoMillis", javaDate, jodaDate);
assertSamePrinterOutput("weekyear", javaDate, jodaDate);
assertSamePrinterOutput("weekyearWeek", javaDate, jodaDate);
assertSamePrinterOutput("weekyearWeekDay", javaDate, jodaDate);
assertSamePrinterOutput("year", javaDate, jodaDate);
assertSamePrinterOutput("yearMonth", javaDate, jodaDate);
assertSamePrinterOutput("yearMonthDay", javaDate, jodaDate);
assertSamePrinterOutput("epoch_second", javaDate, jodaDate);
assertSamePrinterOutput("epoch_millis", javaDate, jodaDate);
assertSamePrinterOutput("strictBasicWeekDate", javaDate, jodaDate);
assertSamePrinterOutput("strictBasicWeekDateTime", javaDate, jodaDate);
assertSamePrinterOutput("strictBasicWeekDateTimeNoMillis", javaDate, jodaDate);
assertSamePrinterOutput("strictDate", javaDate, jodaDate);
assertSamePrinterOutput("strictDateHour", javaDate, jodaDate);
assertSamePrinterOutput("strictDateHourMinute", javaDate, jodaDate);
assertSamePrinterOutput("strictDateHourMinuteSecond", javaDate, jodaDate);
assertSamePrinterOutput("strictDateHourMinuteSecondFraction", javaDate, jodaDate);
assertSamePrinterOutput("strictDateHourMinuteSecondMillis", javaDate, jodaDate);
assertSamePrinterOutput("strictDateOptionalTime", javaDate, jodaDate);
assertSamePrinterOutput("strictDateTime", javaDate, jodaDate);
assertSamePrinterOutput("strictDateTimeNoMillis", javaDate, jodaDate);
assertSamePrinterOutput("strictHour", javaDate, jodaDate);
assertSamePrinterOutput("strictHourMinute", javaDate, jodaDate);
assertSamePrinterOutput("strictHourMinuteSecond", javaDate, jodaDate);
assertSamePrinterOutput("strictHourMinuteSecondFraction", javaDate, jodaDate);
assertSamePrinterOutput("strictHourMinuteSecondMillis", javaDate, jodaDate);
assertSamePrinterOutput("strictOrdinalDate", javaDate, jodaDate);
assertSamePrinterOutput("strictOrdinalDateTime", javaDate, jodaDate);
assertSamePrinterOutput("strictOrdinalDateTimeNoMillis", javaDate, jodaDate);
assertSamePrinterOutput("strictTime", javaDate, jodaDate);
assertSamePrinterOutput("strictTimeNoMillis", javaDate, jodaDate);
assertSamePrinterOutput("strictTTime", javaDate, jodaDate);
assertSamePrinterOutput("strictTTimeNoMillis", javaDate, jodaDate);
assertSamePrinterOutput("strictWeekDate", javaDate, jodaDate);
assertSamePrinterOutput("strictWeekDateTime", javaDate, jodaDate);
assertSamePrinterOutput("strictWeekDateTimeNoMillis", javaDate, jodaDate);
assertSamePrinterOutput("strictWeekyear", javaDate, jodaDate);
assertSamePrinterOutput("strictWeekyearWeek", javaDate, jodaDate);
assertSamePrinterOutput("strictWeekyearWeekDay", javaDate, jodaDate);
assertSamePrinterOutput("strictYear", javaDate, jodaDate);
assertSamePrinterOutput("strictYearMonth", javaDate, jodaDate);
assertSamePrinterOutput("strictYearMonthDay", javaDate, jodaDate);
}
public void testSeveralTimeFormats() {
assertSameDate("2018-12-12", "year_month_day||ordinal_date");
assertSameDate("2018-128", "year_month_day||ordinal_date");
}
private void assertSamePrinterOutput(String format, ZonedDateTime javaDate, DateTime jodaDate) {
assertThat(jodaDate.getMillis(), is(javaDate.toEpochSecond() * 1000));
String javaTimeOut = DateFormatters.forPattern(format).format(javaDate);
String jodaTimeOut = Joda.forPattern(format).printer().print(jodaDate);
assertThat(javaTimeOut, is(jodaTimeOut));
}
private void assertSameDate(String input, String format) {
FormatDateTimeFormatter jodaFormatter = Joda.forPattern(format);
DateTime jodaDateTime = jodaFormatter.parser().parseDateTime(input);
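The first assertion in assertSamePrinterOutput equates Joda millis with javaDate.toEpochSecond() * 1000, which only holds because the test builds its ZonedDateTime with a zero nano-of-second. A sketch of the distinction, should sub-second precision ever be added:

import java.time.ZoneOffset;
import java.time.ZonedDateTime;

public class EpochMillisDemo {
    public static void main(String[] args) {
        ZonedDateTime withMillis = ZonedDateTime.of(2018, 7, 12, 8, 57, 39, 123_000_000, ZoneOffset.UTC);
        // toEpochSecond() drops sub-second precision; toInstant().toEpochMilli() keeps it.
        System.out.println(withMillis.toEpochSecond() * 1000);     // 1531385859000
        System.out.println(withMillis.toInstant().toEpochMilli()); // 1531385859123
    }
}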

View File

@@ -28,12 +28,12 @@ import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.test.ESIntegTestCase;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-import org.joda.time.format.ISODateTimeFormat;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
@@ -249,8 +249,9 @@ public class ExplainActionIT extends ESIntegTestCase {
public void testExplainDateRangeInQueryString() {
createIndex("test");
-String aMonthAgo = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).minusMonths(1));
-String aMonthFromNow = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).plusMonths(1));
+ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
+String aMonthAgo = DateTimeFormatter.ISO_LOCAL_DATE.format(now.minusMonths(1));
+String aMonthFromNow = DateTimeFormatter.ISO_LOCAL_DATE.format(now.plusMonths(1));
client().prepareIndex("test", "type", "1").setSource("past", aMonthAgo, "future", aMonthFromNow).get();

View File

@@ -30,11 +30,11 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;
import org.elasticsearch.test.ESIntegTestCase;
-import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
-import org.joda.time.chrono.ISOChronology;
-import org.joda.time.format.DateTimeFormat;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
import java.util.List;
import static org.elasticsearch.search.aggregations.AggregationBuilders.dateHistogram;
@@ -255,7 +255,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
.setSettings(settings).get());
assertAcked(client.admin().indices().prepareCreate("index-3").addMapping("type", "d", "type=date")
.setSettings(settings).get());
-DateTime now = new DateTime(ISOChronology.getInstanceUTC());
+ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
indexRandom(true, client.prepareIndex("index-1", "type", "1").setSource("d", now),
client.prepareIndex("index-1", "type", "2").setSource("d", now.minusDays(1)),
client.prepareIndex("index-1", "type", "3").setSource("d", now.minusDays(2)),
@@ -456,9 +456,9 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
.setSettings(settings)
.addAlias(new Alias("last_week").filter(QueryBuilders.rangeQuery("created_at").gte("now-7d/d")))
.get());
-DateTime now = new DateTime(DateTimeZone.UTC);
+ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
client.prepareIndex("index", "type", "1").setRouting("1").setSource("created_at",
-DateTimeFormat.forPattern("YYYY-MM-dd").print(now)).get();
+DateTimeFormatter.ISO_LOCAL_DATE.format(now)).get();
refresh();
assertThat(client.admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),

View File

@@ -22,10 +22,10 @@ package org.elasticsearch.search.aggregations.bucket;
import com.carrotsearch.hppc.LongHashSet;
import com.carrotsearch.hppc.LongSet;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
@@ -40,10 +40,9 @@ import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.test.ESIntegTestCase;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-import org.joda.time.format.DateTimeFormat;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
@@ -123,8 +122,9 @@ public class MinDocCountIT extends AbstractTermsTestCase {
longTerm = randomInt(cardinality * 2);
} while (!longTerms.add(longTerm));
double doubleTerm = longTerm * Math.PI;
-String dateTerm = DateTimeFormat.forPattern("yyyy-MM-dd")
-.print(new DateTime(2014, 1, ((int) longTerm % 20) + 1, 0, 0, DateTimeZone.UTC));
+ZonedDateTime time = ZonedDateTime.of(2014, 1, ((int) longTerm % 20) + 1, 0, 0, 0, 0, ZoneOffset.UTC);
+String dateTerm = DateFormatters.forPattern("yyyy-MM-dd").format(time);
final int frequency = randomBoolean() ? 1 : randomIntBetween(2, 20);
for (int j = 0; j < frequency; ++j) {
indexRequests.add(client().prepareIndex("idx", "type").setSource(jsonBuilder()

View File

@@ -63,10 +63,10 @@ import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.MockKeywordPlugin;
import org.hamcrest.Matcher;
import org.hamcrest.Matchers;
-import org.joda.time.DateTime;
-import org.joda.time.chrono.ISOChronology;
import java.io.IOException;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
@@ -2865,7 +2865,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
"field", "type=text,store=true,term_vector=with_positions_offsets")
.setSettings(Settings.builder().put("index.number_of_replicas", 0).put("index.number_of_shards", 2))
.get());
-DateTime now = new DateTime(ISOChronology.getInstanceUTC());
+ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
indexRandom(true, client().prepareIndex("index-1", "type", "1").setSource("d", now, "field", "hello world"),
client().prepareIndex("index-1", "type", "2").setSource("d", now.minusDays(1), "field", "hello"),
client().prepareIndex("index-1", "type", "3").setSource("d", now.minusDays(2), "field", "world"));

View File

@@ -28,8 +28,8 @@ import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.document.DocumentField;
-import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
@@ -48,8 +48,9 @@ import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
-import org.joda.time.ReadableDateTime;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
@@ -546,6 +547,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).execute().actionGet();
+ZonedDateTime date = ZonedDateTime.of(2012, 3, 22, 0, 0, 0, 0, ZoneOffset.UTC);
client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
.field("byte_field", (byte) 1)
.field("short_field", (short) 2)
@@ -553,7 +555,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
.field("long_field", 4L)
.field("float_field", 5.0f)
.field("double_field", 6.0d)
-.field("date_field", Joda.forPattern("dateOptionalTime").printer().print(new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC)))
+.field("date_field", DateFormatters.forPattern("dateOptionalTime").format(date))
.field("boolean_field", true)
.field("binary_field", Base64.getEncoder().encodeToString("testing text".getBytes("UTF-8")))
.endObject()).execute().actionGet();
@@ -578,7 +580,6 @@ public class SearchFieldsIT extends ESIntegTestCase {
assertThat(fields, equalTo(newHashSet("byte_field", "short_field", "integer_field", "long_field",
"float_field", "double_field", "date_field", "boolean_field", "binary_field")));
SearchHit searchHit = searchResponse.getHits().getAt(0);
assertThat(searchHit.getFields().get("byte_field").getValue().toString(), equalTo("1"));
assertThat(searchHit.getFields().get("short_field").getValue().toString(), equalTo("2"));
@@ -586,7 +587,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
assertThat(searchHit.getFields().get("long_field").getValue(), equalTo((Object) 4L));
assertThat(searchHit.getFields().get("float_field").getValue(), equalTo((Object) 5.0f));
assertThat(searchHit.getFields().get("double_field").getValue(), equalTo((Object) 6.0d));
-String dateTime = Joda.forPattern("dateOptionalTime").printer().print(new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC));
+String dateTime = DateFormatters.forPattern("dateOptionalTime").format(date);
assertThat(searchHit.getFields().get("date_field").getValue(), equalTo((Object) dateTime));
assertThat(searchHit.getFields().get("boolean_field").getValue(), equalTo((Object) Boolean.TRUE));
assertThat(searchHit.getFields().get("binary_field").getValue(), equalTo(new BytesArray("testing text" .getBytes("UTF8"))));
@@ -756,7 +757,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).execute().actionGet();
-ReadableDateTime date = new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC);
+ZonedDateTime date = ZonedDateTime.of(2012, 3, 22, 0, 0, 0, 0, ZoneOffset.UTC);
client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
.field("text_field", "foo")
.field("keyword_field", "foo")
@@ -766,7 +767,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
.field("long_field", 4L)
.field("float_field", 5.0f)
.field("double_field", 6.0d)
-.field("date_field", Joda.forPattern("dateOptionalTime").printer().print(date))
+.field("date_field", DateFormatters.forPattern("dateOptionalTime").format(date))
.field("boolean_field", true)
.field("binary_field", new byte[] {42, 100})
.field("ip_field", "::1")
@@ -802,7 +803,8 @@ public class SearchFieldsIT extends ESIntegTestCase {
assertThat(searchResponse.getHits().getAt(0).getFields().get("long_field").getValue(), equalTo((Object) 4L));
assertThat(searchResponse.getHits().getAt(0).getFields().get("float_field").getValue(), equalTo((Object) 5.0));
assertThat(searchResponse.getHits().getAt(0).getFields().get("double_field").getValue(), equalTo((Object) 6.0d));
-assertThat(searchResponse.getHits().getAt(0).getFields().get("date_field").getValue(), equalTo(date));
+assertThat(searchResponse.getHits().getAt(0).getFields().get("date_field").getValue(),
+equalTo(new DateTime(date.toInstant().toEpochMilli(), DateTimeZone.UTC)));
assertThat(searchResponse.getHits().getAt(0).getFields().get("boolean_field").getValue(), equalTo((Object) true));
assertThat(searchResponse.getHits().getAt(0).getFields().get("text_field").getValue(), equalTo("foo"));
assertThat(searchResponse.getHits().getAt(0).getFields().get("keyword_field").getValue(), equalTo("foo"));
@@ -839,7 +841,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
assertThat(searchResponse.getHits().getAt(0).getFields().get("float_field").getValue(), equalTo((Object) 5.0));
assertThat(searchResponse.getHits().getAt(0).getFields().get("double_field").getValue(), equalTo((Object) 6.0d));
assertThat(searchResponse.getHits().getAt(0).getFields().get("date_field").getValue(),
-equalTo(Joda.forPattern("dateOptionalTime").printer().print(date)));
+equalTo(DateFormatters.forPattern("dateOptionalTime").format(date)));
assertThat(searchResponse.getHits().getAt(0).getFields().get("boolean_field").getValue(), equalTo((Object) true));
assertThat(searchResponse.getHits().getAt(0).getFields().get("text_field").getValue(), equalTo("foo"));
assertThat(searchResponse.getHits().getAt(0).getFields().get("keyword_field").getValue(), equalTo("foo"));
@@ -869,7 +871,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
assertThat(searchResponse.getHits().getAt(0).getFields().get("float_field").getValue(), equalTo("5.0"));
assertThat(searchResponse.getHits().getAt(0).getFields().get("double_field").getValue(), equalTo("6.0"));
assertThat(searchResponse.getHits().getAt(0).getFields().get("date_field").getValue(),
-equalTo(Joda.forPattern("epoch_millis").printer().print(date)));
+equalTo(DateFormatters.forPattern("epoch_millis").format(date)));
}
public void testScriptFields() throws Exception {

View File

@@ -43,9 +43,9 @@ import org.elasticsearch.search.SearchHits;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.VersionUtils;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -562,27 +562,27 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
}
public void testDateWithoutOrigin() throws Exception {
-DateTime dt = new DateTime(DateTimeZone.UTC);
+ZonedDateTime dt = ZonedDateTime.now(ZoneOffset.UTC);
assertAcked(prepareCreate("test").addMapping(
"type1",
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "text")
.endObject().startObject("num1").field("type", "date").endObject().endObject().endObject().endObject()));
-DateTime docDate = dt.minusDays(1);
-String docDateString = docDate.getYear() + "-" + String.format(Locale.ROOT, "%02d", docDate.getMonthOfYear()) + "-"
+ZonedDateTime docDate = dt.minusDays(1);
+String docDateString = docDate.getYear() + "-" + String.format(Locale.ROOT, "%02d", docDate.getMonthValue()) + "-"
+ String.format(Locale.ROOT, "%02d", docDate.getDayOfMonth());
client().index(
indexRequest("test").type("type1").id("1")
.source(jsonBuilder().startObject().field("test", "value").field("num1", docDateString).endObject())).actionGet();
docDate = dt.minusDays(2);
-docDateString = docDate.getYear() + "-" + String.format(Locale.ROOT, "%02d", docDate.getMonthOfYear()) + "-"
+docDateString = docDate.getYear() + "-" + String.format(Locale.ROOT, "%02d", docDate.getMonthValue()) + "-"
+ String.format(Locale.ROOT, "%02d", docDate.getDayOfMonth());
client().index(
indexRequest("test").type("type1").id("2")
.source(jsonBuilder().startObject().field("test", "value").field("num1", docDateString).endObject())).actionGet();
docDate = dt.minusDays(3);
-docDateString = docDate.getYear() + "-" + String.format(Locale.ROOT, "%02d", docDate.getMonthOfYear()) + "-"
+docDateString = docDate.getYear() + "-" + String.format(Locale.ROOT, "%02d", docDate.getMonthValue()) + "-"
+ String.format(Locale.ROOT, "%02d", docDate.getDayOfMonth());
client().index(
indexRequest("test").type("type1").id("3")
View File

@@ -52,6 +52,9 @@ import org.joda.time.DateTimeZone;
import org.joda.time.format.ISODateTimeFormat;
import java.io.IOException;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
import java.util.Collection;
import java.util.Collections;
import java.util.Random;
@@ -480,8 +483,9 @@ public class SearchQueryIT extends ESIntegTestCase {
"type", "past", "type=date", "future", "type=date"
));
-String aMonthAgo = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).minusMonths(1));
-String aMonthFromNow = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).plusMonths(1));
+ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
+String aMonthAgo = DateTimeFormatter.ISO_LOCAL_DATE.format(now.minusMonths(1));
+String aMonthFromNow = DateTimeFormatter.ISO_LOCAL_DATE.format(now.plusMonths(1));
client().prepareIndex("test", "type", "1").setSource("past", aMonthAgo, "future", aMonthFromNow).get();
refresh();

View File

@ -35,12 +35,13 @@ import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.ESIntegTestCase.Scope;
import org.hamcrest.Matcher; import org.hamcrest.Matcher;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.ISODateTimeFormat;
import java.io.IOException; import java.io.IOException;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoUnit;
import java.util.Arrays; import java.util.Arrays;
import java.util.List; import java.util.List;
@ -124,8 +125,9 @@ public class SimpleValidateQueryIT extends ESIntegTestCase {
.put(indexSettings()) .put(indexSettings())
.put("index.number_of_shards", 1))); .put("index.number_of_shards", 1)));
String aMonthAgo = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).minusMonths(1)); ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
String aMonthFromNow = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).plusMonths(1)); String aMonthAgo = DateTimeFormatter.ISO_LOCAL_DATE.format(now.minus(1, ChronoUnit.MONTHS));
String aMonthFromNow = DateTimeFormatter.ISO_LOCAL_DATE.format(now.plus(1, ChronoUnit.MONTHS));
client().prepareIndex("test", "type", "1").setSource("past", aMonthAgo, "future", aMonthFromNow).get(); client().prepareIndex("test", "type", "1").setSource("past", aMonthAgo, "future", aMonthFromNow).get();
@ -137,10 +139,10 @@ public class SimpleValidateQueryIT extends ESIntegTestCase {
assertNoFailures(response); assertNoFailures(response);
assertThat(response.getQueryExplanation().size(), equalTo(1)); assertThat(response.getQueryExplanation().size(), equalTo(1));
assertThat(response.getQueryExplanation().get(0).getError(), nullValue()); assertThat(response.getQueryExplanation().get(0).getError(), nullValue());
DateTime twoMonthsAgo = new DateTime(DateTimeZone.UTC).minusMonths(2).withTimeAtStartOfDay();
DateTime now = new DateTime(DateTimeZone.UTC).plusDays(1).withTimeAtStartOfDay().minusMillis(1); long twoMonthsAgo = now.minus(2, ChronoUnit.MONTHS).truncatedTo(ChronoUnit.DAYS).toEpochSecond() * 1000;
assertThat(response.getQueryExplanation().get(0).getExplanation(), long rangeEnd = (now.plus(1, ChronoUnit.DAYS).truncatedTo(ChronoUnit.DAYS).toEpochSecond() * 1000) - 1;
equalTo("past:[" + twoMonthsAgo.getMillis() + " TO " + now.getMillis() + "]")); assertThat(response.getQueryExplanation().get(0).getExplanation(), equalTo("past:[" + twoMonthsAgo + " TO " + rangeEnd + "]"));
assertThat(response.isValid(), equalTo(true)); assertThat(response.isValid(), equalTo(true));
} }

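The epoch arithmetic above is the java.time replacement for joda's withTimeAtStartOfDay().getMillis(): once a ZonedDateTime is truncated to whole days it has no sub-second part, so toEpochSecond() * 1000 equals toInstant().toEpochMilli(). A self-contained sketch of the day-boundary computation under that assumption:

import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;

public class DayBounds {
    public static void main(String[] args) {
        ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
        // first millisecond of today and last millisecond of today, both UTC
        long startOfToday = now.truncatedTo(ChronoUnit.DAYS).toInstant().toEpochMilli();
        long endOfToday = now.plus(1, ChronoUnit.DAYS).truncatedTo(ChronoUnit.DAYS).toInstant().toEpochMilli() - 1;
        System.out.println(startOfToday + " TO " + endOfToday);
    }
}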
View File

@ -21,12 +21,6 @@ package org.elasticsearch.test.rest;
import org.apache.http.Header; import org.apache.http.Header;
import org.apache.http.HttpHost; import org.apache.http.HttpHost;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpHead;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.message.BasicHeader; import org.apache.http.message.BasicHeader;
import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy;
import org.apache.http.ssl.SSLContexts; import org.apache.http.ssl.SSLContexts;
@ -68,16 +62,12 @@ import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException; import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException; import java.security.cert.CertificateException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static java.util.Collections.sort; import static java.util.Collections.sort;
import static java.util.Collections.unmodifiableList; import static java.util.Collections.unmodifiableList;
import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.anyOf;
@ -307,25 +297,25 @@ public abstract class ESRestTestCase extends ESTestCase {
* the snapshots intact in the repository. * the snapshots intact in the repository.
*/ */
private void wipeSnapshots() throws IOException { private void wipeSnapshots() throws IOException {
for (Map.Entry<String, ?> repo : entityAsMap(adminClient.performRequest("GET", "_snapshot/_all")).entrySet()) { for (Map.Entry<String, ?> repo : entityAsMap(adminClient.performRequest(new Request("GET", "/_snapshot/_all"))).entrySet()) {
String repoName = repo.getKey(); String repoName = repo.getKey();
Map<?, ?> repoSpec = (Map<?, ?>) repo.getValue(); Map<?, ?> repoSpec = (Map<?, ?>) repo.getValue();
String repoType = (String) repoSpec.get("type"); String repoType = (String) repoSpec.get("type");
if (false == preserveSnapshotsUponCompletion() && repoType.equals("fs")) { if (false == preserveSnapshotsUponCompletion() && repoType.equals("fs")) {
// For all other repo types we have no reliable way to iterate the snapshots, sadly. // For all other repo types we have no reliable way to iterate the snapshots, sadly.
String url = "_snapshot/" + repoName + "/_all"; Request listRequest = new Request("GET", "/_snapshot/" + repoName + "/_all");
Map<String, String> params = singletonMap("ignore_unavailable", "true"); listRequest.addParameter("ignore_unavailable", "true");
List<?> snapshots = (List<?>) entityAsMap(adminClient.performRequest("GET", url, params)).get("snapshots"); List<?> snapshots = (List<?>) entityAsMap(adminClient.performRequest(listRequest)).get("snapshots");
for (Object snapshot : snapshots) { for (Object snapshot : snapshots) {
Map<?, ?> snapshotInfo = (Map<?, ?>) snapshot; Map<?, ?> snapshotInfo = (Map<?, ?>) snapshot;
String name = (String) snapshotInfo.get("snapshot"); String name = (String) snapshotInfo.get("snapshot");
logger.debug("wiping snapshot [{}/{}]", repoName, name); logger.debug("wiping snapshot [{}/{}]", repoName, name);
adminClient().performRequest("DELETE", "_snapshot/" + repoName + "/" + name); adminClient().performRequest(new Request("DELETE", "/_snapshot/" + repoName + "/" + name));
} }
} }
if (preserveReposUponCompletion() == false) { if (preserveReposUponCompletion() == false) {
logger.debug("wiping snapshot repository [{}]", repoName); logger.debug("wiping snapshot repository [{}]", repoName);
adminClient().performRequest("DELETE", "_snapshot/" + repoName); adminClient().performRequest(new Request("DELETE", "_snapshot/" + repoName));
} }
} }
} }
@ -334,7 +324,7 @@ public abstract class ESRestTestCase extends ESTestCase {
* Remove any cluster settings. * Remove any cluster settings.
*/ */
private void wipeClusterSettings() throws IOException { private void wipeClusterSettings() throws IOException {
Map<?, ?> getResponse = entityAsMap(adminClient().performRequest("GET", "/_cluster/settings")); Map<?, ?> getResponse = entityAsMap(adminClient().performRequest(new Request("GET", "/_cluster/settings")));
boolean mustClear = false; boolean mustClear = false;
XContentBuilder clearCommand = JsonXContent.contentBuilder(); XContentBuilder clearCommand = JsonXContent.contentBuilder();
@ -355,8 +345,9 @@ public abstract class ESRestTestCase extends ESTestCase {
clearCommand.endObject(); clearCommand.endObject();
if (mustClear) { if (mustClear) {
adminClient().performRequest("PUT", "/_cluster/settings", emptyMap(), new StringEntity( Request request = new Request("PUT", "/_cluster/settings");
Strings.toString(clearCommand), ContentType.APPLICATION_JSON)); request.setJsonEntity(Strings.toString(clearCommand));
adminClient().performRequest(request);
} }
} }
@ -365,7 +356,7 @@ public abstract class ESRestTestCase extends ESTestCase {
* other tests. * other tests.
*/ */
private void logIfThereAreRunningTasks() throws InterruptedException, IOException { private void logIfThereAreRunningTasks() throws InterruptedException, IOException {
Set<String> runningTasks = runningTasks(adminClient().performRequest("GET", "_tasks")); Set<String> runningTasks = runningTasks(adminClient().performRequest(new Request("GET", "/_tasks")));
// Ignore the task list API - it doesn't count against us // Ignore the task list API - it doesn't count against us
runningTasks.remove(ListTasksAction.NAME); runningTasks.remove(ListTasksAction.NAME);
runningTasks.remove(ListTasksAction.NAME + "[n]"); runningTasks.remove(ListTasksAction.NAME + "[n]");
@ -389,7 +380,7 @@ public abstract class ESRestTestCase extends ESTestCase {
private void waitForClusterStateUpdatesToFinish() throws Exception { private void waitForClusterStateUpdatesToFinish() throws Exception {
assertBusy(() -> { assertBusy(() -> {
try { try {
Response response = adminClient().performRequest("GET", "_cluster/pending_tasks"); Response response = adminClient().performRequest(new Request("GET", "/_cluster/pending_tasks"));
List<?> tasks = (List<?>) entityAsMap(response).get("tasks"); List<?> tasks = (List<?>) entityAsMap(response).get("tasks");
if (false == tasks.isEmpty()) { if (false == tasks.isEmpty()) {
StringBuilder message = new StringBuilder("there are still running tasks:"); StringBuilder message = new StringBuilder("there are still running tasks:");
@ -514,12 +505,12 @@ public abstract class ESRestTestCase extends ESTestCase {
* @param index index to test for * @param index index to test for
**/ **/
protected static void ensureGreen(String index) throws IOException { protected static void ensureGreen(String index) throws IOException {
Map<String, String> params = new HashMap<>(); Request request = new Request("GET", "/_cluster/health/" + index);
params.put("wait_for_status", "green"); request.addParameter("wait_for_status", "green");
params.put("wait_for_no_relocating_shards", "true"); request.addParameter("wait_for_no_relocating_shards", "true");
params.put("timeout", "70s"); request.addParameter("timeout", "70s");
params.put("level", "shards"); request.addParameter("level", "shards");
assertOK(client().performRequest("GET", "_cluster/health/" + index, params)); client().performRequest(request);
} }
/** /**
@ -527,11 +518,11 @@ public abstract class ESRestTestCase extends ESTestCase {
* in the cluster and doesn't require to know how many nodes/replica there are. * in the cluster and doesn't require to know how many nodes/replica there are.
*/ */
protected static void ensureNoInitializingShards() throws IOException { protected static void ensureNoInitializingShards() throws IOException {
Map<String, String> params = new HashMap<>(); Request request = new Request("GET", "/_cluster/health");
params.put("wait_for_no_initializing_shards", "true"); request.addParameter("wait_for_no_initializing_shards", "true");
params.put("timeout", "70s"); request.addParameter("timeout", "70s");
params.put("level", "shards"); request.addParameter("level", "shards");
assertOK(client().performRequest("GET", "_cluster/health/", params)); client().performRequest(request);
} }
protected static void createIndex(String name, Settings settings) throws IOException { protected static void createIndex(String name, Settings settings) throws IOException {
@ -539,9 +530,10 @@ public abstract class ESRestTestCase extends ESTestCase {
} }
protected static void createIndex(String name, Settings settings, String mapping) throws IOException { protected static void createIndex(String name, Settings settings, String mapping) throws IOException {
assertOK(client().performRequest(HttpPut.METHOD_NAME, name, Collections.emptyMap(), Request request = new Request("PUT", "/" + name);
new StringEntity("{ \"settings\": " + Strings.toString(settings) request.setJsonEntity("{\n \"settings\": " + Strings.toString(settings)
+ ", \"mappings\" : {" + mapping + "} }", ContentType.APPLICATION_JSON))); + ", \"mappings\" : {" + mapping + "} }");
client().performRequest(request);
} }
protected static void updateIndexSettings(String index, Settings.Builder settings) throws IOException { protected static void updateIndexSettings(String index, Settings.Builder settings) throws IOException {
@ -549,42 +541,42 @@ public abstract class ESRestTestCase extends ESTestCase {
} }
private static void updateIndexSettings(String index, Settings settings) throws IOException { private static void updateIndexSettings(String index, Settings settings) throws IOException {
assertOK(client().performRequest("PUT", index + "/_settings", Collections.emptyMap(), Request request = new Request("PUT", "/" + index + "/_settings");
new StringEntity(Strings.toString(settings), ContentType.APPLICATION_JSON))); request.setJsonEntity(Strings.toString(settings));
client().performRequest(request);
} }
protected static Map<String, Object> getIndexSettings(String index) throws IOException { protected static Map<String, Object> getIndexSettings(String index) throws IOException {
Map<String, String> params = new HashMap<>(); Request request = new Request("GET", "/" + index + "/_settings");
params.put("flat_settings", "true"); request.addParameter("flat_settings", "true");
Response response = client().performRequest(HttpGet.METHOD_NAME, index + "/_settings", params); Response response = client().performRequest(request);
assertOK(response);
try (InputStream is = response.getEntity().getContent()) { try (InputStream is = response.getEntity().getContent()) {
return XContentHelper.convertToMap(XContentType.JSON.xContent(), is, true); return XContentHelper.convertToMap(XContentType.JSON.xContent(), is, true);
} }
} }
protected static boolean indexExists(String index) throws IOException { protected static boolean indexExists(String index) throws IOException {
Response response = client().performRequest(HttpHead.METHOD_NAME, index); Response response = client().performRequest(new Request("HEAD", "/" + index));
return RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode(); return RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode();
} }
protected static void closeIndex(String index) throws IOException { protected static void closeIndex(String index) throws IOException {
Response response = client().performRequest(HttpPost.METHOD_NAME, index + "/_close"); Response response = client().performRequest(new Request("POST", "/" + index + "/_close"));
assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
} }
protected static void openIndex(String index) throws IOException { protected static void openIndex(String index) throws IOException {
Response response = client().performRequest(HttpPost.METHOD_NAME, index + "/_open"); Response response = client().performRequest(new Request("POST", "/" + index + "/_open"));
assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
} }
protected static boolean aliasExists(String alias) throws IOException { protected static boolean aliasExists(String alias) throws IOException {
Response response = client().performRequest(HttpHead.METHOD_NAME, "/_alias/" + alias); Response response = client().performRequest(new Request("HEAD", "/_alias/" + alias));
return RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode(); return RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode();
} }
protected static boolean aliasExists(String index, String alias) throws IOException { protected static boolean aliasExists(String index, String alias) throws IOException {
Response response = client().performRequest(HttpHead.METHOD_NAME, "/" + index + "/_alias/" + alias); Response response = client().performRequest(new Request("HEAD", "/" + index + "/_alias/" + alias));
return RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode(); return RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode();
} }
@ -602,7 +594,7 @@ public abstract class ESRestTestCase extends ESTestCase {
} }
protected static Map<String, Object> getAsMap(final String endpoint) throws IOException { protected static Map<String, Object> getAsMap(final String endpoint) throws IOException {
Response response = client().performRequest(HttpGet.METHOD_NAME, endpoint); Response response = client().performRequest(new Request("GET", endpoint));
XContentType entityContentType = XContentType.fromMediaTypeOrFormat(response.getEntity().getContentType().getValue()); XContentType entityContentType = XContentType.fromMediaTypeOrFormat(response.getEntity().getContentType().getValue());
Map<String, Object> responseEntity = XContentHelper.convertToMap(entityContentType.xContent(), Map<String, Object> responseEntity = XContentHelper.convertToMap(entityContentType.xContent(),
response.getEntity().getContent(), false); response.getEntity().getContent(), false);

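Every change in this file follows the same migration: the deprecated performRequest(method, endpoint, params, entity) overloads are replaced by building a Request object first. A hedged sketch of the new calling convention (the endpoint, parameter, and body here are made up, not taken from the tests above):

import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;

// inside an ESRestTestCase subclass
Request request = new Request("PUT", "/my-index/_settings");
request.addParameter("timeout", "30s");
request.setJsonEntity("{ \"index\": { \"number_of_replicas\": 0 } }");
Response response = client().performRequest(request);

Note that performRequest throws a ResponseException for error status codes, which is why several assertOK wrappers could be dropped above without losing coverage.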
View File

@ -47,7 +47,6 @@ import java.io.IOException;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator; import java.util.Comparator;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
@ -282,7 +281,9 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase {
private static Tuple<Version, Version> readVersionsFromCatNodes(RestClient restClient) throws IOException { private static Tuple<Version, Version> readVersionsFromCatNodes(RestClient restClient) throws IOException {
// we simply go to the _cat/nodes API and parse all versions in the cluster // we simply go to the _cat/nodes API and parse all versions in the cluster
Response response = restClient.performRequest("GET", "/_cat/nodes", Collections.singletonMap("h", "version,master")); Request request = new Request("GET", "/_cat/nodes");
request.addParameter("h", "version,master");
Response response = restClient.performRequest(request);
ClientYamlTestResponse restTestResponse = new ClientYamlTestResponse(response); ClientYamlTestResponse restTestResponse = new ClientYamlTestResponse(response);
String nodesCatResponse = restTestResponse.getBodyAsString(); String nodesCatResponse = restTestResponse.getBodyAsString();
String[] split = nodesCatResponse.split("\n"); String[] split = nodesCatResponse.split("\n");
@ -310,7 +311,7 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase {
Version version = null; Version version = null;
for (int i = 0; i < numHosts; i++) { for (int i = 0; i < numHosts; i++) {
//we don't really use the urls here, we rely on the client doing round-robin to touch all the nodes in the cluster //we don't really use the urls here, we rely on the client doing round-robin to touch all the nodes in the cluster
Response response = restClient.performRequest("GET", "/"); Response response = restClient.performRequest(new Request("GET", "/"));
ClientYamlTestResponse restTestResponse = new ClientYamlTestResponse(response); ClientYamlTestResponse restTestResponse = new ClientYamlTestResponse(response);
Object latestVersion = restTestResponse.evaluate("version.number"); Object latestVersion = restTestResponse.evaluate("version.number");
if (latestVersion == null) { if (latestVersion == null) {

View File

@ -353,7 +353,8 @@ public class DataDescription implements ToXContentObject, Writeable {
try { try {
DateTimeFormatterTimestampConverter.ofPattern(format, ZoneOffset.UTC); DateTimeFormatterTimestampConverter.ofPattern(format, ZoneOffset.UTC);
} catch (IllegalArgumentException e) { } catch (IllegalArgumentException e) {
throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_INVALID_TIMEFORMAT, format)); throw ExceptionsHelper.badRequestException(
Messages.getMessage(Messages.JOB_CONFIG_INVALID_TIMEFORMAT, format), e.getCause());
} }
} }
timeFormat = format; timeFormat = format;

View File

@ -54,9 +54,9 @@ public class DateTimeFormatterTimestampConverter implements TimestampConverter {
.parseDefaulting(ChronoField.YEAR_OF_ERA, LocalDate.now(defaultTimezone).getYear()) .parseDefaulting(ChronoField.YEAR_OF_ERA, LocalDate.now(defaultTimezone).getYear())
.toFormatter(); .toFormatter();
String now = formatter.format(ZonedDateTime.ofInstant(Instant.ofEpochSecond(0), ZoneOffset.UTC)); String formattedTime = formatter.format(ZonedDateTime.ofInstant(Instant.ofEpochSecond(0), ZoneOffset.UTC));
try { try {
TemporalAccessor parsed = formatter.parse(now); TemporalAccessor parsed = formatter.parse(formattedTime);
boolean hasTimeZone = parsed.isSupported(ChronoField.INSTANT_SECONDS); boolean hasTimeZone = parsed.isSupported(ChronoField.INSTANT_SECONDS);
if (hasTimeZone) { if (hasTimeZone) {
Instant.from(parsed); Instant.from(parsed);
@ -67,7 +67,7 @@ public class DateTimeFormatterTimestampConverter implements TimestampConverter {
return new DateTimeFormatterTimestampConverter(formatter, hasTimeZone, defaultTimezone); return new DateTimeFormatterTimestampConverter(formatter, hasTimeZone, defaultTimezone);
} }
catch (DateTimeException e) { catch (DateTimeException e) {
throw new IllegalArgumentException("Timestamp cannot be derived from pattern: " + pattern); throw new IllegalArgumentException("Timestamp cannot be derived from pattern: " + pattern, e);
} }
} }

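The round trip above (format a known instant, re-parse it, then probe for INSTANT_SECONDS) is how the converter decides whether a pattern captures timezone information. A rough standalone version of the same probe; the two sample patterns are assumptions for illustration:

import java.time.Instant;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.temporal.ChronoField;
import java.time.temporal.TemporalAccessor;
import java.util.Locale;

public class ZoneProbe {
    public static void main(String[] args) {
        for (String pattern : new String[] { "yyyy-MM-dd HH:mm:ss", "yyyy-MM-dd HH:mm:ss XXX" }) {
            DateTimeFormatter formatter = DateTimeFormatter.ofPattern(pattern, Locale.ROOT);
            String sample = formatter.format(ZonedDateTime.ofInstant(Instant.EPOCH, ZoneOffset.UTC));
            TemporalAccessor parsed = formatter.parse(sample);
            // true only for the pattern that carries an offset
            System.out.println(pattern + " -> zoned: " + parsed.isSupported(ChronoField.INSTANT_SECONDS));
        }
    }
}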
View File

@ -17,6 +17,8 @@ import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.ml.job.config.DataDescription.DataFormat; import org.elasticsearch.xpack.core.ml.job.config.DataDescription.DataFormat;
import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.job.messages.Messages;
import java.time.DateTimeException;
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.instanceOf;
@ -51,8 +53,12 @@ public class DataDescriptionTests extends AbstractSerializingTestCase<DataDescri
description.setTimeFormat("epoch"); description.setTimeFormat("epoch");
description.setTimeFormat("epoch_ms"); description.setTimeFormat("epoch_ms");
description.setTimeFormat("yyyy-MM-dd HH"); description.setTimeFormat("yyyy-MM-dd HH");
String goodFormat = "yyyy.MM.dd G 'at' HH:mm:ss z"; }
description.setTimeFormat(goodFormat);
@AwaitsFix(bugUrl = "https://bugs.java.com/bugdatabase/view_bug.do?bug_id=JDK-8206980")
public void testVerify_GivenValidFormat_Java11Bug() {
DataDescription.Builder description = new DataDescription.Builder();
description.setTimeFormat("yyyy.MM.dd G 'at' HH:mm:ss z");
} }
public void testVerify_GivenInValidFormat() { public void testVerify_GivenInValidFormat() {
@ -68,6 +74,10 @@ public class DataDescriptionTests extends AbstractSerializingTestCase<DataDescri
e = expectThrows(ElasticsearchException.class, () -> description.setTimeFormat("y-M-dd")); e = expectThrows(ElasticsearchException.class, () -> description.setTimeFormat("y-M-dd"));
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_INVALID_TIMEFORMAT, "y-M-dd"), e.getMessage()); assertEquals(Messages.getMessage(Messages.JOB_CONFIG_INVALID_TIMEFORMAT, "y-M-dd"), e.getMessage());
expectThrows(ElasticsearchException.class, () -> description.setTimeFormat("YYY-mm-UU hh:mm:ssY")); expectThrows(ElasticsearchException.class, () -> description.setTimeFormat("YYY-mm-UU hh:mm:ssY"));
Throwable cause = e.getCause();
assertNotNull(cause);
assertThat(cause, instanceOf(DateTimeException.class));
} }
public void testTransform_GivenDelimitedAndEpoch() { public void testTransform_GivenDelimitedAndEpoch() {

View File

@ -74,9 +74,14 @@ queryNoWith
: queryTerm : queryTerm
/** we could add sort by - sort per partition */ /** we could add sort by - sort per partition */
(ORDER BY orderBy (',' orderBy)*)? (ORDER BY orderBy (',' orderBy)*)?
(LIMIT limit=(INTEGER_VALUE | ALL))? limitClause?
; ;
limitClause
: LIMIT limit=(INTEGER_VALUE | ALL)
| LIMIT_ESC limit=(INTEGER_VALUE | ALL) ESC_END
;
queryTerm queryTerm
: querySpecification #queryPrimaryDefault : querySpecification #queryPrimaryDefault
| '(' queryNoWith ')' #subquery | '(' queryNoWith ')' #subquery
@ -185,7 +190,12 @@ predicate
; ;
pattern pattern
: value=string (ESCAPE escape=string)? : value=string patternEscape?
;
patternEscape
: ESCAPE escape=string
| ESCAPE_ESC escape=string ESC_END
; ;
valueExpression valueExpression
@ -197,18 +207,44 @@ valueExpression
; ;
primaryExpression primaryExpression
: CAST '(' expression AS dataType ')' #cast : castExpression #cast
| EXTRACT '(' field=identifier FROM valueExpression ')' #extract | extractExpression #extract
| constant #constantDefault | constant #constantDefault
| ASTERISK #star | ASTERISK #star
| (qualifiedName DOT)? ASTERISK #star | (qualifiedName DOT)? ASTERISK #star
| identifier '(' (setQuantifier? expression (',' expression)*)? ')' #functionCall | functionExpression #function
| '(' query ')' #subqueryExpression | '(' query ')' #subqueryExpression
| identifier #columnReference | identifier #columnReference
| qualifiedName #dereference | qualifiedName #dereference
| '(' expression ')' #parenthesizedExpression | '(' expression ')' #parenthesizedExpression
; ;
castExpression
: castTemplate
| FUNCTION_ESC castTemplate ESC_END
;
castTemplate
: CAST '(' expression AS dataType ')'
;
extractExpression
: extractTemplate
| FUNCTION_ESC extractTemplate ESC_END
;
extractTemplate
: EXTRACT '(' field=identifier FROM valueExpression ')'
;
functionExpression
: functionTemplate
| FUNCTION_ESC functionTemplate ESC_END
;
functionTemplate
: identifier '(' (setQuantifier? expression (',' expression)*)? ')'
;
constant constant
: NULL #nullLiteral : NULL #nullLiteral
@ -216,6 +252,10 @@ constant
| booleanValue #booleanLiteral | booleanValue #booleanLiteral
| STRING+ #stringLiteral | STRING+ #stringLiteral
| PARAM #paramLiteral | PARAM #paramLiteral
| DATE_ESC string ESC_END #dateEscapedLiteral
| TIME_ESC string ESC_END #timeEscapedLiteral
| TIMESTAMP_ESC string ESC_END #timestampEscapedLiteral
| GUID_ESC string ESC_END #guidEscapedLiteral
; ;
comparisonOperator comparisonOperator
@ -351,6 +391,18 @@ VERIFY: 'VERIFY';
WHERE: 'WHERE'; WHERE: 'WHERE';
WITH: 'WITH'; WITH: 'WITH';
// Escaped Sequence
ESCAPE_ESC: '{ESCAPE';
FUNCTION_ESC: '{FN';
LIMIT_ESC: '{LIMIT';
DATE_ESC: '{D';
TIME_ESC: '{T';
TIMESTAMP_ESC: '{TS';
// mapped to string literal
GUID_ESC: '{GUID';
ESC_END: '}';
EQ : '='; EQ : '=';
NEQ : '<>' | '!=' | '<=>'; NEQ : '<>' | '!=' | '<=>';
LT : '<'; LT : '<';

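The new {...} tokens implement the JDBC/ODBC escape-sequence syntax, letting drivers wrap function calls, LIMIT, LIKE escapes, and date/time/timestamp/GUID literals in the standard braces. A few illustrative queries (the emp table and its columns are made up, and exact case handling depends on the lexer configuration):

// function and limit escapes
String q1 = "SELECT {fn CONCAT(first_name, last_name)} FROM emp {limit 10}";
// timestamp, date and GUID literal escapes
String q2 = "SELECT * FROM emp WHERE hired > {ts '2018-07-12 08:57:39.000'}";
String q3 = "SELECT * FROM emp WHERE birth_date = {d '1980-01-01'}";
String q4 = "SELECT * FROM emp WHERE id = {guid '12345678-90ab-cdef-1234-567890abcdef'}";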
View File

@ -69,33 +69,41 @@ USING=68
VERIFY=69 VERIFY=69
WHERE=70 WHERE=70
WITH=71 WITH=71
EQ=72 ESCAPE_ESC=72
NEQ=73 FUNCTION_ESC=73
LT=74 LIMIT_ESC=74
LTE=75 DATE_ESC=75
GT=76 TIME_ESC=76
GTE=77 TIMESTAMP_ESC=77
PLUS=78 GUID_ESC=78
MINUS=79 ESC_END=79
ASTERISK=80 EQ=80
SLASH=81 NEQ=81
PERCENT=82 LT=82
CONCAT=83 LTE=83
DOT=84 GT=84
PARAM=85 GTE=85
STRING=86 PLUS=86
INTEGER_VALUE=87 MINUS=87
DECIMAL_VALUE=88 ASTERISK=88
IDENTIFIER=89 SLASH=89
DIGIT_IDENTIFIER=90 PERCENT=90
TABLE_IDENTIFIER=91 CONCAT=91
QUOTED_IDENTIFIER=92 DOT=92
BACKQUOTED_IDENTIFIER=93 PARAM=93
SIMPLE_COMMENT=94 STRING=94
BRACKETED_COMMENT=95 INTEGER_VALUE=95
WS=96 DECIMAL_VALUE=96
UNRECOGNIZED=97 IDENTIFIER=97
DELIMITER=98 DIGIT_IDENTIFIER=98
TABLE_IDENTIFIER=99
QUOTED_IDENTIFIER=100
BACKQUOTED_IDENTIFIER=101
SIMPLE_COMMENT=102
BRACKETED_COMMENT=103
WS=104
UNRECOGNIZED=105
DELIMITER=106
'('=1 '('=1
')'=2 ')'=2
','=3 ','=3
@ -167,16 +175,24 @@ DELIMITER=98
'VERIFY'=69 'VERIFY'=69
'WHERE'=70 'WHERE'=70
'WITH'=71 'WITH'=71
'='=72 '{ESCAPE'=72
'<'=74 '{FN'=73
'<='=75 '{LIMIT'=74
'>'=76 '{D'=75
'>='=77 '{T'=76
'+'=78 '{TS'=77
'-'=79 '{GUID'=78
'*'=80 '}'=79
'/'=81 '='=80
'%'=82 '<'=82
'||'=83 '<='=83
'.'=84 '>'=84
'?'=85 '>='=85
'+'=86
'-'=87
'*'=88
'/'=89
'%'=90
'||'=91
'.'=92
'?'=93

View File

@ -69,32 +69,40 @@ USING=68
VERIFY=69 VERIFY=69
WHERE=70 WHERE=70
WITH=71 WITH=71
EQ=72 ESCAPE_ESC=72
NEQ=73 FUNCTION_ESC=73
LT=74 LIMIT_ESC=74
LTE=75 DATE_ESC=75
GT=76 TIME_ESC=76
GTE=77 TIMESTAMP_ESC=77
PLUS=78 GUID_ESC=78
MINUS=79 ESC_END=79
ASTERISK=80 EQ=80
SLASH=81 NEQ=81
PERCENT=82 LT=82
CONCAT=83 LTE=83
DOT=84 GT=84
PARAM=85 GTE=85
STRING=86 PLUS=86
INTEGER_VALUE=87 MINUS=87
DECIMAL_VALUE=88 ASTERISK=88
IDENTIFIER=89 SLASH=89
DIGIT_IDENTIFIER=90 PERCENT=90
TABLE_IDENTIFIER=91 CONCAT=91
QUOTED_IDENTIFIER=92 DOT=92
BACKQUOTED_IDENTIFIER=93 PARAM=93
SIMPLE_COMMENT=94 STRING=94
BRACKETED_COMMENT=95 INTEGER_VALUE=95
WS=96 DECIMAL_VALUE=96
UNRECOGNIZED=97 IDENTIFIER=97
DIGIT_IDENTIFIER=98
TABLE_IDENTIFIER=99
QUOTED_IDENTIFIER=100
BACKQUOTED_IDENTIFIER=101
SIMPLE_COMMENT=102
BRACKETED_COMMENT=103
WS=104
UNRECOGNIZED=105
'('=1 '('=1
')'=2 ')'=2
','=3 ','=3
@ -166,16 +174,24 @@ UNRECOGNIZED=97
'VERIFY'=69 'VERIFY'=69
'WHERE'=70 'WHERE'=70
'WITH'=71 'WITH'=71
'='=72 '{ESCAPE'=72
'<'=74 '{FN'=73
'<='=75 '{LIMIT'=74
'>'=76 '{D'=75
'>='=77 '{T'=76
'+'=78 '{TS'=77
'-'=79 '{GUID'=78
'*'=80 '}'=79
'/'=81 '='=80
'%'=82 '<'=82
'||'=83 '<='=83
'.'=84 '>'=84
'?'=85 '>='=85
'+'=86
'-'=87
'*'=88
'/'=89
'%'=90
'||'=91
'.'=92
'?'=93

View File

@ -213,10 +213,11 @@ abstract class Verifier {
* Check validity of Aggregate/GroupBy. * Check validity of Aggregate/GroupBy.
* This rule is needed for multiple reasons: * This rule is needed for multiple reasons:
* 1. a user might specify an invalid aggregate (SELECT foo GROUP BY bar) * 1. a user might specify an invalid aggregate (SELECT foo GROUP BY bar)
* 2. the order/having might contain a non-grouped attribute. This is typically * 2. the ORDER BY/HAVING might contain a non-grouped attribute. This is typically
* caught by the Analyzer however if wrapped in a function (ABS()) it gets resolved * caught by the Analyzer however if wrapped in a function (ABS()) it gets resolved
* (because the expression gets resolved little by little without being pushed down, * (because the expression gets resolved little by little without being pushed down,
* without the Analyzer modifying anything). * without the Analyzer modifying anything).
* 2a. HAVING also requires an Aggregate function
* 3. composite agg (used for GROUP BY) allows ordering only on the group keys * 3. composite agg (used for GROUP BY) allows ordering only on the group keys
*/ */
private static boolean checkGroupBy(LogicalPlan p, Set<Failure> localFailures, private static boolean checkGroupBy(LogicalPlan p, Set<Failure> localFailures,
@ -244,7 +245,7 @@ abstract class Verifier {
} }
// make sure to compare attributes directly // make sure to compare attributes directly
if (Expressions.anyMatch(a.groupings(), if (Expressions.anyMatch(a.groupings(),
g -> e.semanticEquals(e instanceof Attribute ? Expressions.attribute(g) : g))) { g -> e.semanticEquals(e instanceof Attribute ? Expressions.attribute(g) : g))) {
return; return;
} }
@ -278,13 +279,14 @@ abstract class Verifier {
Map<Expression, Node<?>> missing = new LinkedHashMap<>(); Map<Expression, Node<?>> missing = new LinkedHashMap<>();
Expression condition = f.condition(); Expression condition = f.condition();
condition.collectFirstChildren(c -> checkGroupMatch(c, condition, a.groupings(), missing, functions)); // variation of checkGroupMatch customized for HAVING, which requires just aggregations
condition.collectFirstChildren(c -> checkGroupByHavingHasOnlyAggs(c, condition, missing, functions));
if (!missing.isEmpty()) { if (!missing.isEmpty()) {
String plural = missing.size() > 1 ? "s" : StringUtils.EMPTY; String plural = missing.size() > 1 ? "s" : StringUtils.EMPTY;
localFailures.add(fail(condition, "Cannot filter by non-grouped column" + plural + " %s, expected %s", localFailures.add(
Expressions.names(missing.keySet()), fail(condition, "Cannot filter HAVING on non-aggregate" + plural + " %s; consider using WHERE instead",
Expressions.names(a.groupings()))); Expressions.names(missing.keySet())));
groupingFailures.add(a); groupingFailures.add(a);
return false; return false;
} }
@ -294,6 +296,57 @@ abstract class Verifier {
} }
private static boolean checkGroupByHavingHasOnlyAggs(Expression e, Node<?> source,
Map<Expression, Node<?>> missing, Map<String, Function> functions) {
// resolve FunctionAttribute to backing functions
if (e instanceof FunctionAttribute) {
FunctionAttribute fa = (FunctionAttribute) e;
Function function = functions.get(fa.functionId());
// TODO: this should be handled by a different rule
if (function == null) {
return false;
}
e = function;
}
// scalar functions can form an expression tree,
// so unpack the arguments hoping to find aggregates underneath
if (e instanceof ScalarFunction) {
ScalarFunction sf = (ScalarFunction) e;
// unwrap function to find the base
for (Expression arg : sf.arguments()) {
arg.collectFirstChildren(c -> checkGroupByHavingHasOnlyAggs(c, source, missing, functions));
}
return true;
} else if (e instanceof Score) {
// Score can't be used for having
missing.put(e, source);
return true;
}
// skip literals / foldable
if (e.foldable()) {
return true;
}
// skip aggs (allowed to refer to non-group columns)
if (Functions.isAggregate(e)) {
return true;
}
// at this point only plain leaves remain; a bare attribute is a failure since HAVING must be based on an aggregate
if (e instanceof Attribute) {
missing.put(e, source);
return true;
}
return false;
}
// check whether plain columns specified in an agg are mentioned in the group-by // check whether plain columns specified in an agg are mentioned in the group-by
private static boolean checkGroupByAgg(LogicalPlan p, Set<Failure> localFailures, private static boolean checkGroupByAgg(LogicalPlan p, Set<Failure> localFailures,
Set<LogicalPlan> groupingFailures, Map<String, Function> functions) { Set<LogicalPlan> groupingFailures, Map<String, Function> functions) {

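To make the reworked HAVING check concrete, consider two hypothetical queries against a made-up schema; the first filters on an aggregate and passes, the second references a bare column and now trips the new message:

String ok = "SELECT gender, MAX(salary) FROM emp GROUP BY gender HAVING MAX(salary) > 50000";
String bad = "SELECT gender, MAX(salary) FROM emp GROUP BY gender HAVING salary > 50000";
// bad is rejected with: Cannot filter HAVING on non-aggregate [salary]; consider using WHERE instead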
View File

@ -20,6 +20,7 @@ import org.elasticsearch.xpack.sql.expression.Order;
import org.elasticsearch.xpack.sql.expression.ScalarSubquery; import org.elasticsearch.xpack.sql.expression.ScalarSubquery;
import org.elasticsearch.xpack.sql.expression.UnresolvedAttribute; import org.elasticsearch.xpack.sql.expression.UnresolvedAttribute;
import org.elasticsearch.xpack.sql.expression.UnresolvedStar; import org.elasticsearch.xpack.sql.expression.UnresolvedStar;
import org.elasticsearch.xpack.sql.expression.function.Function;
import org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.Cast; import org.elasticsearch.xpack.sql.expression.function.scalar.Cast;
import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Add; import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Add;
@ -48,14 +49,19 @@ import org.elasticsearch.xpack.sql.expression.regex.RLike;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ArithmeticBinaryContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ArithmeticBinaryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ArithmeticUnaryContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ArithmeticUnaryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.BooleanLiteralContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.BooleanLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.CastContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.CastExpressionContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.CastTemplateContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ColumnReferenceContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ColumnReferenceContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ComparisonContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ComparisonContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DateEscapedLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DecimalLiteralContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DecimalLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DereferenceContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DereferenceContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ExistsContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ExistsContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ExtractContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ExtractExpressionContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.FunctionCallContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ExtractTemplateContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.FunctionExpressionContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.FunctionTemplateContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.GuidEscapedLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.IntegerLiteralContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.IntegerLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.LogicalBinaryContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.LogicalBinaryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.LogicalNotContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.LogicalNotContext;
@ -66,6 +72,7 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.OrderByContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ParamLiteralContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ParamLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ParenthesizedExpressionContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ParenthesizedExpressionContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PatternContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PatternContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PatternEscapeContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PredicateContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PredicateContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PredicatedContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PredicatedContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PrimitiveDataTypeContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PrimitiveDataTypeContext;
@ -76,10 +83,16 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StringContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StringLiteralContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StringLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StringQueryContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StringQueryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.SubqueryExpressionContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.SubqueryExpressionContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.TimeEscapedLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.TimestampEscapedLiteralContext;
import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue;
import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.DataTypes; import org.elasticsearch.xpack.sql.type.DataTypes;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.DateTimeFormatterBuilder;
import org.joda.time.format.ISODateTimeFormat;
import java.math.BigDecimal; import java.math.BigDecimal;
import java.util.List; import java.util.List;
@ -222,17 +235,18 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
} }
char escape = 0; char escape = 0;
String escapeString = string(ctx.escape); PatternEscapeContext escapeCtx = ctx.patternEscape();
String escapeString = escapeCtx == null ? null : string(escapeCtx.escape);
if (Strings.hasText(escapeString)) { if (Strings.hasText(escapeString)) {
// shouldn't happen but adding validation in case the string parsing gets wonky // shouldn't happen but adding validation in case the string parsing gets wonky
if (escapeString.length() > 1) { if (escapeString.length() > 1) {
throw new ParsingException(source(ctx.escape), "A character not a string required for escaping; found [{}]", escapeString); throw new ParsingException(source(escapeCtx), "A character not a string required for escaping; found [{}]", escapeString);
} else if (escapeString.length() == 1) { } else if (escapeString.length() == 1) {
escape = escapeString.charAt(0); escape = escapeString.charAt(0);
// these chars already have a meaning // these chars already have a meaning
if (escape == '*' || escape == '%' || escape == '_') { if (escape == '*' || escape == '%' || escape == '_') {
throw new ParsingException(source(ctx.escape), "Char [{}] cannot be used for escaping", escape); throw new ParsingException(source(escapeCtx.escape), "Char [{}] cannot be used for escaping", escape);
} }
// lastly validate that escape chars (if present) are followed by special chars // lastly validate that escape chars (if present) are followed by special chars
for (int i = 0; i < pattern.length(); i++) { for (int i = 0; i < pattern.length(); i++) {
@ -324,11 +338,6 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
ctx.DESC() != null ? Order.OrderDirection.DESC : Order.OrderDirection.ASC); ctx.DESC() != null ? Order.OrderDirection.DESC : Order.OrderDirection.ASC);
} }
@Override
public Object visitCast(CastContext ctx) {
return new Cast(source(ctx), expression(ctx.expression()), typedParsing(ctx.dataType(), DataType.class));
}
@Override @Override
public DataType visitPrimitiveDataType(PrimitiveDataTypeContext ctx) { public DataType visitPrimitiveDataType(PrimitiveDataTypeContext ctx) {
String type = visitIdentifier(ctx.identifier()).toLowerCase(Locale.ROOT); String type = visitIdentifier(ctx.identifier()).toLowerCase(Locale.ROOT);
@ -367,20 +376,32 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
} }
} }
//
// Functions template
//
@Override @Override
public Object visitFunctionCall(FunctionCallContext ctx) { public Cast visitCastExpression(CastExpressionContext ctx) {
String name = visitIdentifier(ctx.identifier()); CastTemplateContext ctc = ctx.castTemplate();
boolean isDistinct = ctx.setQuantifier() != null && ctx.setQuantifier().DISTINCT() != null; return new Cast(source(ctc), expression(ctc.expression()), typedParsing(ctc.dataType(), DataType.class));
UnresolvedFunction.ResolutionType resolutionType =
isDistinct ? UnresolvedFunction.ResolutionType.DISTINCT : UnresolvedFunction.ResolutionType.STANDARD;
return new UnresolvedFunction(source(ctx), name, resolutionType, expressions(ctx.expression()));
} }
@Override @Override
public Object visitExtract(ExtractContext ctx) { public Function visitExtractExpression(ExtractExpressionContext ctx) {
String fieldString = visitIdentifier(ctx.field); ExtractTemplateContext template = ctx.extractTemplate();
return new UnresolvedFunction(source(ctx), fieldString, String fieldString = visitIdentifier(template.field);
UnresolvedFunction.ResolutionType.EXTRACT, singletonList(expression(ctx.valueExpression()))); return new UnresolvedFunction(source(template), fieldString,
UnresolvedFunction.ResolutionType.EXTRACT, singletonList(expression(template.valueExpression())));
}
@Override
public Function visitFunctionExpression(FunctionExpressionContext ctx) {
FunctionTemplateContext template = ctx.functionTemplate();
String name = visitIdentifier(template.identifier());
boolean isDistinct = template.setQuantifier() != null && template.setQuantifier().DISTINCT() != null;
UnresolvedFunction.ResolutionType resolutionType =
isDistinct ? UnresolvedFunction.ResolutionType.DISTINCT : UnresolvedFunction.ResolutionType.STANDARD;
return new UnresolvedFunction(source(ctx), name, resolutionType, expressions(template.expression()));
} }
@Override @Override
@ -445,12 +466,12 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
} }
@Override @Override
public Object visitDecimalLiteral(DecimalLiteralContext ctx) { public Literal visitDecimalLiteral(DecimalLiteralContext ctx) {
return new Literal(source(ctx), new BigDecimal(ctx.getText()).doubleValue(), DataType.DOUBLE); return new Literal(source(ctx), new BigDecimal(ctx.getText()).doubleValue(), DataType.DOUBLE);
} }
@Override @Override
public Object visitIntegerLiteral(IntegerLiteralContext ctx) { public Literal visitIntegerLiteral(IntegerLiteralContext ctx) {
BigDecimal bigD = new BigDecimal(ctx.getText()); BigDecimal bigD = new BigDecimal(ctx.getText());
long value = bigD.longValueExact(); long value = bigD.longValueExact();
@ -463,7 +484,7 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
} }
@Override @Override
public Object visitParamLiteral(ParamLiteralContext ctx) { public Literal visitParamLiteral(ParamLiteralContext ctx) {
SqlTypedParamValue param = param(ctx.PARAM()); SqlTypedParamValue param = param(ctx.PARAM());
Location loc = source(ctx); Location loc = source(ctx);
if (param.value == null) { if (param.value == null) {
@ -522,4 +543,100 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
return params.get(token); return params.get(token);
} }
}
@Override
public Literal visitDateEscapedLiteral(DateEscapedLiteralContext ctx) {
String string = string(ctx.string());
Location loc = source(ctx);
// parse yyyy-MM-dd
DateTime dt = null;
try {
dt = ISODateTimeFormat.date().parseDateTime(string);
} catch(IllegalArgumentException ex) {
throw new ParsingException(loc, "Invalid date received; {}", ex.getMessage());
}
return new Literal(loc, dt, DataType.DATE);
}
@Override
public Literal visitTimeEscapedLiteral(TimeEscapedLiteralContext ctx) {
String string = string(ctx.string());
Location loc = source(ctx);
// validate the HH:mm:ss format even though time-only literals are rejected below
try {
ISODateTimeFormat.hourMinuteSecond().parseDateTime(string);
} catch (IllegalArgumentException ex) {
throw new ParsingException(loc, "Invalid time received; {}", ex.getMessage());
}
throw new SqlIllegalArgumentException("Time (only) literals are not supported; a date component is required as well");
}
@Override
public Literal visitTimestampEscapedLiteral(TimestampEscapedLiteralContext ctx) {
String string = string(ctx.string());
Location loc = source(ctx);
// parse yyyy-MM-dd HH:mm:ss(.f...)
DateTime dt = null;
try {
DateTimeFormatter formatter = new DateTimeFormatterBuilder()
.append(ISODateTimeFormat.date())
.appendLiteral(" ")
.append(ISODateTimeFormat.hourMinuteSecondFraction())
.toFormatter();
dt = formatter.parseDateTime(string);
} catch (IllegalArgumentException ex) {
throw new ParsingException(loc, "Invalid timestamp received; {}", ex.getMessage());
}
return new Literal(loc, dt, DataType.DATE);
}
@Override
public Literal visitGuidEscapedLiteral(GuidEscapedLiteralContext ctx) {
String string = string(ctx.string());
Location loc = source(ctx.string());
// basic validation
String lowerCase = string.toLowerCase(Locale.ROOT);
// needs to be format nnnnnnnn-nnnn-nnnn-nnnn-nnnnnnnnnnnn
// since the length is fixed, the validation happens on absolute values
// not pretty but it's fast and doesn't create any extra objects
String errorPrefix = "Invalid GUID, ";
if (lowerCase.length() != 36) {
throw new ParsingException(loc, "{}too {}", errorPrefix, lowerCase.length() > 36 ? "long" : "short");
}
int[] separatorPos = { 8, 13, 18, 23 };
for (int pos : separatorPos) {
if (lowerCase.charAt(pos) != '-') {
throw new ParsingException(loc, "{}expected group separator at offset [{}], found [{}]",
errorPrefix, pos, string.charAt(pos));
}
}
String HEXA = "0123456789abcdef";
for (int i = 0; i < lowerCase.length(); i++) {
// skip separators
boolean inspect = true;
for (int pos : separatorPos) {
if (i == pos) {
inspect = false;
break;
} else if (pos > i) {
break;
}
}
if (inspect && HEXA.indexOf(lowerCase.charAt(i)) < 0) {
throw new ParsingException(loc, "{}expected hexadecimal at offset[{}], found [{}]", errorPrefix, i, string.charAt(i));
}
}
return new Literal(source(ctx), string, DataType.KEYWORD);
}
}

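The composed joda formatter in visitTimestampEscapedLiteral accepts yyyy-MM-dd HH:mm:ss.SSS style values. A self-contained sketch of the same composition (the sample timestamp is illustrative):

import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.DateTimeFormatterBuilder;
import org.joda.time.format.ISODateTimeFormat;

public class TimestampEscapeParse {
    public static void main(String[] args) {
        DateTimeFormatter formatter = new DateTimeFormatterBuilder()
            .append(ISODateTimeFormat.date())                     // yyyy-MM-dd
            .appendLiteral(" ")
            .append(ISODateTimeFormat.hourMinuteSecondFraction()) // HH:mm:ss.SSS
            .toFormatter();
        DateTime dt = formatter.parseDateTime("2018-07-12 08:57:39.123");
        System.out.println(dt);
    }
}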
View File

@ -19,6 +19,7 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.GroupByContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinCriteriaContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinCriteriaContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinRelationContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinRelationContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinTypeContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinTypeContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.LimitClauseContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.NamedQueryContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.NamedQueryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.QueryContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.QueryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.QueryNoWithContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.QueryNoWithContext;
@ -89,9 +90,13 @@ abstract class LogicalPlanBuilder extends ExpressionBuilder {
plan = new OrderBy(source(ctx.ORDER()), plan, visitList(ctx.orderBy(), Order.class)); plan = new OrderBy(source(ctx.ORDER()), plan, visitList(ctx.orderBy(), Order.class));
} }
if (ctx.limit != null && ctx.INTEGER_VALUE() != null) { LimitClauseContext limitClause = ctx.limitClause();
plan = new Limit(source(ctx.limit), new Literal(source(ctx), if (limitClause != null) {
Integer.parseInt(ctx.limit.getText()), DataType.INTEGER), plan); Token limit = limitClause.limit;
if (limit != null && limitClause.INTEGER_VALUE() != null) {
plan = new Limit(source(limitClause), new Literal(source(limitClause),
Integer.parseInt(limit.getText()), DataType.INTEGER), plan);
}
} }
return plan; return plan;

View File

@ -1,8 +1,3 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
// ANTLR GENERATED CODE: DO NOT EDIT // ANTLR GENERATED CODE: DO NOT EDIT
package org.elasticsearch.xpack.sql.parser; package org.elasticsearch.xpack.sql.parser;
@ -208,6 +203,18 @@ class SqlBaseBaseListener implements SqlBaseListener {
* <p>The default implementation does nothing.</p> * <p>The default implementation does nothing.</p>
*/ */
@Override public void exitQueryNoWith(SqlBaseParser.QueryNoWithContext ctx) { } @Override public void exitQueryNoWith(SqlBaseParser.QueryNoWithContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterLimitClause(SqlBaseParser.LimitClauseContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitLimitClause(SqlBaseParser.LimitClauseContext ctx) { }
/** /**
* {@inheritDoc} * {@inheritDoc}
* *
@ -556,6 +563,18 @@ class SqlBaseBaseListener implements SqlBaseListener {
* <p>The default implementation does nothing.</p> * <p>The default implementation does nothing.</p>
*/ */
@Override public void exitPattern(SqlBaseParser.PatternContext ctx) { } @Override public void exitPattern(SqlBaseParser.PatternContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterPatternEscape(SqlBaseParser.PatternEscapeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitPatternEscape(SqlBaseParser.PatternEscapeContext ctx) { }
/** /**
* {@inheritDoc} * {@inheritDoc}
* *
@ -657,13 +676,13 @@ class SqlBaseBaseListener implements SqlBaseListener {
* *
* <p>The default implementation does nothing.</p> * <p>The default implementation does nothing.</p>
*/ */
@Override public void enterFunctionCall(SqlBaseParser.FunctionCallContext ctx) { } @Override public void enterFunction(SqlBaseParser.FunctionContext ctx) { }
/** /**
* {@inheritDoc} * {@inheritDoc}
* *
* <p>The default implementation does nothing.</p> * <p>The default implementation does nothing.</p>
*/ */
@Override public void exitFunctionCall(SqlBaseParser.FunctionCallContext ctx) { } @Override public void exitFunction(SqlBaseParser.FunctionContext ctx) { }
/** /**
* {@inheritDoc} * {@inheritDoc}
* *
@ -712,6 +731,78 @@ class SqlBaseBaseListener implements SqlBaseListener {
* <p>The default implementation does nothing.</p> * <p>The default implementation does nothing.</p>
*/ */
@Override public void exitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx) { } @Override public void exitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterCastExpression(SqlBaseParser.CastExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitCastExpression(SqlBaseParser.CastExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterCastTemplate(SqlBaseParser.CastTemplateContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitCastTemplate(SqlBaseParser.CastTemplateContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExtractExpression(SqlBaseParser.ExtractExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExtractExpression(SqlBaseParser.ExtractExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExtractTemplate(SqlBaseParser.ExtractTemplateContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExtractTemplate(SqlBaseParser.ExtractTemplateContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterFunctionExpression(SqlBaseParser.FunctionExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitFunctionExpression(SqlBaseParser.FunctionExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx) { }
/** /**
* {@inheritDoc} * {@inheritDoc}
* *
@ -772,6 +863,54 @@ class SqlBaseBaseListener implements SqlBaseListener {
* <p>The default implementation does nothing.</p> * <p>The default implementation does nothing.</p>
*/ */
@Override public void exitParamLiteral(SqlBaseParser.ParamLiteralContext ctx) { } @Override public void exitParamLiteral(SqlBaseParser.ParamLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterDateEscapedLiteral(SqlBaseParser.DateEscapedLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitDateEscapedLiteral(SqlBaseParser.DateEscapedLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterTimeEscapedLiteral(SqlBaseParser.TimeEscapedLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitTimeEscapedLiteral(SqlBaseParser.TimeEscapedLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterTimestampEscapedLiteral(SqlBaseParser.TimestampEscapedLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitTimestampEscapedLiteral(SqlBaseParser.TimestampEscapedLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterGuidEscapedLiteral(SqlBaseParser.GuidEscapedLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitGuidEscapedLiteral(SqlBaseParser.GuidEscapedLiteralContext ctx) { }
/** /**
* {@inheritDoc} * {@inheritDoc}
* *
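The new callbacks above are deliberate no-ops: SqlBaseBaseListener exists so a client can subclass it and override only the rules it cares about. The sketch below is illustrative rather than part of this change; it assumes an ANTLR 4 runtime, a ParseTree already produced by SqlBaseParser, and same-package access (the generated classes are package-private). EscapedLiteralCollector is a hypothetical name.

package org.elasticsearch.xpack.sql.parser;

import java.util.ArrayList;
import java.util.List;

import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.ParseTreeWalker;

// Hypothetical example: collect the text of every {d '...'} and {ts '...'}
// escape found while walking a parse tree.
class EscapedLiteralCollector extends SqlBaseBaseListener {
    final List<String> literals = new ArrayList<>();

    @Override public void enterDateEscapedLiteral(SqlBaseParser.DateEscapedLiteralContext ctx) {
        literals.add(ctx.getText());
    }

    @Override public void enterTimestampEscapedLiteral(SqlBaseParser.TimestampEscapedLiteralContext ctx) {
        literals.add(ctx.getText());
    }

    // Walk an existing tree and return the matched escape expressions.
    static List<String> collect(ParseTree tree) {
        EscapedLiteralCollector collector = new EscapedLiteralCollector();
        ParseTreeWalker.DEFAULT.walk(collector, tree);
        return collector.literals;
    }
}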

View File

@ -1,8 +1,3 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
// ANTLR GENERATED CODE: DO NOT EDIT // ANTLR GENERATED CODE: DO NOT EDIT
package org.elasticsearch.xpack.sql.parser; package org.elasticsearch.xpack.sql.parser;
import org.antlr.v4.runtime.tree.AbstractParseTreeVisitor; import org.antlr.v4.runtime.tree.AbstractParseTreeVisitor;
@ -128,6 +123,13 @@ class SqlBaseBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements SqlBa
* {@link #visitChildren} on {@code ctx}.</p> * {@link #visitChildren} on {@code ctx}.</p>
*/ */
@Override public T visitQueryNoWith(SqlBaseParser.QueryNoWithContext ctx) { return visitChildren(ctx); } @Override public T visitQueryNoWith(SqlBaseParser.QueryNoWithContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitLimitClause(SqlBaseParser.LimitClauseContext ctx) { return visitChildren(ctx); }
/** /**
* {@inheritDoc} * {@inheritDoc}
* *
@ -331,6 +333,13 @@ class SqlBaseBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements SqlBa
* {@link #visitChildren} on {@code ctx}.</p> * {@link #visitChildren} on {@code ctx}.</p>
*/ */
@Override public T visitPattern(SqlBaseParser.PatternContext ctx) { return visitChildren(ctx); } @Override public T visitPattern(SqlBaseParser.PatternContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitPatternEscape(SqlBaseParser.PatternEscapeContext ctx) { return visitChildren(ctx); }
/** /**
* {@inheritDoc} * {@inheritDoc}
* *
@ -393,7 +402,7 @@ class SqlBaseBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements SqlBa
* <p>The default implementation returns the result of calling * <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p> * {@link #visitChildren} on {@code ctx}.</p>
*/ */
@Override public T visitFunctionCall(SqlBaseParser.FunctionCallContext ctx) { return visitChildren(ctx); } @Override public T visitFunction(SqlBaseParser.FunctionContext ctx) { return visitChildren(ctx); }
/** /**
* {@inheritDoc} * {@inheritDoc}
* *
@ -422,6 +431,48 @@ class SqlBaseBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements SqlBa
* {@link #visitChildren} on {@code ctx}.</p> * {@link #visitChildren} on {@code ctx}.</p>
*/ */
@Override public T visitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx) { return visitChildren(ctx); } @Override public T visitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitCastExpression(SqlBaseParser.CastExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitCastTemplate(SqlBaseParser.CastTemplateContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExtractExpression(SqlBaseParser.ExtractExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitExtractTemplate(SqlBaseParser.ExtractTemplateContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitFunctionExpression(SqlBaseParser.FunctionExpressionContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx) { return visitChildren(ctx); }
/** /**
* {@inheritDoc} * {@inheritDoc}
* *
@ -457,6 +508,34 @@ class SqlBaseBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements SqlBa
* {@link #visitChildren} on {@code ctx}.</p> * {@link #visitChildren} on {@code ctx}.</p>
*/ */
@Override public T visitParamLiteral(SqlBaseParser.ParamLiteralContext ctx) { return visitChildren(ctx); } @Override public T visitParamLiteral(SqlBaseParser.ParamLiteralContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitDateEscapedLiteral(SqlBaseParser.DateEscapedLiteralContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitTimeEscapedLiteral(SqlBaseParser.TimeEscapedLiteralContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitTimestampEscapedLiteral(SqlBaseParser.TimestampEscapedLiteralContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
* <p>The default implementation returns the result of calling
* {@link #visitChildren} on {@code ctx}.</p>
*/
@Override public T visitGuidEscapedLiteral(SqlBaseParser.GuidEscapedLiteralContext ctx) { return visitChildren(ctx); }
/** /**
* {@inheritDoc} * {@inheritDoc}
* *
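The visitor counterpart defaults every method to visitChildren, so a subclass can fold a value over the whole tree by overriding the aggregation hooks plus the rules of interest. A minimal sketch under the same assumptions as above (same package, ANTLR 4 runtime); CastCounter is a hypothetical name.

package org.elasticsearch.xpack.sql.parser;

// Counts CAST(expr AS type) nodes anywhere in a parse tree,
// e.g. int casts = new CastCounter().visit(tree);
class CastCounter extends SqlBaseBaseVisitor<Integer> {
    @Override protected Integer defaultResult() { return 0; }

    @Override protected Integer aggregateResult(Integer aggregate, Integer nextResult) {
        return aggregate + nextResult;
    }

    @Override public Integer visitCastExpression(SqlBaseParser.CastExpressionContext ctx) {
        // Count this cast and keep descending in case of nested casts.
        return 1 + visitChildren(ctx);
    }
}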

View File

@ -1,15 +1,13 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
// ANTLR GENERATED CODE: DO NOT EDIT // ANTLR GENERATED CODE: DO NOT EDIT
package org.elasticsearch.xpack.sql.parser; package org.elasticsearch.xpack.sql.parser;
import org.antlr.v4.runtime.Lexer; import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.CharStream; import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.*; import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*; import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA; import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.*;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
class SqlBaseLexer extends Lexer { class SqlBaseLexer extends Lexer {
@ -28,11 +26,13 @@ class SqlBaseLexer extends Lexer {
NOT=45, NULL=46, ON=47, OPTIMIZED=48, OR=49, ORDER=50, OUTER=51, PARSED=52, NOT=45, NULL=46, ON=47, OPTIMIZED=48, OR=49, ORDER=50, OUTER=51, PARSED=52,
PHYSICAL=53, PLAN=54, RIGHT=55, RLIKE=56, QUERY=57, SCHEMAS=58, SELECT=59, PHYSICAL=53, PLAN=54, RIGHT=55, RLIKE=56, QUERY=57, SCHEMAS=58, SELECT=59,
SHOW=60, SYS=61, TABLE=62, TABLES=63, TEXT=64, TRUE=65, TYPE=66, TYPES=67, SHOW=60, SYS=61, TABLE=62, TABLES=63, TEXT=64, TRUE=65, TYPE=66, TYPES=67,
USING=68, VERIFY=69, WHERE=70, WITH=71, EQ=72, NEQ=73, LT=74, LTE=75, USING=68, VERIFY=69, WHERE=70, WITH=71, ESCAPE_ESC=72, FUNCTION_ESC=73,
GT=76, GTE=77, PLUS=78, MINUS=79, ASTERISK=80, SLASH=81, PERCENT=82, CONCAT=83, LIMIT_ESC=74, DATE_ESC=75, TIME_ESC=76, TIMESTAMP_ESC=77, GUID_ESC=78,
DOT=84, PARAM=85, STRING=86, INTEGER_VALUE=87, DECIMAL_VALUE=88, IDENTIFIER=89, ESC_END=79, EQ=80, NEQ=81, LT=82, LTE=83, GT=84, GTE=85, PLUS=86, MINUS=87,
DIGIT_IDENTIFIER=90, TABLE_IDENTIFIER=91, QUOTED_IDENTIFIER=92, BACKQUOTED_IDENTIFIER=93, ASTERISK=88, SLASH=89, PERCENT=90, CONCAT=91, DOT=92, PARAM=93, STRING=94,
SIMPLE_COMMENT=94, BRACKETED_COMMENT=95, WS=96, UNRECOGNIZED=97; INTEGER_VALUE=95, DECIMAL_VALUE=96, IDENTIFIER=97, DIGIT_IDENTIFIER=98,
TABLE_IDENTIFIER=99, QUOTED_IDENTIFIER=100, BACKQUOTED_IDENTIFIER=101,
SIMPLE_COMMENT=102, BRACKETED_COMMENT=103, WS=104, UNRECOGNIZED=105;
public static String[] modeNames = { public static String[] modeNames = {
"DEFAULT_MODE" "DEFAULT_MODE"
}; };
@ -46,12 +46,13 @@ class SqlBaseLexer extends Lexer {
"LIMIT", "MAPPED", "MATCH", "NATURAL", "NOT", "NULL", "ON", "OPTIMIZED", "LIMIT", "MAPPED", "MATCH", "NATURAL", "NOT", "NULL", "ON", "OPTIMIZED",
"OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", "RIGHT", "RLIKE", "OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", "RIGHT", "RLIKE",
"QUERY", "SCHEMAS", "SELECT", "SHOW", "SYS", "TABLE", "TABLES", "TEXT", "QUERY", "SCHEMAS", "SELECT", "SHOW", "SYS", "TABLE", "TABLES", "TEXT",
"TRUE", "TYPE", "TYPES", "USING", "VERIFY", "WHERE", "WITH", "EQ", "NEQ", "TRUE", "TYPE", "TYPES", "USING", "VERIFY", "WHERE", "WITH", "ESCAPE_ESC",
"LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC",
"CONCAT", "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "GUID_ESC", "ESC_END", "EQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS",
"IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CONCAT", "DOT", "PARAM", "STRING",
"BACKQUOTED_IDENTIFIER", "EXPONENT", "DIGIT", "LETTER", "SIMPLE_COMMENT", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER",
"BRACKETED_COMMENT", "WS", "UNRECOGNIZED" "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "EXPONENT", "DIGIT", "LETTER",
"SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", "UNRECOGNIZED"
}; };
private static final String[] _LITERAL_NAMES = { private static final String[] _LITERAL_NAMES = {
@ -65,8 +66,9 @@ class SqlBaseLexer extends Lexer {
"'OR'", "'ORDER'", "'OUTER'", "'PARSED'", "'PHYSICAL'", "'PLAN'", "'RIGHT'", "'OR'", "'ORDER'", "'OUTER'", "'PARSED'", "'PHYSICAL'", "'PLAN'", "'RIGHT'",
"'RLIKE'", "'QUERY'", "'SCHEMAS'", "'SELECT'", "'SHOW'", "'SYS'", "'TABLE'", "'RLIKE'", "'QUERY'", "'SCHEMAS'", "'SELECT'", "'SHOW'", "'SYS'", "'TABLE'",
"'TABLES'", "'TEXT'", "'TRUE'", "'TYPE'", "'TYPES'", "'USING'", "'VERIFY'", "'TABLES'", "'TEXT'", "'TRUE'", "'TYPE'", "'TYPES'", "'USING'", "'VERIFY'",
"'WHERE'", "'WITH'", "'='", null, "'<'", "'<='", "'>'", "'>='", "'+'", "'WHERE'", "'WITH'", "'{ESCAPE'", "'{FN'", "'{LIMIT'", "'{D'", "'{T'",
"'-'", "'*'", "'/'", "'%'", "'||'", "'.'", "'?'" "'{TS'", "'{GUID'", "'}'", "'='", null, "'<'", "'<='", "'>'", "'>='",
"'+'", "'-'", "'*'", "'/'", "'%'", "'||'", "'.'", "'?'"
}; };
private static final String[] _SYMBOLIC_NAMES = { private static final String[] _SYMBOLIC_NAMES = {
null, null, null, null, null, "ALL", "ANALYZE", "ANALYZED", "AND", "ANY", null, null, null, null, null, "ALL", "ANALYZE", "ANALYZED", "AND", "ANY",
@ -77,12 +79,13 @@ class SqlBaseLexer extends Lexer {
"LIMIT", "MAPPED", "MATCH", "NATURAL", "NOT", "NULL", "ON", "OPTIMIZED", "LIMIT", "MAPPED", "MATCH", "NATURAL", "NOT", "NULL", "ON", "OPTIMIZED",
"OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", "RIGHT", "RLIKE", "OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", "RIGHT", "RLIKE",
"QUERY", "SCHEMAS", "SELECT", "SHOW", "SYS", "TABLE", "TABLES", "TEXT", "QUERY", "SCHEMAS", "SELECT", "SHOW", "SYS", "TABLE", "TABLES", "TEXT",
"TRUE", "TYPE", "TYPES", "USING", "VERIFY", "WHERE", "WITH", "EQ", "NEQ", "TRUE", "TYPE", "TYPES", "USING", "VERIFY", "WHERE", "WITH", "ESCAPE_ESC",
"LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC",
"CONCAT", "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", "GUID_ESC", "ESC_END", "EQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS",
"IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", "MINUS", "ASTERISK", "SLASH", "PERCENT", "CONCAT", "DOT", "PARAM", "STRING",
"BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER",
"UNRECOGNIZED" "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", "BRACKETED_COMMENT",
"WS", "UNRECOGNIZED"
}; };
public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
@ -139,7 +142,7 @@ class SqlBaseLexer extends Lexer {
public ATN getATN() { return _ATN; } public ATN getATN() { return _ATN; }
public static final String _serializedATN = public static final String _serializedATN =
"\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2c\u033b\b\1\4\2\t"+ "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2k\u0370\b\1\4\2\t"+
"\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+ "\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+
"\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+
"\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+
@ -150,276 +153,293 @@ class SqlBaseLexer extends Lexer {
"\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4I"+ "\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4I"+
"\tI\4J\tJ\4K\tK\4L\tL\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT"+ "\tI\4J\tJ\4K\tK\4L\tL\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT"+
"\4U\tU\4V\tV\4W\tW\4X\tX\4Y\tY\4Z\tZ\4[\t[\4\\\t\\\4]\t]\4^\t^\4_\t_\4"+ "\4U\tU\4V\tV\4W\tW\4X\tX\4Y\tY\4Z\tZ\4[\t[\4\\\t\\\4]\t]\4^\t^\4_\t_\4"+
"`\t`\4a\ta\4b\tb\4c\tc\4d\td\4e\te\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3\6"+ "`\t`\4a\ta\4b\tb\4c\tc\4d\td\4e\te\4f\tf\4g\tg\4h\th\4i\ti\4j\tj\4k\t"+
"\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\b\3\b\3\b\3\b\3\b\3\b\3"+ "k\4l\tl\4m\tm\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3\6\3\6\3\7\3\7"+
"\b\3\b\3\b\3\t\3\t\3\t\3\t\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\f\3\f\3\f"+ "\3\7\3\7\3\7\3\7\3\7\3\7\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\t\3\t\3"+
"\3\f\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\16\3\16\3\16\3\17\3\17\3\17\3\17"+ "\t\3\t\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\f\3\f\3\f\3\f\3\r\3\r\3\r\3\r"+
"\3\17\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\21\3\21\3\21\3\21\3\21"+ "\3\r\3\r\3\r\3\r\3\16\3\16\3\16\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\20"+
"\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\23\3\23"+ "\3\20\3\20\3\20\3\20\3\20\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21"+
"\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3\25"+ "\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\23\3\23\3\23\3\23\3\23\3\23"+
"\3\25\3\25\3\25\3\25\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\27"+ "\3\24\3\24\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25\3\25"+
"\3\27\3\27\3\27\3\27\3\27\3\27\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30"+ "\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\27\3\27\3\27\3\27\3\27"+
"\3\30\3\30\3\30\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\32\3\32\3\32\3\32"+ "\3\27\3\27\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\31"+
"\3\32\3\32\3\32\3\32\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\34\3\34"+ "\3\31\3\31\3\31\3\31\3\31\3\31\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32"+
"\3\34\3\34\3\34\3\34\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\36\3\36\3\36"+ "\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\34\3\34\3\34\3\34\3\34\3\34"+
"\3\36\3\36\3\37\3\37\3\37\3\37\3\37\3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3!\3"+ "\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\36\3\36\3\36\3\36\3\36\3\37\3\37"+
"!\3!\3!\3!\3!\3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3\"\3#\3#\3#\3#\3#\3#\3#\3"+ "\3\37\3\37\3\37\3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3!\3!\3!\3!\3!\3!\3!\3!"+
"$\3$\3$\3%\3%\3%\3%\3%\3%\3&\3&\3&\3\'\3\'\3\'\3\'\3\'\3(\3(\3(\3(\3("+ "\3!\3\"\3\"\3\"\3\"\3\"\3\"\3#\3#\3#\3#\3#\3#\3#\3$\3$\3$\3%\3%\3%\3%"+
"\3)\3)\3)\3)\3)\3*\3*\3*\3*\3*\3*\3+\3+\3+\3+\3+\3+\3+\3,\3,\3,\3,\3,"+ "\3%\3%\3&\3&\3&\3\'\3\'\3\'\3\'\3\'\3(\3(\3(\3(\3(\3)\3)\3)\3)\3)\3*\3"+
"\3,\3-\3-\3-\3-\3-\3-\3-\3-\3.\3.\3.\3.\3/\3/\3/\3/\3/\3\60\3\60\3\60"+ "*\3*\3*\3*\3*\3+\3+\3+\3+\3+\3+\3+\3,\3,\3,\3,\3,\3,\3-\3-\3-\3-\3-\3"+
"\3\61\3\61\3\61\3\61\3\61\3\61\3\61\3\61\3\61\3\61\3\62\3\62\3\62\3\63"+ "-\3-\3-\3.\3.\3.\3.\3/\3/\3/\3/\3/\3\60\3\60\3\60\3\61\3\61\3\61\3\61"+
"\3\63\3\63\3\63\3\63\3\63\3\64\3\64\3\64\3\64\3\64\3\64\3\65\3\65\3\65"+ "\3\61\3\61\3\61\3\61\3\61\3\61\3\62\3\62\3\62\3\63\3\63\3\63\3\63\3\63"+
"\3\65\3\65\3\65\3\65\3\66\3\66\3\66\3\66\3\66\3\66\3\66\3\66\3\66\3\67"+ "\3\63\3\64\3\64\3\64\3\64\3\64\3\64\3\65\3\65\3\65\3\65\3\65\3\65\3\65"+
"\3\67\3\67\3\67\3\67\38\38\38\38\38\38\39\39\39\39\39\39\3:\3:\3:\3:\3"+ "\3\66\3\66\3\66\3\66\3\66\3\66\3\66\3\66\3\66\3\67\3\67\3\67\3\67\3\67"+
":\3:\3;\3;\3;\3;\3;\3;\3;\3;\3<\3<\3<\3<\3<\3<\3<\3=\3=\3=\3=\3=\3>\3"+ "\38\38\38\38\38\38\39\39\39\39\39\39\3:\3:\3:\3:\3:\3:\3;\3;\3;\3;\3;"+
">\3>\3>\3?\3?\3?\3?\3?\3?\3@\3@\3@\3@\3@\3@\3@\3A\3A\3A\3A\3A\3B\3B\3"+ "\3;\3;\3;\3<\3<\3<\3<\3<\3<\3<\3=\3=\3=\3=\3=\3>\3>\3>\3>\3?\3?\3?\3?"+
"B\3B\3B\3C\3C\3C\3C\3C\3D\3D\3D\3D\3D\3D\3E\3E\3E\3E\3E\3E\3F\3F\3F\3"+ "\3?\3?\3@\3@\3@\3@\3@\3@\3@\3A\3A\3A\3A\3A\3B\3B\3B\3B\3B\3C\3C\3C\3C"+
"F\3F\3F\3F\3G\3G\3G\3G\3G\3G\3H\3H\3H\3H\3H\3I\3I\3J\3J\3J\3J\3J\3J\3"+ "\3C\3D\3D\3D\3D\3D\3D\3E\3E\3E\3E\3E\3E\3F\3F\3F\3F\3F\3F\3F\3G\3G\3G"+
"J\5J\u027b\nJ\3K\3K\3L\3L\3L\3M\3M\3N\3N\3N\3O\3O\3P\3P\3Q\3Q\3R\3R\3"+ "\3G\3G\3G\3H\3H\3H\3H\3H\3I\3I\3I\3I\3I\3I\3I\3I\3J\3J\3J\3J\3K\3K\3K"+
"S\3S\3T\3T\3T\3U\3U\3V\3V\3W\3W\3W\3W\7W\u029c\nW\fW\16W\u029f\13W\3W"+ "\3K\3K\3K\3K\3L\3L\3L\3M\3M\3M\3N\3N\3N\3N\3O\3O\3O\3O\3O\3O\3P\3P\3Q"+
"\3W\3X\6X\u02a4\nX\rX\16X\u02a5\3Y\6Y\u02a9\nY\rY\16Y\u02aa\3Y\3Y\7Y\u02af"+ "\3Q\3R\3R\3R\3R\3R\3R\3R\5R\u02b0\nR\3S\3S\3T\3T\3T\3U\3U\3V\3V\3V\3W"+
"\nY\fY\16Y\u02b2\13Y\3Y\3Y\6Y\u02b6\nY\rY\16Y\u02b7\3Y\6Y\u02bb\nY\rY"+ "\3W\3X\3X\3Y\3Y\3Z\3Z\3[\3[\3\\\3\\\3\\\3]\3]\3^\3^\3_\3_\3_\3_\7_\u02d1"+
"\16Y\u02bc\3Y\3Y\7Y\u02c1\nY\fY\16Y\u02c4\13Y\5Y\u02c6\nY\3Y\3Y\3Y\3Y"+ "\n_\f_\16_\u02d4\13_\3_\3_\3`\6`\u02d9\n`\r`\16`\u02da\3a\6a\u02de\na"+
"\6Y\u02cc\nY\rY\16Y\u02cd\3Y\3Y\5Y\u02d2\nY\3Z\3Z\5Z\u02d6\nZ\3Z\3Z\3"+ "\ra\16a\u02df\3a\3a\7a\u02e4\na\fa\16a\u02e7\13a\3a\3a\6a\u02eb\na\ra"+
"Z\7Z\u02db\nZ\fZ\16Z\u02de\13Z\3[\3[\3[\3[\6[\u02e4\n[\r[\16[\u02e5\3"+ "\16a\u02ec\3a\6a\u02f0\na\ra\16a\u02f1\3a\3a\7a\u02f6\na\fa\16a\u02f9"+
"\\\3\\\3\\\3\\\6\\\u02ec\n\\\r\\\16\\\u02ed\3]\3]\3]\3]\7]\u02f4\n]\f"+ "\13a\5a\u02fb\na\3a\3a\3a\3a\6a\u0301\na\ra\16a\u0302\3a\3a\5a\u0307\n"+
"]\16]\u02f7\13]\3]\3]\3^\3^\3^\3^\7^\u02ff\n^\f^\16^\u0302\13^\3^\3^\3"+ "a\3b\3b\5b\u030b\nb\3b\3b\3b\7b\u0310\nb\fb\16b\u0313\13b\3c\3c\3c\3c"+
"_\3_\5_\u0308\n_\3_\6_\u030b\n_\r_\16_\u030c\3`\3`\3a\3a\3b\3b\3b\3b\7"+ "\6c\u0319\nc\rc\16c\u031a\3d\3d\3d\3d\6d\u0321\nd\rd\16d\u0322\3e\3e\3"+
"b\u0317\nb\fb\16b\u031a\13b\3b\5b\u031d\nb\3b\5b\u0320\nb\3b\3b\3c\3c"+ "e\3e\7e\u0329\ne\fe\16e\u032c\13e\3e\3e\3f\3f\3f\3f\7f\u0334\nf\ff\16"+
"\3c\3c\3c\7c\u0329\nc\fc\16c\u032c\13c\3c\3c\3c\3c\3c\3d\6d\u0334\nd\r"+ "f\u0337\13f\3f\3f\3g\3g\5g\u033d\ng\3g\6g\u0340\ng\rg\16g\u0341\3h\3h"+
"d\16d\u0335\3d\3d\3e\3e\3\u032a\2f\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n"+ "\3i\3i\3j\3j\3j\3j\7j\u034c\nj\fj\16j\u034f\13j\3j\5j\u0352\nj\3j\5j\u0355"+
"\23\13\25\f\27\r\31\16\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27-\30"+ "\nj\3j\3j\3k\3k\3k\3k\3k\7k\u035e\nk\fk\16k\u0361\13k\3k\3k\3k\3k\3k\3"+
"/\31\61\32\63\33\65\34\67\359\36;\37= ?!A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.["+ "l\6l\u0369\nl\rl\16l\u036a\3l\3l\3m\3m\3\u035f\2n\3\3\5\4\7\5\t\6\13\7"+
"/]\60_\61a\62c\63e\64g\65i\66k\67m8o9q:s;u<w=y>{?}@\177A\u0081B\u0083"+ "\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21!\22#\23%\24\'\25"+
"C\u0085D\u0087E\u0089F\u008bG\u008dH\u008fI\u0091J\u0093K\u0095L\u0097"+ ")\26+\27-\30/\31\61\32\63\33\65\34\67\359\36;\37= ?!A\"C#E$G%I&K\'M(O"+
"M\u0099N\u009bO\u009dP\u009fQ\u00a1R\u00a3S\u00a5T\u00a7U\u00a9V\u00ab"+ ")Q*S+U,W-Y.[/]\60_\61a\62c\63e\64g\65i\66k\67m8o9q:s;u<w=y>{?}@\177A\u0081"+
"W\u00adX\u00afY\u00b1Z\u00b3[\u00b5\\\u00b7]\u00b9^\u00bb_\u00bd\2\u00bf"+ "B\u0083C\u0085D\u0087E\u0089F\u008bG\u008dH\u008fI\u0091J\u0093K\u0095"+
"\2\u00c1\2\u00c3`\u00c5a\u00c7b\u00c9c\3\2\f\3\2))\4\2BBaa\5\2<<BBaa\3"+ "L\u0097M\u0099N\u009bO\u009dP\u009fQ\u00a1R\u00a3S\u00a5T\u00a7U\u00a9"+
"\2$$\3\2bb\4\2--//\3\2\62;\3\2C\\\4\2\f\f\17\17\5\2\13\f\17\17\"\"\u035d"+ "V\u00abW\u00adX\u00afY\u00b1Z\u00b3[\u00b5\\\u00b7]\u00b9^\u00bb_\u00bd"+
"\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2"+ "`\u00bfa\u00c1b\u00c3c\u00c5d\u00c7e\u00c9f\u00cbg\u00cd\2\u00cf\2\u00d1"+
"\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2"+ "\2\u00d3h\u00d5i\u00d7j\u00d9k\3\2\f\3\2))\4\2BBaa\5\2<<BBaa\3\2$$\3\2"+
"\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2"+ "bb\4\2--//\3\2\62;\3\2C\\\4\2\f\f\17\17\5\2\13\f\17\17\"\"\u0392\2\3\3"+
"\2\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2\2\2/\3\2\2"+ "\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2"+
"\2\2\61\3\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3\2\2\2\2;\3"+ "\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3"+
"\2\2\2\2=\3\2\2\2\2?\3\2\2\2\2A\3\2\2\2\2C\3\2\2\2\2E\3\2\2\2\2G\3\2\2"+ "\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2\2"+
"\2\2I\3\2\2\2\2K\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2S\3\2\2\2\2"+ "%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2\2\2/\3\2\2\2\2\61"+
"U\3\2\2\2\2W\3\2\2\2\2Y\3\2\2\2\2[\3\2\2\2\2]\3\2\2\2\2_\3\2\2\2\2a\3"+ "\3\2\2\2\2\63\3\2\2\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3\2\2\2\2;\3\2\2\2"+
"\2\2\2\2c\3\2\2\2\2e\3\2\2\2\2g\3\2\2\2\2i\3\2\2\2\2k\3\2\2\2\2m\3\2\2"+ "\2=\3\2\2\2\2?\3\2\2\2\2A\3\2\2\2\2C\3\2\2\2\2E\3\2\2\2\2G\3\2\2\2\2I"+
"\2\2o\3\2\2\2\2q\3\2\2\2\2s\3\2\2\2\2u\3\2\2\2\2w\3\2\2\2\2y\3\2\2\2\2"+ "\3\2\2\2\2K\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2S\3\2\2\2\2U\3\2"+
"{\3\2\2\2\2}\3\2\2\2\2\177\3\2\2\2\2\u0081\3\2\2\2\2\u0083\3\2\2\2\2\u0085"+ "\2\2\2W\3\2\2\2\2Y\3\2\2\2\2[\3\2\2\2\2]\3\2\2\2\2_\3\2\2\2\2a\3\2\2\2"+
"\2c\3\2\2\2\2e\3\2\2\2\2g\3\2\2\2\2i\3\2\2\2\2k\3\2\2\2\2m\3\2\2\2\2o"+
"\3\2\2\2\2q\3\2\2\2\2s\3\2\2\2\2u\3\2\2\2\2w\3\2\2\2\2y\3\2\2\2\2{\3\2"+
"\2\2\2}\3\2\2\2\2\177\3\2\2\2\2\u0081\3\2\2\2\2\u0083\3\2\2\2\2\u0085"+
"\3\2\2\2\2\u0087\3\2\2\2\2\u0089\3\2\2\2\2\u008b\3\2\2\2\2\u008d\3\2\2"+ "\3\2\2\2\2\u0087\3\2\2\2\2\u0089\3\2\2\2\2\u008b\3\2\2\2\2\u008d\3\2\2"+
"\2\2\u008f\3\2\2\2\2\u0091\3\2\2\2\2\u0093\3\2\2\2\2\u0095\3\2\2\2\2\u0097"+ "\2\2\u008f\3\2\2\2\2\u0091\3\2\2\2\2\u0093\3\2\2\2\2\u0095\3\2\2\2\2\u0097"+
"\3\2\2\2\2\u0099\3\2\2\2\2\u009b\3\2\2\2\2\u009d\3\2\2\2\2\u009f\3\2\2"+ "\3\2\2\2\2\u0099\3\2\2\2\2\u009b\3\2\2\2\2\u009d\3\2\2\2\2\u009f\3\2\2"+
"\2\2\u00a1\3\2\2\2\2\u00a3\3\2\2\2\2\u00a5\3\2\2\2\2\u00a7\3\2\2\2\2\u00a9"+ "\2\2\u00a1\3\2\2\2\2\u00a3\3\2\2\2\2\u00a5\3\2\2\2\2\u00a7\3\2\2\2\2\u00a9"+
"\3\2\2\2\2\u00ab\3\2\2\2\2\u00ad\3\2\2\2\2\u00af\3\2\2\2\2\u00b1\3\2\2"+ "\3\2\2\2\2\u00ab\3\2\2\2\2\u00ad\3\2\2\2\2\u00af\3\2\2\2\2\u00b1\3\2\2"+
"\2\2\u00b3\3\2\2\2\2\u00b5\3\2\2\2\2\u00b7\3\2\2\2\2\u00b9\3\2\2\2\2\u00bb"+ "\2\2\u00b3\3\2\2\2\2\u00b5\3\2\2\2\2\u00b7\3\2\2\2\2\u00b9\3\2\2\2\2\u00bb"+
"\3\2\2\2\2\u00c3\3\2\2\2\2\u00c5\3\2\2\2\2\u00c7\3\2\2\2\2\u00c9\3\2\2"+ "\3\2\2\2\2\u00bd\3\2\2\2\2\u00bf\3\2\2\2\2\u00c1\3\2\2\2\2\u00c3\3\2\2"+
"\2\3\u00cb\3\2\2\2\5\u00cd\3\2\2\2\7\u00cf\3\2\2\2\t\u00d1\3\2\2\2\13"+ "\2\2\u00c5\3\2\2\2\2\u00c7\3\2\2\2\2\u00c9\3\2\2\2\2\u00cb\3\2\2\2\2\u00d3"+
"\u00d3\3\2\2\2\r\u00d7\3\2\2\2\17\u00df\3\2\2\2\21\u00e8\3\2\2\2\23\u00ec"+ "\3\2\2\2\2\u00d5\3\2\2\2\2\u00d7\3\2\2\2\2\u00d9\3\2\2\2\3\u00db\3\2\2"+
"\3\2\2\2\25\u00f0\3\2\2\2\27\u00f3\3\2\2\2\31\u00f7\3\2\2\2\33\u00ff\3"+ "\2\5\u00dd\3\2\2\2\7\u00df\3\2\2\2\t\u00e1\3\2\2\2\13\u00e3\3\2\2\2\r"+
"\2\2\2\35\u0102\3\2\2\2\37\u0107\3\2\2\2!\u010f\3\2\2\2#\u0118\3\2\2\2"+ "\u00e7\3\2\2\2\17\u00ef\3\2\2\2\21\u00f8\3\2\2\2\23\u00fc\3\2\2\2\25\u0100"+
"%\u0120\3\2\2\2\'\u0126\3\2\2\2)\u012b\3\2\2\2+\u0134\3\2\2\2-\u013d\3"+ "\3\2\2\2\27\u0103\3\2\2\2\31\u0107\3\2\2\2\33\u010f\3\2\2\2\35\u0112\3"+
"\2\2\2/\u0144\3\2\2\2\61\u014f\3\2\2\2\63\u0156\3\2\2\2\65\u015e\3\2\2"+ "\2\2\2\37\u0117\3\2\2\2!\u011f\3\2\2\2#\u0128\3\2\2\2%\u0130\3\2\2\2\'"+
"\2\67\u0166\3\2\2\29\u016c\3\2\2\2;\u0173\3\2\2\2=\u0178\3\2\2\2?\u017d"+ "\u0136\3\2\2\2)\u013b\3\2\2\2+\u0144\3\2\2\2-\u014d\3\2\2\2/\u0154\3\2"+
"\3\2\2\2A\u0187\3\2\2\2C\u0190\3\2\2\2E\u0196\3\2\2\2G\u019d\3\2\2\2I"+ "\2\2\61\u015f\3\2\2\2\63\u0166\3\2\2\2\65\u016e\3\2\2\2\67\u0176\3\2\2"+
"\u01a0\3\2\2\2K\u01a6\3\2\2\2M\u01a9\3\2\2\2O\u01ae\3\2\2\2Q\u01b3\3\2"+ "\29\u017c\3\2\2\2;\u0183\3\2\2\2=\u0188\3\2\2\2?\u018d\3\2\2\2A\u0197"+
"\2\2S\u01b8\3\2\2\2U\u01be\3\2\2\2W\u01c5\3\2\2\2Y\u01cb\3\2\2\2[\u01d3"+ "\3\2\2\2C\u01a0\3\2\2\2E\u01a6\3\2\2\2G\u01ad\3\2\2\2I\u01b0\3\2\2\2K"+
"\3\2\2\2]\u01d7\3\2\2\2_\u01dc\3\2\2\2a\u01df\3\2\2\2c\u01e9\3\2\2\2e"+ "\u01b6\3\2\2\2M\u01b9\3\2\2\2O\u01be\3\2\2\2Q\u01c3\3\2\2\2S\u01c8\3\2"+
"\u01ec\3\2\2\2g\u01f2\3\2\2\2i\u01f8\3\2\2\2k\u01ff\3\2\2\2m\u0208\3\2"+ "\2\2U\u01ce\3\2\2\2W\u01d5\3\2\2\2Y\u01db\3\2\2\2[\u01e3\3\2\2\2]\u01e7"+
"\2\2o\u020d\3\2\2\2q\u0213\3\2\2\2s\u0219\3\2\2\2u\u021f\3\2\2\2w\u0227"+ "\3\2\2\2_\u01ec\3\2\2\2a\u01ef\3\2\2\2c\u01f9\3\2\2\2e\u01fc\3\2\2\2g"+
"\3\2\2\2y\u022e\3\2\2\2{\u0233\3\2\2\2}\u0237\3\2\2\2\177\u023d\3\2\2"+ "\u0202\3\2\2\2i\u0208\3\2\2\2k\u020f\3\2\2\2m\u0218\3\2\2\2o\u021d\3\2"+
"\2\u0081\u0244\3\2\2\2\u0083\u0249\3\2\2\2\u0085\u024e\3\2\2\2\u0087\u0253"+ "\2\2q\u0223\3\2\2\2s\u0229\3\2\2\2u\u022f\3\2\2\2w\u0237\3\2\2\2y\u023e"+
"\3\2\2\2\u0089\u0259\3\2\2\2\u008b\u025f\3\2\2\2\u008d\u0266\3\2\2\2\u008f"+ "\3\2\2\2{\u0243\3\2\2\2}\u0247\3\2\2\2\177\u024d\3\2\2\2\u0081\u0254\3"+
"\u026c\3\2\2\2\u0091\u0271\3\2\2\2\u0093\u027a\3\2\2\2\u0095\u027c\3\2"+ "\2\2\2\u0083\u0259\3\2\2\2\u0085\u025e\3\2\2\2\u0087\u0263\3\2\2\2\u0089"+
"\2\2\u0097\u027e\3\2\2\2\u0099\u0281\3\2\2\2\u009b\u0283\3\2\2\2\u009d"+ "\u0269\3\2\2\2\u008b\u026f\3\2\2\2\u008d\u0276\3\2\2\2\u008f\u027c\3\2"+
"\u0286\3\2\2\2\u009f\u0288\3\2\2\2\u00a1\u028a\3\2\2\2\u00a3\u028c\3\2"+ "\2\2\u0091\u0281\3\2\2\2\u0093\u0289\3\2\2\2\u0095\u028d\3\2\2\2\u0097"+
"\2\2\u00a5\u028e\3\2\2\2\u00a7\u0290\3\2\2\2\u00a9\u0293\3\2\2\2\u00ab"+ "\u0294\3\2\2\2\u0099\u0297\3\2\2\2\u009b\u029a\3\2\2\2\u009d\u029e\3\2"+
"\u0295\3\2\2\2\u00ad\u0297\3\2\2\2\u00af\u02a3\3\2\2\2\u00b1\u02d1\3\2"+ "\2\2\u009f\u02a4\3\2\2\2\u00a1\u02a6\3\2\2\2\u00a3\u02af\3\2\2\2\u00a5"+
"\2\2\u00b3\u02d5\3\2\2\2\u00b5\u02df\3\2\2\2\u00b7\u02eb\3\2\2\2\u00b9"+ "\u02b1\3\2\2\2\u00a7\u02b3\3\2\2\2\u00a9\u02b6\3\2\2\2\u00ab\u02b8\3\2"+
"\u02ef\3\2\2\2\u00bb\u02fa\3\2\2\2\u00bd\u0305\3\2\2\2\u00bf\u030e\3\2"+ "\2\2\u00ad\u02bb\3\2\2\2\u00af\u02bd\3\2\2\2\u00b1\u02bf\3\2\2\2\u00b3"+
"\2\2\u00c1\u0310\3\2\2\2\u00c3\u0312\3\2\2\2\u00c5\u0323\3\2\2\2\u00c7"+ "\u02c1\3\2\2\2\u00b5\u02c3\3\2\2\2\u00b7\u02c5\3\2\2\2\u00b9\u02c8\3\2"+
"\u0333\3\2\2\2\u00c9\u0339\3\2\2\2\u00cb\u00cc\7*\2\2\u00cc\4\3\2\2\2"+ "\2\2\u00bb\u02ca\3\2\2\2\u00bd\u02cc\3\2\2\2\u00bf\u02d8\3\2\2\2\u00c1"+
"\u00cd\u00ce\7+\2\2\u00ce\6\3\2\2\2\u00cf\u00d0\7.\2\2\u00d0\b\3\2\2\2"+ "\u0306\3\2\2\2\u00c3\u030a\3\2\2\2\u00c5\u0314\3\2\2\2\u00c7\u0320\3\2"+
"\u00d1\u00d2\7<\2\2\u00d2\n\3\2\2\2\u00d3\u00d4\7C\2\2\u00d4\u00d5\7N"+ "\2\2\u00c9\u0324\3\2\2\2\u00cb\u032f\3\2\2\2\u00cd\u033a\3\2\2\2\u00cf"+
"\2\2\u00d5\u00d6\7N\2\2\u00d6\f\3\2\2\2\u00d7\u00d8\7C\2\2\u00d8\u00d9"+ "\u0343\3\2\2\2\u00d1\u0345\3\2\2\2\u00d3\u0347\3\2\2\2\u00d5\u0358\3\2"+
"\7P\2\2\u00d9\u00da\7C\2\2\u00da\u00db\7N\2\2\u00db\u00dc\7[\2\2\u00dc"+ "\2\2\u00d7\u0368\3\2\2\2\u00d9\u036e\3\2\2\2\u00db\u00dc\7*\2\2\u00dc"+
"\u00dd\7\\\2\2\u00dd\u00de\7G\2\2\u00de\16\3\2\2\2\u00df\u00e0\7C\2\2"+ "\4\3\2\2\2\u00dd\u00de\7+\2\2\u00de\6\3\2\2\2\u00df\u00e0\7.\2\2\u00e0"+
"\u00e0\u00e1\7P\2\2\u00e1\u00e2\7C\2\2\u00e2\u00e3\7N\2\2\u00e3\u00e4"+ "\b\3\2\2\2\u00e1\u00e2\7<\2\2\u00e2\n\3\2\2\2\u00e3\u00e4\7C\2\2\u00e4"+
"\7[\2\2\u00e4\u00e5\7\\\2\2\u00e5\u00e6\7G\2\2\u00e6\u00e7\7F\2\2\u00e7"+ "\u00e5\7N\2\2\u00e5\u00e6\7N\2\2\u00e6\f\3\2\2\2\u00e7\u00e8\7C\2\2\u00e8"+
"\20\3\2\2\2\u00e8\u00e9\7C\2\2\u00e9\u00ea\7P\2\2\u00ea\u00eb\7F\2\2\u00eb"+ "\u00e9\7P\2\2\u00e9\u00ea\7C\2\2\u00ea\u00eb\7N\2\2\u00eb\u00ec\7[\2\2"+
"\22\3\2\2\2\u00ec\u00ed\7C\2\2\u00ed\u00ee\7P\2\2\u00ee\u00ef\7[\2\2\u00ef"+ "\u00ec\u00ed\7\\\2\2\u00ed\u00ee\7G\2\2\u00ee\16\3\2\2\2\u00ef\u00f0\7"+
"\24\3\2\2\2\u00f0\u00f1\7C\2\2\u00f1\u00f2\7U\2\2\u00f2\26\3\2\2\2\u00f3"+ "C\2\2\u00f0\u00f1\7P\2\2\u00f1\u00f2\7C\2\2\u00f2\u00f3\7N\2\2\u00f3\u00f4"+
"\u00f4\7C\2\2\u00f4\u00f5\7U\2\2\u00f5\u00f6\7E\2\2\u00f6\30\3\2\2\2\u00f7"+ "\7[\2\2\u00f4\u00f5\7\\\2\2\u00f5\u00f6\7G\2\2\u00f6\u00f7\7F\2\2\u00f7"+
"\u00f8\7D\2\2\u00f8\u00f9\7G\2\2\u00f9\u00fa\7V\2\2\u00fa\u00fb\7Y\2\2"+ "\20\3\2\2\2\u00f8\u00f9\7C\2\2\u00f9\u00fa\7P\2\2\u00fa\u00fb\7F\2\2\u00fb"+
"\u00fb\u00fc\7G\2\2\u00fc\u00fd\7G\2\2\u00fd\u00fe\7P\2\2\u00fe\32\3\2"+ "\22\3\2\2\2\u00fc\u00fd\7C\2\2\u00fd\u00fe\7P\2\2\u00fe\u00ff\7[\2\2\u00ff"+
"\2\2\u00ff\u0100\7D\2\2\u0100\u0101\7[\2\2\u0101\34\3\2\2\2\u0102\u0103"+ "\24\3\2\2\2\u0100\u0101\7C\2\2\u0101\u0102\7U\2\2\u0102\26\3\2\2\2\u0103"+
"\7E\2\2\u0103\u0104\7C\2\2\u0104\u0105\7U\2\2\u0105\u0106\7V\2\2\u0106"+ "\u0104\7C\2\2\u0104\u0105\7U\2\2\u0105\u0106\7E\2\2\u0106\30\3\2\2\2\u0107"+
"\36\3\2\2\2\u0107\u0108\7E\2\2\u0108\u0109\7C\2\2\u0109\u010a\7V\2\2\u010a"+ "\u0108\7D\2\2\u0108\u0109\7G\2\2\u0109\u010a\7V\2\2\u010a\u010b\7Y\2\2"+
"\u010b\7C\2\2\u010b\u010c\7N\2\2\u010c\u010d\7Q\2\2\u010d\u010e\7I\2\2"+ "\u010b\u010c\7G\2\2\u010c\u010d\7G\2\2\u010d\u010e\7P\2\2\u010e\32\3\2"+
"\u010e \3\2\2\2\u010f\u0110\7E\2\2\u0110\u0111\7C\2\2\u0111\u0112\7V\2"+ "\2\2\u010f\u0110\7D\2\2\u0110\u0111\7[\2\2\u0111\34\3\2\2\2\u0112\u0113"+
"\2\u0112\u0113\7C\2\2\u0113\u0114\7N\2\2\u0114\u0115\7Q\2\2\u0115\u0116"+ "\7E\2\2\u0113\u0114\7C\2\2\u0114\u0115\7U\2\2\u0115\u0116\7V\2\2\u0116"+
"\7I\2\2\u0116\u0117\7U\2\2\u0117\"\3\2\2\2\u0118\u0119\7E\2\2\u0119\u011a"+ "\36\3\2\2\2\u0117\u0118\7E\2\2\u0118\u0119\7C\2\2\u0119\u011a\7V\2\2\u011a"+
"\7Q\2\2\u011a\u011b\7N\2\2\u011b\u011c\7W\2\2\u011c\u011d\7O\2\2\u011d"+ "\u011b\7C\2\2\u011b\u011c\7N\2\2\u011c\u011d\7Q\2\2\u011d\u011e\7I\2\2"+
"\u011e\7P\2\2\u011e\u011f\7U\2\2\u011f$\3\2\2\2\u0120\u0121\7F\2\2\u0121"+ "\u011e \3\2\2\2\u011f\u0120\7E\2\2\u0120\u0121\7C\2\2\u0121\u0122\7V\2"+
"\u0122\7G\2\2\u0122\u0123\7D\2\2\u0123\u0124\7W\2\2\u0124\u0125\7I\2\2"+ "\2\u0122\u0123\7C\2\2\u0123\u0124\7N\2\2\u0124\u0125\7Q\2\2\u0125\u0126"+
"\u0125&\3\2\2\2\u0126\u0127\7F\2\2\u0127\u0128\7G\2\2\u0128\u0129\7U\2"+ "\7I\2\2\u0126\u0127\7U\2\2\u0127\"\3\2\2\2\u0128\u0129\7E\2\2\u0129\u012a"+
"\2\u0129\u012a\7E\2\2\u012a(\3\2\2\2\u012b\u012c\7F\2\2\u012c\u012d\7"+ "\7Q\2\2\u012a\u012b\7N\2\2\u012b\u012c\7W\2\2\u012c\u012d\7O\2\2\u012d"+
"G\2\2\u012d\u012e\7U\2\2\u012e\u012f\7E\2\2\u012f\u0130\7T\2\2\u0130\u0131"+ "\u012e\7P\2\2\u012e\u012f\7U\2\2\u012f$\3\2\2\2\u0130\u0131\7F\2\2\u0131"+
"\7K\2\2\u0131\u0132\7D\2\2\u0132\u0133\7G\2\2\u0133*\3\2\2\2\u0134\u0135"+ "\u0132\7G\2\2\u0132\u0133\7D\2\2\u0133\u0134\7W\2\2\u0134\u0135\7I\2\2"+
"\7F\2\2\u0135\u0136\7K\2\2\u0136\u0137\7U\2\2\u0137\u0138\7V\2\2\u0138"+ "\u0135&\3\2\2\2\u0136\u0137\7F\2\2\u0137\u0138\7G\2\2\u0138\u0139\7U\2"+
"\u0139\7K\2\2\u0139\u013a\7P\2\2\u013a\u013b\7E\2\2\u013b\u013c\7V\2\2"+ "\2\u0139\u013a\7E\2\2\u013a(\3\2\2\2\u013b\u013c\7F\2\2\u013c\u013d\7"+
"\u013c,\3\2\2\2\u013d\u013e\7G\2\2\u013e\u013f\7U\2\2\u013f\u0140\7E\2"+ "G\2\2\u013d\u013e\7U\2\2\u013e\u013f\7E\2\2\u013f\u0140\7T\2\2\u0140\u0141"+
"\2\u0140\u0141\7C\2\2\u0141\u0142\7R\2\2\u0142\u0143\7G\2\2\u0143.\3\2"+ "\7K\2\2\u0141\u0142\7D\2\2\u0142\u0143\7G\2\2\u0143*\3\2\2\2\u0144\u0145"+
"\2\2\u0144\u0145\7G\2\2\u0145\u0146\7Z\2\2\u0146\u0147\7G\2\2\u0147\u0148"+ "\7F\2\2\u0145\u0146\7K\2\2\u0146\u0147\7U\2\2\u0147\u0148\7V\2\2\u0148"+
"\7E\2\2\u0148\u0149\7W\2\2\u0149\u014a\7V\2\2\u014a\u014b\7C\2\2\u014b"+ "\u0149\7K\2\2\u0149\u014a\7P\2\2\u014a\u014b\7E\2\2\u014b\u014c\7V\2\2"+
"\u014c\7D\2\2\u014c\u014d\7N\2\2\u014d\u014e\7G\2\2\u014e\60\3\2\2\2\u014f"+ "\u014c,\3\2\2\2\u014d\u014e\7G\2\2\u014e\u014f\7U\2\2\u014f\u0150\7E\2"+
"\u0150\7G\2\2\u0150\u0151\7Z\2\2\u0151\u0152\7K\2\2\u0152\u0153\7U\2\2"+ "\2\u0150\u0151\7C\2\2\u0151\u0152\7R\2\2\u0152\u0153\7G\2\2\u0153.\3\2"+
"\u0153\u0154\7V\2\2\u0154\u0155\7U\2\2\u0155\62\3\2\2\2\u0156\u0157\7"+ "\2\2\u0154\u0155\7G\2\2\u0155\u0156\7Z\2\2\u0156\u0157\7G\2\2\u0157\u0158"+
"G\2\2\u0157\u0158\7Z\2\2\u0158\u0159\7R\2\2\u0159\u015a\7N\2\2\u015a\u015b"+ "\7E\2\2\u0158\u0159\7W\2\2\u0159\u015a\7V\2\2\u015a\u015b\7C\2\2\u015b"+
"\7C\2\2\u015b\u015c\7K\2\2\u015c\u015d\7P\2\2\u015d\64\3\2\2\2\u015e\u015f"+ "\u015c\7D\2\2\u015c\u015d\7N\2\2\u015d\u015e\7G\2\2\u015e\60\3\2\2\2\u015f"+
"\7G\2\2\u015f\u0160\7Z\2\2\u0160\u0161\7V\2\2\u0161\u0162\7T\2\2\u0162"+ "\u0160\7G\2\2\u0160\u0161\7Z\2\2\u0161\u0162\7K\2\2\u0162\u0163\7U\2\2"+
"\u0163\7C\2\2\u0163\u0164\7E\2\2\u0164\u0165\7V\2\2\u0165\66\3\2\2\2\u0166"+ "\u0163\u0164\7V\2\2\u0164\u0165\7U\2\2\u0165\62\3\2\2\2\u0166\u0167\7"+
"\u0167\7H\2\2\u0167\u0168\7C\2\2\u0168\u0169\7N\2\2\u0169\u016a\7U\2\2"+ "G\2\2\u0167\u0168\7Z\2\2\u0168\u0169\7R\2\2\u0169\u016a\7N\2\2\u016a\u016b"+
"\u016a\u016b\7G\2\2\u016b8\3\2\2\2\u016c\u016d\7H\2\2\u016d\u016e\7Q\2"+ "\7C\2\2\u016b\u016c\7K\2\2\u016c\u016d\7P\2\2\u016d\64\3\2\2\2\u016e\u016f"+
"\2\u016e\u016f\7T\2\2\u016f\u0170\7O\2\2\u0170\u0171\7C\2\2\u0171\u0172"+ "\7G\2\2\u016f\u0170\7Z\2\2\u0170\u0171\7V\2\2\u0171\u0172\7T\2\2\u0172"+
"\7V\2\2\u0172:\3\2\2\2\u0173\u0174\7H\2\2\u0174\u0175\7T\2\2\u0175\u0176"+ "\u0173\7C\2\2\u0173\u0174\7E\2\2\u0174\u0175\7V\2\2\u0175\66\3\2\2\2\u0176"+
"\7Q\2\2\u0176\u0177\7O\2\2\u0177<\3\2\2\2\u0178\u0179\7H\2\2\u0179\u017a"+ "\u0177\7H\2\2\u0177\u0178\7C\2\2\u0178\u0179\7N\2\2\u0179\u017a\7U\2\2"+
"\7W\2\2\u017a\u017b\7N\2\2\u017b\u017c\7N\2\2\u017c>\3\2\2\2\u017d\u017e"+ "\u017a\u017b\7G\2\2\u017b8\3\2\2\2\u017c\u017d\7H\2\2\u017d\u017e\7Q\2"+
"\7H\2\2\u017e\u017f\7W\2\2\u017f\u0180\7P\2\2\u0180\u0181\7E\2\2\u0181"+ "\2\u017e\u017f\7T\2\2\u017f\u0180\7O\2\2\u0180\u0181\7C\2\2\u0181\u0182"+
"\u0182\7V\2\2\u0182\u0183\7K\2\2\u0183\u0184\7Q\2\2\u0184\u0185\7P\2\2"+ "\7V\2\2\u0182:\3\2\2\2\u0183\u0184\7H\2\2\u0184\u0185\7T\2\2\u0185\u0186"+
"\u0185\u0186\7U\2\2\u0186@\3\2\2\2\u0187\u0188\7I\2\2\u0188\u0189\7T\2"+ "\7Q\2\2\u0186\u0187\7O\2\2\u0187<\3\2\2\2\u0188\u0189\7H\2\2\u0189\u018a"+
"\2\u0189\u018a\7C\2\2\u018a\u018b\7R\2\2\u018b\u018c\7J\2\2\u018c\u018d"+ "\7W\2\2\u018a\u018b\7N\2\2\u018b\u018c\7N\2\2\u018c>\3\2\2\2\u018d\u018e"+
"\7X\2\2\u018d\u018e\7K\2\2\u018e\u018f\7\\\2\2\u018fB\3\2\2\2\u0190\u0191"+ "\7H\2\2\u018e\u018f\7W\2\2\u018f\u0190\7P\2\2\u0190\u0191\7E\2\2\u0191"+
"\7I\2\2\u0191\u0192\7T\2\2\u0192\u0193\7Q\2\2\u0193\u0194\7W\2\2\u0194"+ "\u0192\7V\2\2\u0192\u0193\7K\2\2\u0193\u0194\7Q\2\2\u0194\u0195\7P\2\2"+
"\u0195\7R\2\2\u0195D\3\2\2\2\u0196\u0197\7J\2\2\u0197\u0198\7C\2\2\u0198"+ "\u0195\u0196\7U\2\2\u0196@\3\2\2\2\u0197\u0198\7I\2\2\u0198\u0199\7T\2"+
"\u0199\7X\2\2\u0199\u019a\7K\2\2\u019a\u019b\7P\2\2\u019b\u019c\7I\2\2"+ "\2\u0199\u019a\7C\2\2\u019a\u019b\7R\2\2\u019b\u019c\7J\2\2\u019c\u019d"+
"\u019cF\3\2\2\2\u019d\u019e\7K\2\2\u019e\u019f\7P\2\2\u019fH\3\2\2\2\u01a0"+ "\7X\2\2\u019d\u019e\7K\2\2\u019e\u019f\7\\\2\2\u019fB\3\2\2\2\u01a0\u01a1"+
"\u01a1\7K\2\2\u01a1\u01a2\7P\2\2\u01a2\u01a3\7P\2\2\u01a3\u01a4\7G\2\2"+ "\7I\2\2\u01a1\u01a2\7T\2\2\u01a2\u01a3\7Q\2\2\u01a3\u01a4\7W\2\2\u01a4"+
"\u01a4\u01a5\7T\2\2\u01a5J\3\2\2\2\u01a6\u01a7\7K\2\2\u01a7\u01a8\7U\2"+ "\u01a5\7R\2\2\u01a5D\3\2\2\2\u01a6\u01a7\7J\2\2\u01a7\u01a8\7C\2\2\u01a8"+
"\2\u01a8L\3\2\2\2\u01a9\u01aa\7L\2\2\u01aa\u01ab\7Q\2\2\u01ab\u01ac\7"+ "\u01a9\7X\2\2\u01a9\u01aa\7K\2\2\u01aa\u01ab\7P\2\2\u01ab\u01ac\7I\2\2"+
"K\2\2\u01ac\u01ad\7P\2\2\u01adN\3\2\2\2\u01ae\u01af\7N\2\2\u01af\u01b0"+ "\u01acF\3\2\2\2\u01ad\u01ae\7K\2\2\u01ae\u01af\7P\2\2\u01afH\3\2\2\2\u01b0"+
"\7G\2\2\u01b0\u01b1\7H\2\2\u01b1\u01b2\7V\2\2\u01b2P\3\2\2\2\u01b3\u01b4"+ "\u01b1\7K\2\2\u01b1\u01b2\7P\2\2\u01b2\u01b3\7P\2\2\u01b3\u01b4\7G\2\2"+
"\7N\2\2\u01b4\u01b5\7K\2\2\u01b5\u01b6\7M\2\2\u01b6\u01b7\7G\2\2\u01b7"+ "\u01b4\u01b5\7T\2\2\u01b5J\3\2\2\2\u01b6\u01b7\7K\2\2\u01b7\u01b8\7U\2"+
"R\3\2\2\2\u01b8\u01b9\7N\2\2\u01b9\u01ba\7K\2\2\u01ba\u01bb\7O\2\2\u01bb"+ "\2\u01b8L\3\2\2\2\u01b9\u01ba\7L\2\2\u01ba\u01bb\7Q\2\2\u01bb\u01bc\7"+
"\u01bc\7K\2\2\u01bc\u01bd\7V\2\2\u01bdT\3\2\2\2\u01be\u01bf\7O\2\2\u01bf"+ "K\2\2\u01bc\u01bd\7P\2\2\u01bdN\3\2\2\2\u01be\u01bf\7N\2\2\u01bf\u01c0"+
"\u01c0\7C\2\2\u01c0\u01c1\7R\2\2\u01c1\u01c2\7R\2\2\u01c2\u01c3\7G\2\2"+ "\7G\2\2\u01c0\u01c1\7H\2\2\u01c1\u01c2\7V\2\2\u01c2P\3\2\2\2\u01c3\u01c4"+
"\u01c3\u01c4\7F\2\2\u01c4V\3\2\2\2\u01c5\u01c6\7O\2\2\u01c6\u01c7\7C\2"+ "\7N\2\2\u01c4\u01c5\7K\2\2\u01c5\u01c6\7M\2\2\u01c6\u01c7\7G\2\2\u01c7"+
"\2\u01c7\u01c8\7V\2\2\u01c8\u01c9\7E\2\2\u01c9\u01ca\7J\2\2\u01caX\3\2"+ "R\3\2\2\2\u01c8\u01c9\7N\2\2\u01c9\u01ca\7K\2\2\u01ca\u01cb\7O\2\2\u01cb"+
"\2\2\u01cb\u01cc\7P\2\2\u01cc\u01cd\7C\2\2\u01cd\u01ce\7V\2\2\u01ce\u01cf"+ "\u01cc\7K\2\2\u01cc\u01cd\7V\2\2\u01cdT\3\2\2\2\u01ce\u01cf\7O\2\2\u01cf"+
"\7W\2\2\u01cf\u01d0\7T\2\2\u01d0\u01d1\7C\2\2\u01d1\u01d2\7N\2\2\u01d2"+ "\u01d0\7C\2\2\u01d0\u01d1\7R\2\2\u01d1\u01d2\7R\2\2\u01d2\u01d3\7G\2\2"+
"Z\3\2\2\2\u01d3\u01d4\7P\2\2\u01d4\u01d5\7Q\2\2\u01d5\u01d6\7V\2\2\u01d6"+ "\u01d3\u01d4\7F\2\2\u01d4V\3\2\2\2\u01d5\u01d6\7O\2\2\u01d6\u01d7\7C\2"+
"\\\3\2\2\2\u01d7\u01d8\7P\2\2\u01d8\u01d9\7W\2\2\u01d9\u01da\7N\2\2\u01da"+ "\2\u01d7\u01d8\7V\2\2\u01d8\u01d9\7E\2\2\u01d9\u01da\7J\2\2\u01daX\3\2"+
"\u01db\7N\2\2\u01db^\3\2\2\2\u01dc\u01dd\7Q\2\2\u01dd\u01de\7P\2\2\u01de"+ "\2\2\u01db\u01dc\7P\2\2\u01dc\u01dd\7C\2\2\u01dd\u01de\7V\2\2\u01de\u01df"+
"`\3\2\2\2\u01df\u01e0\7Q\2\2\u01e0\u01e1\7R\2\2\u01e1\u01e2\7V\2\2\u01e2"+ "\7W\2\2\u01df\u01e0\7T\2\2\u01e0\u01e1\7C\2\2\u01e1\u01e2\7N\2\2\u01e2"+
"\u01e3\7K\2\2\u01e3\u01e4\7O\2\2\u01e4\u01e5\7K\2\2\u01e5\u01e6\7\\\2"+ "Z\3\2\2\2\u01e3\u01e4\7P\2\2\u01e4\u01e5\7Q\2\2\u01e5\u01e6\7V\2\2\u01e6"+
"\2\u01e6\u01e7\7G\2\2\u01e7\u01e8\7F\2\2\u01e8b\3\2\2\2\u01e9\u01ea\7"+ "\\\3\2\2\2\u01e7\u01e8\7P\2\2\u01e8\u01e9\7W\2\2\u01e9\u01ea\7N\2\2\u01ea"+
"Q\2\2\u01ea\u01eb\7T\2\2\u01ebd\3\2\2\2\u01ec\u01ed\7Q\2\2\u01ed\u01ee"+ "\u01eb\7N\2\2\u01eb^\3\2\2\2\u01ec\u01ed\7Q\2\2\u01ed\u01ee\7P\2\2\u01ee"+
"\7T\2\2\u01ee\u01ef\7F\2\2\u01ef\u01f0\7G\2\2\u01f0\u01f1\7T\2\2\u01f1"+ "`\3\2\2\2\u01ef\u01f0\7Q\2\2\u01f0\u01f1\7R\2\2\u01f1\u01f2\7V\2\2\u01f2"+
"f\3\2\2\2\u01f2\u01f3\7Q\2\2\u01f3\u01f4\7W\2\2\u01f4\u01f5\7V\2\2\u01f5"+ "\u01f3\7K\2\2\u01f3\u01f4\7O\2\2\u01f4\u01f5\7K\2\2\u01f5\u01f6\7\\\2"+
"\u01f6\7G\2\2\u01f6\u01f7\7T\2\2\u01f7h\3\2\2\2\u01f8\u01f9\7R\2\2\u01f9"+ "\2\u01f6\u01f7\7G\2\2\u01f7\u01f8\7F\2\2\u01f8b\3\2\2\2\u01f9\u01fa\7"+
"\u01fa\7C\2\2\u01fa\u01fb\7T\2\2\u01fb\u01fc\7U\2\2\u01fc\u01fd\7G\2\2"+ "Q\2\2\u01fa\u01fb\7T\2\2\u01fbd\3\2\2\2\u01fc\u01fd\7Q\2\2\u01fd\u01fe"+
"\u01fd\u01fe\7F\2\2\u01fej\3\2\2\2\u01ff\u0200\7R\2\2\u0200\u0201\7J\2"+ "\7T\2\2\u01fe\u01ff\7F\2\2\u01ff\u0200\7G\2\2\u0200\u0201\7T\2\2\u0201"+
"\2\u0201\u0202\7[\2\2\u0202\u0203\7U\2\2\u0203\u0204\7K\2\2\u0204\u0205"+ "f\3\2\2\2\u0202\u0203\7Q\2\2\u0203\u0204\7W\2\2\u0204\u0205\7V\2\2\u0205"+
"\7E\2\2\u0205\u0206\7C\2\2\u0206\u0207\7N\2\2\u0207l\3\2\2\2\u0208\u0209"+ "\u0206\7G\2\2\u0206\u0207\7T\2\2\u0207h\3\2\2\2\u0208\u0209\7R\2\2\u0209"+
"\7R\2\2\u0209\u020a\7N\2\2\u020a\u020b\7C\2\2\u020b\u020c\7P\2\2\u020c"+ "\u020a\7C\2\2\u020a\u020b\7T\2\2\u020b\u020c\7U\2\2\u020c\u020d\7G\2\2"+
"n\3\2\2\2\u020d\u020e\7T\2\2\u020e\u020f\7K\2\2\u020f\u0210\7I\2\2\u0210"+ "\u020d\u020e\7F\2\2\u020ej\3\2\2\2\u020f\u0210\7R\2\2\u0210\u0211\7J\2"+
"\u0211\7J\2\2\u0211\u0212\7V\2\2\u0212p\3\2\2\2\u0213\u0214\7T\2\2\u0214"+ "\2\u0211\u0212\7[\2\2\u0212\u0213\7U\2\2\u0213\u0214\7K\2\2\u0214\u0215"+
"\u0215\7N\2\2\u0215\u0216\7K\2\2\u0216\u0217\7M\2\2\u0217\u0218\7G\2\2"+ "\7E\2\2\u0215\u0216\7C\2\2\u0216\u0217\7N\2\2\u0217l\3\2\2\2\u0218\u0219"+
"\u0218r\3\2\2\2\u0219\u021a\7S\2\2\u021a\u021b\7W\2\2\u021b\u021c\7G\2"+ "\7R\2\2\u0219\u021a\7N\2\2\u021a\u021b\7C\2\2\u021b\u021c\7P\2\2\u021c"+
"\2\u021c\u021d\7T\2\2\u021d\u021e\7[\2\2\u021et\3\2\2\2\u021f\u0220\7"+ "n\3\2\2\2\u021d\u021e\7T\2\2\u021e\u021f\7K\2\2\u021f\u0220\7I\2\2\u0220"+
"U\2\2\u0220\u0221\7E\2\2\u0221\u0222\7J\2\2\u0222\u0223\7G\2\2\u0223\u0224"+ "\u0221\7J\2\2\u0221\u0222\7V\2\2\u0222p\3\2\2\2\u0223\u0224\7T\2\2\u0224"+
"\7O\2\2\u0224\u0225\7C\2\2\u0225\u0226\7U\2\2\u0226v\3\2\2\2\u0227\u0228"+ "\u0225\7N\2\2\u0225\u0226\7K\2\2\u0226\u0227\7M\2\2\u0227\u0228\7G\2\2"+
"\7U\2\2\u0228\u0229\7G\2\2\u0229\u022a\7N\2\2\u022a\u022b\7G\2\2\u022b"+ "\u0228r\3\2\2\2\u0229\u022a\7S\2\2\u022a\u022b\7W\2\2\u022b\u022c\7G\2"+
"\u022c\7E\2\2\u022c\u022d\7V\2\2\u022dx\3\2\2\2\u022e\u022f\7U\2\2\u022f"+ "\2\u022c\u022d\7T\2\2\u022d\u022e\7[\2\2\u022et\3\2\2\2\u022f\u0230\7"+
"\u0230\7J\2\2\u0230\u0231\7Q\2\2\u0231\u0232\7Y\2\2\u0232z\3\2\2\2\u0233"+ "U\2\2\u0230\u0231\7E\2\2\u0231\u0232\7J\2\2\u0232\u0233\7G\2\2\u0233\u0234"+
"\u0234\7U\2\2\u0234\u0235\7[\2\2\u0235\u0236\7U\2\2\u0236|\3\2\2\2\u0237"+ "\7O\2\2\u0234\u0235\7C\2\2\u0235\u0236\7U\2\2\u0236v\3\2\2\2\u0237\u0238"+
"\u0238\7V\2\2\u0238\u0239\7C\2\2\u0239\u023a\7D\2\2\u023a\u023b\7N\2\2"+ "\7U\2\2\u0238\u0239\7G\2\2\u0239\u023a\7N\2\2\u023a\u023b\7G\2\2\u023b"+
"\u023b\u023c\7G\2\2\u023c~\3\2\2\2\u023d\u023e\7V\2\2\u023e\u023f\7C\2"+ "\u023c\7E\2\2\u023c\u023d\7V\2\2\u023dx\3\2\2\2\u023e\u023f\7U\2\2\u023f"+
"\2\u023f\u0240\7D\2\2\u0240\u0241\7N\2\2\u0241\u0242\7G\2\2\u0242\u0243"+ "\u0240\7J\2\2\u0240\u0241\7Q\2\2\u0241\u0242\7Y\2\2\u0242z\3\2\2\2\u0243"+
"\7U\2\2\u0243\u0080\3\2\2\2\u0244\u0245\7V\2\2\u0245\u0246\7G\2\2\u0246"+ "\u0244\7U\2\2\u0244\u0245\7[\2\2\u0245\u0246\7U\2\2\u0246|\3\2\2\2\u0247"+
"\u0247\7Z\2\2\u0247\u0248\7V\2\2\u0248\u0082\3\2\2\2\u0249\u024a\7V\2"+ "\u0248\7V\2\2\u0248\u0249\7C\2\2\u0249\u024a\7D\2\2\u024a\u024b\7N\2\2"+
"\2\u024a\u024b\7T\2\2\u024b\u024c\7W\2\2\u024c\u024d\7G\2\2\u024d\u0084"+ "\u024b\u024c\7G\2\2\u024c~\3\2\2\2\u024d\u024e\7V\2\2\u024e\u024f\7C\2"+
"\3\2\2\2\u024e\u024f\7V\2\2\u024f\u0250\7[\2\2\u0250\u0251\7R\2\2\u0251"+ "\2\u024f\u0250\7D\2\2\u0250\u0251\7N\2\2\u0251\u0252\7G\2\2\u0252\u0253"+
"\u0252\7G\2\2\u0252\u0086\3\2\2\2\u0253\u0254\7V\2\2\u0254\u0255\7[\2"+ "\7U\2\2\u0253\u0080\3\2\2\2\u0254\u0255\7V\2\2\u0255\u0256\7G\2\2\u0256"+
"\2\u0255\u0256\7R\2\2\u0256\u0257\7G\2\2\u0257\u0258\7U\2\2\u0258\u0088"+ "\u0257\7Z\2\2\u0257\u0258\7V\2\2\u0258\u0082\3\2\2\2\u0259\u025a\7V\2"+
"\3\2\2\2\u0259\u025a\7W\2\2\u025a\u025b\7U\2\2\u025b\u025c\7K\2\2\u025c"+ "\2\u025a\u025b\7T\2\2\u025b\u025c\7W\2\2\u025c\u025d\7G\2\2\u025d\u0084"+
"\u025d\7P\2\2\u025d\u025e\7I\2\2\u025e\u008a\3\2\2\2\u025f\u0260\7X\2"+ "\3\2\2\2\u025e\u025f\7V\2\2\u025f\u0260\7[\2\2\u0260\u0261\7R\2\2\u0261"+
"\2\u0260\u0261\7G\2\2\u0261\u0262\7T\2\2\u0262\u0263\7K\2\2\u0263\u0264"+ "\u0262\7G\2\2\u0262\u0086\3\2\2\2\u0263\u0264\7V\2\2\u0264\u0265\7[\2"+
"\7H\2\2\u0264\u0265\7[\2\2\u0265\u008c\3\2\2\2\u0266\u0267\7Y\2\2\u0267"+ "\2\u0265\u0266\7R\2\2\u0266\u0267\7G\2\2\u0267\u0268\7U\2\2\u0268\u0088"+
"\u0268\7J\2\2\u0268\u0269\7G\2\2\u0269\u026a\7T\2\2\u026a\u026b\7G\2\2"+ "\3\2\2\2\u0269\u026a\7W\2\2\u026a\u026b\7U\2\2\u026b\u026c\7K\2\2\u026c"+
"\u026b\u008e\3\2\2\2\u026c\u026d\7Y\2\2\u026d\u026e\7K\2\2\u026e\u026f"+ "\u026d\7P\2\2\u026d\u026e\7I\2\2\u026e\u008a\3\2\2\2\u026f\u0270\7X\2"+
"\7V\2\2\u026f\u0270\7J\2\2\u0270\u0090\3\2\2\2\u0271\u0272\7?\2\2\u0272"+ "\2\u0270\u0271\7G\2\2\u0271\u0272\7T\2\2\u0272\u0273\7K\2\2\u0273\u0274"+
"\u0092\3\2\2\2\u0273\u0274\7>\2\2\u0274\u027b\7@\2\2\u0275\u0276\7#\2"+ "\7H\2\2\u0274\u0275\7[\2\2\u0275\u008c\3\2\2\2\u0276\u0277\7Y\2\2\u0277"+
"\2\u0276\u027b\7?\2\2\u0277\u0278\7>\2\2\u0278\u0279\7?\2\2\u0279\u027b"+ "\u0278\7J\2\2\u0278\u0279\7G\2\2\u0279\u027a\7T\2\2\u027a\u027b\7G\2\2"+
"\7@\2\2\u027a\u0273\3\2\2\2\u027a\u0275\3\2\2\2\u027a\u0277\3\2\2\2\u027b"+ "\u027b\u008e\3\2\2\2\u027c\u027d\7Y\2\2\u027d\u027e\7K\2\2\u027e\u027f"+
"\u0094\3\2\2\2\u027c\u027d\7>\2\2\u027d\u0096\3\2\2\2\u027e\u027f\7>\2"+ "\7V\2\2\u027f\u0280\7J\2\2\u0280\u0090\3\2\2\2\u0281\u0282\7}\2\2\u0282"+
"\2\u027f\u0280\7?\2\2\u0280\u0098\3\2\2\2\u0281\u0282\7@\2\2\u0282\u009a"+ "\u0283\7G\2\2\u0283\u0284\7U\2\2\u0284\u0285\7E\2\2\u0285\u0286\7C\2\2"+
"\3\2\2\2\u0283\u0284\7@\2\2\u0284\u0285\7?\2\2\u0285\u009c\3\2\2\2\u0286"+ "\u0286\u0287\7R\2\2\u0287\u0288\7G\2\2\u0288\u0092\3\2\2\2\u0289\u028a"+
"\u0287\7-\2\2\u0287\u009e\3\2\2\2\u0288\u0289\7/\2\2\u0289\u00a0\3\2\2"+ "\7}\2\2\u028a\u028b\7H\2\2\u028b\u028c\7P\2\2\u028c\u0094\3\2\2\2\u028d"+
"\2\u028a\u028b\7,\2\2\u028b\u00a2\3\2\2\2\u028c\u028d\7\61\2\2\u028d\u00a4"+ "\u028e\7}\2\2\u028e\u028f\7N\2\2\u028f\u0290\7K\2\2\u0290\u0291\7O\2\2"+
"\3\2\2\2\u028e\u028f\7\'\2\2\u028f\u00a6\3\2\2\2\u0290\u0291\7~\2\2\u0291"+ "\u0291\u0292\7K\2\2\u0292\u0293\7V\2\2\u0293\u0096\3\2\2\2\u0294\u0295"+
"\u0292\7~\2\2\u0292\u00a8\3\2\2\2\u0293\u0294\7\60\2\2\u0294\u00aa\3\2"+ "\7}\2\2\u0295\u0296\7F\2\2\u0296\u0098\3\2\2\2\u0297\u0298\7}\2\2\u0298"+
"\2\2\u0295\u0296\7A\2\2\u0296\u00ac\3\2\2\2\u0297\u029d\7)\2\2\u0298\u029c"+ "\u0299\7V\2\2\u0299\u009a\3\2\2\2\u029a\u029b\7}\2\2\u029b\u029c\7V\2"+
"\n\2\2\2\u0299\u029a\7)\2\2\u029a\u029c\7)\2\2\u029b\u0298\3\2\2\2\u029b"+ "\2\u029c\u029d\7U\2\2\u029d\u009c\3\2\2\2\u029e\u029f\7}\2\2\u029f\u02a0"+
"\u0299\3\2\2\2\u029c\u029f\3\2\2\2\u029d\u029b\3\2\2\2\u029d\u029e\3\2"+ "\7I\2\2\u02a0\u02a1\7W\2\2\u02a1\u02a2\7K\2\2\u02a2\u02a3\7F\2\2\u02a3"+
"\2\2\u029e\u02a0\3\2\2\2\u029f\u029d\3\2\2\2\u02a0\u02a1\7)\2\2\u02a1"+ "\u009e\3\2\2\2\u02a4\u02a5\7\177\2\2\u02a5\u00a0\3\2\2\2\u02a6\u02a7\7"+
"\u00ae\3\2\2\2\u02a2\u02a4\5\u00bf`\2\u02a3\u02a2\3\2\2\2\u02a4\u02a5"+ "?\2\2\u02a7\u00a2\3\2\2\2\u02a8\u02a9\7>\2\2\u02a9\u02b0\7@\2\2\u02aa"+
"\3\2\2\2\u02a5\u02a3\3\2\2\2\u02a5\u02a6\3\2\2\2\u02a6\u00b0\3\2\2\2\u02a7"+ "\u02ab\7#\2\2\u02ab\u02b0\7?\2\2\u02ac\u02ad\7>\2\2\u02ad\u02ae\7?\2\2"+
"\u02a9\5\u00bf`\2\u02a8\u02a7\3\2\2\2\u02a9\u02aa\3\2\2\2\u02aa\u02a8"+ "\u02ae\u02b0\7@\2\2\u02af\u02a8\3\2\2\2\u02af\u02aa\3\2\2\2\u02af\u02ac"+
"\3\2\2\2\u02aa\u02ab\3\2\2\2\u02ab\u02ac\3\2\2\2\u02ac\u02b0\5\u00a9U"+ "\3\2\2\2\u02b0\u00a4\3\2\2\2\u02b1\u02b2\7>\2\2\u02b2\u00a6\3\2\2\2\u02b3"+
"\2\u02ad\u02af\5\u00bf`\2\u02ae\u02ad\3\2\2\2\u02af\u02b2\3\2\2\2\u02b0"+ "\u02b4\7>\2\2\u02b4\u02b5\7?\2\2\u02b5\u00a8\3\2\2\2\u02b6\u02b7\7@\2"+
"\u02ae\3\2\2\2\u02b0\u02b1\3\2\2\2\u02b1\u02d2\3\2\2\2\u02b2\u02b0\3\2"+ "\2\u02b7\u00aa\3\2\2\2\u02b8\u02b9\7@\2\2\u02b9\u02ba\7?\2\2\u02ba\u00ac"+
"\2\2\u02b3\u02b5\5\u00a9U\2\u02b4\u02b6\5\u00bf`\2\u02b5\u02b4\3\2\2\2"+ "\3\2\2\2\u02bb\u02bc\7-\2\2\u02bc\u00ae\3\2\2\2\u02bd\u02be\7/\2\2\u02be"+
"\u02b6\u02b7\3\2\2\2\u02b7\u02b5\3\2\2\2\u02b7\u02b8\3\2\2\2\u02b8\u02d2"+ "\u00b0\3\2\2\2\u02bf\u02c0\7,\2\2\u02c0\u00b2\3\2\2\2\u02c1\u02c2\7\61"+
"\3\2\2\2\u02b9\u02bb\5\u00bf`\2\u02ba\u02b9\3\2\2\2\u02bb\u02bc\3\2\2"+ "\2\2\u02c2\u00b4\3\2\2\2\u02c3\u02c4\7\'\2\2\u02c4\u00b6\3\2\2\2\u02c5"+
"\2\u02bc\u02ba\3\2\2\2\u02bc\u02bd\3\2\2\2\u02bd\u02c5\3\2\2\2\u02be\u02c2"+ "\u02c6\7~\2\2\u02c6\u02c7\7~\2\2\u02c7\u00b8\3\2\2\2\u02c8\u02c9\7\60"+
"\5\u00a9U\2\u02bf\u02c1\5\u00bf`\2\u02c0\u02bf\3\2\2\2\u02c1\u02c4\3\2"+ "\2\2\u02c9\u00ba\3\2\2\2\u02ca\u02cb\7A\2\2\u02cb\u00bc\3\2\2\2\u02cc"+
"\2\2\u02c2\u02c0\3\2\2\2\u02c2\u02c3\3\2\2\2\u02c3\u02c6\3\2\2\2\u02c4"+ "\u02d2\7)\2\2\u02cd\u02d1\n\2\2\2\u02ce\u02cf\7)\2\2\u02cf\u02d1\7)\2"+
"\u02c2\3\2\2\2\u02c5\u02be\3\2\2\2\u02c5\u02c6\3\2\2\2\u02c6\u02c7\3\2"+ "\2\u02d0\u02cd\3\2\2\2\u02d0\u02ce\3\2\2\2\u02d1\u02d4\3\2\2\2\u02d2\u02d0"+
"\2\2\u02c7\u02c8\5\u00bd_\2\u02c8\u02d2\3\2\2\2\u02c9\u02cb\5\u00a9U\2"+ "\3\2\2\2\u02d2\u02d3\3\2\2\2\u02d3\u02d5\3\2\2\2\u02d4\u02d2\3\2\2\2\u02d5"+
"\u02ca\u02cc\5\u00bf`\2\u02cb\u02ca\3\2\2\2\u02cc\u02cd\3\2\2\2\u02cd"+ "\u02d6\7)\2\2\u02d6\u00be\3\2\2\2\u02d7\u02d9\5\u00cfh\2\u02d8\u02d7\3"+
"\u02cb\3\2\2\2\u02cd\u02ce\3\2\2\2\u02ce\u02cf\3\2\2\2\u02cf\u02d0\5\u00bd"+ "\2\2\2\u02d9\u02da\3\2\2\2\u02da\u02d8\3\2\2\2\u02da\u02db\3\2\2\2\u02db"+
"_\2\u02d0\u02d2\3\2\2\2\u02d1\u02a8\3\2\2\2\u02d1\u02b3\3\2\2\2\u02d1"+ "\u00c0\3\2\2\2\u02dc\u02de\5\u00cfh\2\u02dd\u02dc\3\2\2\2\u02de\u02df"+
"\u02ba\3\2\2\2\u02d1\u02c9\3\2\2\2\u02d2\u00b2\3\2\2\2\u02d3\u02d6\5\u00c1"+ "\3\2\2\2\u02df\u02dd\3\2\2\2\u02df\u02e0\3\2\2\2\u02e0\u02e1\3\2\2\2\u02e1"+
"a\2\u02d4\u02d6\7a\2\2\u02d5\u02d3\3\2\2\2\u02d5\u02d4\3\2\2\2\u02d6\u02dc"+ "\u02e5\5\u00b9]\2\u02e2\u02e4\5\u00cfh\2\u02e3\u02e2\3\2\2\2\u02e4\u02e7"+
"\3\2\2\2\u02d7\u02db\5\u00c1a\2\u02d8\u02db\5\u00bf`\2\u02d9\u02db\t\3"+ "\3\2\2\2\u02e5\u02e3\3\2\2\2\u02e5\u02e6\3\2\2\2\u02e6\u0307\3\2\2\2\u02e7"+
"\2\2\u02da\u02d7\3\2\2\2\u02da\u02d8\3\2\2\2\u02da\u02d9\3\2\2\2\u02db"+ "\u02e5\3\2\2\2\u02e8\u02ea\5\u00b9]\2\u02e9\u02eb\5\u00cfh\2\u02ea\u02e9"+
"\u02de\3\2\2\2\u02dc\u02da\3\2\2\2\u02dc\u02dd\3\2\2\2\u02dd\u00b4\3\2"+ "\3\2\2\2\u02eb\u02ec\3\2\2\2\u02ec\u02ea\3\2\2\2\u02ec\u02ed\3\2\2\2\u02ed"+
"\2\2\u02de\u02dc\3\2\2\2\u02df\u02e3\5\u00bf`\2\u02e0\u02e4\5\u00c1a\2"+ "\u0307\3\2\2\2\u02ee\u02f0\5\u00cfh\2\u02ef\u02ee\3\2\2\2\u02f0\u02f1"+
"\u02e1\u02e4\5\u00bf`\2\u02e2\u02e4\t\4\2\2\u02e3\u02e0\3\2\2\2\u02e3"+ "\3\2\2\2\u02f1\u02ef\3\2\2\2\u02f1\u02f2\3\2\2\2\u02f2\u02fa\3\2\2\2\u02f3"+
"\u02e1\3\2\2\2\u02e3\u02e2\3\2\2\2\u02e4\u02e5\3\2\2\2\u02e5\u02e3\3\2"+ "\u02f7\5\u00b9]\2\u02f4\u02f6\5\u00cfh\2\u02f5\u02f4\3\2\2\2\u02f6\u02f9"+
"\2\2\u02e5\u02e6\3\2\2\2\u02e6\u00b6\3\2\2\2\u02e7\u02ec\5\u00c1a\2\u02e8"+ "\3\2\2\2\u02f7\u02f5\3\2\2\2\u02f7\u02f8\3\2\2\2\u02f8\u02fb\3\2\2\2\u02f9"+
"\u02ec\5\u00bf`\2\u02e9\u02ec\t\3\2\2\u02ea\u02ec\5\u00a1Q\2\u02eb\u02e7"+ "\u02f7\3\2\2\2\u02fa\u02f3\3\2\2\2\u02fa\u02fb\3\2\2\2\u02fb\u02fc\3\2"+
"\3\2\2\2\u02eb\u02e8\3\2\2\2\u02eb\u02e9\3\2\2\2\u02eb\u02ea\3\2\2\2\u02ec"+ "\2\2\u02fc\u02fd\5\u00cdg\2\u02fd\u0307\3\2\2\2\u02fe\u0300\5\u00b9]\2"+
"\u02ed\3\2\2\2\u02ed\u02eb\3\2\2\2\u02ed\u02ee\3\2\2\2\u02ee\u00b8\3\2"+ "\u02ff\u0301\5\u00cfh\2\u0300\u02ff\3\2\2\2\u0301\u0302\3\2\2\2\u0302"+
"\2\2\u02ef\u02f5\7$\2\2\u02f0\u02f4\n\5\2\2\u02f1\u02f2\7$\2\2\u02f2\u02f4"+ "\u0300\3\2\2\2\u0302\u0303\3\2\2\2\u0303\u0304\3\2\2\2\u0304\u0305\5\u00cd"+
"\7$\2\2\u02f3\u02f0\3\2\2\2\u02f3\u02f1\3\2\2\2\u02f4\u02f7\3\2\2\2\u02f5"+ "g\2\u0305\u0307\3\2\2\2\u0306\u02dd\3\2\2\2\u0306\u02e8\3\2\2\2\u0306"+
"\u02f3\3\2\2\2\u02f5\u02f6\3\2\2\2\u02f6\u02f8\3\2\2\2\u02f7\u02f5\3\2"+ "\u02ef\3\2\2\2\u0306\u02fe\3\2\2\2\u0307\u00c2\3\2\2\2\u0308\u030b\5\u00d1"+
"\2\2\u02f8\u02f9\7$\2\2\u02f9\u00ba\3\2\2\2\u02fa\u0300\7b\2\2\u02fb\u02ff"+ "i\2\u0309\u030b\7a\2\2\u030a\u0308\3\2\2\2\u030a\u0309\3\2\2\2\u030b\u0311"+
"\n\6\2\2\u02fc\u02fd\7b\2\2\u02fd\u02ff\7b\2\2\u02fe\u02fb\3\2\2\2\u02fe"+ "\3\2\2\2\u030c\u0310\5\u00d1i\2\u030d\u0310\5\u00cfh\2\u030e\u0310\t\3"+
"\u02fc\3\2\2\2\u02ff\u0302\3\2\2\2\u0300\u02fe\3\2\2\2\u0300\u0301\3\2"+ "\2\2\u030f\u030c\3\2\2\2\u030f\u030d\3\2\2\2\u030f\u030e\3\2\2\2\u0310"+
"\2\2\u0301\u0303\3\2\2\2\u0302\u0300\3\2\2\2\u0303\u0304\7b\2\2\u0304"+ "\u0313\3\2\2\2\u0311\u030f\3\2\2\2\u0311\u0312\3\2\2\2\u0312\u00c4\3\2"+
"\u00bc\3\2\2\2\u0305\u0307\7G\2\2\u0306\u0308\t\7\2\2\u0307\u0306\3\2"+ "\2\2\u0313\u0311\3\2\2\2\u0314\u0318\5\u00cfh\2\u0315\u0319\5\u00d1i\2"+
"\2\2\u0307\u0308\3\2\2\2\u0308\u030a\3\2\2\2\u0309\u030b\5\u00bf`\2\u030a"+ "\u0316\u0319\5\u00cfh\2\u0317\u0319\t\4\2\2\u0318\u0315\3\2\2\2\u0318"+
"\u0309\3\2\2\2\u030b\u030c\3\2\2\2\u030c\u030a\3\2\2\2\u030c\u030d\3\2"+ "\u0316\3\2\2\2\u0318\u0317\3\2\2\2\u0319\u031a\3\2\2\2\u031a\u0318\3\2"+
"\2\2\u030d\u00be\3\2\2\2\u030e\u030f\t\b\2\2\u030f\u00c0\3\2\2\2\u0310"+ "\2\2\u031a\u031b\3\2\2\2\u031b\u00c6\3\2\2\2\u031c\u0321\5\u00d1i\2\u031d"+
"\u0311\t\t\2\2\u0311\u00c2\3\2\2\2\u0312\u0313\7/\2\2\u0313\u0314\7/\2"+ "\u0321\5\u00cfh\2\u031e\u0321\t\3\2\2\u031f\u0321\5\u00b1Y\2\u0320\u031c"+
"\2\u0314\u0318\3\2\2\2\u0315\u0317\n\n\2\2\u0316\u0315\3\2\2\2\u0317\u031a"+ "\3\2\2\2\u0320\u031d\3\2\2\2\u0320\u031e\3\2\2\2\u0320\u031f\3\2\2\2\u0321"+
"\3\2\2\2\u0318\u0316\3\2\2\2\u0318\u0319\3\2\2\2\u0319\u031c\3\2\2\2\u031a"+ "\u0322\3\2\2\2\u0322\u0320\3\2\2\2\u0322\u0323\3\2\2\2\u0323\u00c8\3\2"+
"\u0318\3\2\2\2\u031b\u031d\7\17\2\2\u031c\u031b\3\2\2\2\u031c\u031d\3"+ "\2\2\u0324\u032a\7$\2\2\u0325\u0329\n\5\2\2\u0326\u0327\7$\2\2\u0327\u0329"+
"\2\2\2\u031d\u031f\3\2\2\2\u031e\u0320\7\f\2\2\u031f\u031e\3\2\2\2\u031f"+ "\7$\2\2\u0328\u0325\3\2\2\2\u0328\u0326\3\2\2\2\u0329\u032c\3\2\2\2\u032a"+
"\u0320\3\2\2\2\u0320\u0321\3\2\2\2\u0321\u0322\bb\2\2\u0322\u00c4\3\2"+ "\u0328\3\2\2\2\u032a\u032b\3\2\2\2\u032b\u032d\3\2\2\2\u032c\u032a\3\2"+
"\2\2\u0323\u0324\7\61\2\2\u0324\u0325\7,\2\2\u0325\u032a\3\2\2\2\u0326"+ "\2\2\u032d\u032e\7$\2\2\u032e\u00ca\3\2\2\2\u032f\u0335\7b\2\2\u0330\u0334"+
"\u0329\5\u00c5c\2\u0327\u0329\13\2\2\2\u0328\u0326\3\2\2\2\u0328\u0327"+ "\n\6\2\2\u0331\u0332\7b\2\2\u0332\u0334\7b\2\2\u0333\u0330\3\2\2\2\u0333"+
"\3\2\2\2\u0329\u032c\3\2\2\2\u032a\u032b\3\2\2\2\u032a\u0328\3\2\2\2\u032b"+ "\u0331\3\2\2\2\u0334\u0337\3\2\2\2\u0335\u0333\3\2\2\2\u0335\u0336\3\2"+
"\u032d\3\2\2\2\u032c\u032a\3\2\2\2\u032d\u032e\7,\2\2\u032e\u032f\7\61"+ "\2\2\u0336\u0338\3\2\2\2\u0337\u0335\3\2\2\2\u0338\u0339\7b\2\2\u0339"+
"\2\2\u032f\u0330\3\2\2\2\u0330\u0331\bc\2\2\u0331\u00c6\3\2\2\2\u0332"+ "\u00cc\3\2\2\2\u033a\u033c\7G\2\2\u033b\u033d\t\7\2\2\u033c\u033b\3\2"+
"\u0334\t\13\2\2\u0333\u0332\3\2\2\2\u0334\u0335\3\2\2\2\u0335\u0333\3"+ "\2\2\u033c\u033d\3\2\2\2\u033d\u033f\3\2\2\2\u033e\u0340\5\u00cfh\2\u033f"+
"\2\2\2\u0335\u0336\3\2\2\2\u0336\u0337\3\2\2\2\u0337\u0338\bd\2\2\u0338"+ "\u033e\3\2\2\2\u0340\u0341\3\2\2\2\u0341\u033f\3\2\2\2\u0341\u0342\3\2"+
"\u00c8\3\2\2\2\u0339\u033a\13\2\2\2\u033a\u00ca\3\2\2\2\"\2\u027a\u029b"+ "\2\2\u0342\u00ce\3\2\2\2\u0343\u0344\t\b\2\2\u0344\u00d0\3\2\2\2\u0345"+
"\u029d\u02a5\u02aa\u02b0\u02b7\u02bc\u02c2\u02c5\u02cd\u02d1\u02d5\u02da"+ "\u0346\t\t\2\2\u0346\u00d2\3\2\2\2\u0347\u0348\7/\2\2\u0348\u0349\7/\2"+
"\u02dc\u02e3\u02e5\u02eb\u02ed\u02f3\u02f5\u02fe\u0300\u0307\u030c\u0318"+ "\2\u0349\u034d\3\2\2\2\u034a\u034c\n\n\2\2\u034b\u034a\3\2\2\2\u034c\u034f"+
"\u031c\u031f\u0328\u032a\u0335\3\2\3\2"; "\3\2\2\2\u034d\u034b\3\2\2\2\u034d\u034e\3\2\2\2\u034e\u0351\3\2\2\2\u034f"+
"\u034d\3\2\2\2\u0350\u0352\7\17\2\2\u0351\u0350\3\2\2\2\u0351\u0352\3"+
"\2\2\2\u0352\u0354\3\2\2\2\u0353\u0355\7\f\2\2\u0354\u0353\3\2\2\2\u0354"+
"\u0355\3\2\2\2\u0355\u0356\3\2\2\2\u0356\u0357\bj\2\2\u0357\u00d4\3\2"+
"\2\2\u0358\u0359\7\61\2\2\u0359\u035a\7,\2\2\u035a\u035f\3\2\2\2\u035b"+
"\u035e\5\u00d5k\2\u035c\u035e\13\2\2\2\u035d\u035b\3\2\2\2\u035d\u035c"+
"\3\2\2\2\u035e\u0361\3\2\2\2\u035f\u0360\3\2\2\2\u035f\u035d\3\2\2\2\u0360"+
"\u0362\3\2\2\2\u0361\u035f\3\2\2\2\u0362\u0363\7,\2\2\u0363\u0364\7\61"+
"\2\2\u0364\u0365\3\2\2\2\u0365\u0366\bk\2\2\u0366\u00d6\3\2\2\2\u0367"+
"\u0369\t\13\2\2\u0368\u0367\3\2\2\2\u0369\u036a\3\2\2\2\u036a\u0368\3"+
"\2\2\2\u036a\u036b\3\2\2\2\u036b\u036c\3\2\2\2\u036c\u036d\bl\2\2\u036d"+
"\u00d8\3\2\2\2\u036e\u036f\13\2\2\2\u036f\u00da\3\2\2\2\"\2\u02af\u02d0"+
"\u02d2\u02da\u02df\u02e5\u02ec\u02f1\u02f7\u02fa\u0302\u0306\u030a\u030f"+
"\u0311\u0318\u031a\u0320\u0322\u0328\u032a\u0333\u0335\u033c\u0341\u034d"+
"\u0351\u0354\u035d\u035f\u036a\3\2\3\2";
public static final ATN _ATN = public static final ATN _ATN =
new ATNDeserializer().deserialize(_serializedATN.toCharArray()); new ATNDeserializer().deserialize(_serializedATN.toCharArray());
static { static {
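The renumbered constants and the new '{ESCAPE', '{FN', '{LIMIT', '{D', '{T', '{TS', '{GUID' and '}' literals give the lexer first-class tokens for JDBC escape syntax. A quick, illustrative way to print the tokens for one such input, sketched under the assumption of an ANTLR 4.7-style runtime (CharStreams) and same-package access; LexerDemo is a hypothetical class and the input value is arbitrary.

package org.elasticsearch.xpack.sql.parser;

import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.Token;

class LexerDemo {
    public static void main(String[] args) {
        // Uppercase input matches the literal spellings in _LITERAL_NAMES above.
        SqlBaseLexer lexer = new SqlBaseLexer(CharStreams.fromString("{TS '2005-01-02 11:22:33'}"));
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        tokens.fill();
        for (Token token : tokens.getTokens()) {
            System.out.println(SqlBaseLexer.VOCABULARY.getSymbolicName(token.getType())
                + " -> " + token.getText());
        }
        // Expected to include TIMESTAMP_ESC, STRING and ESC_END tokens before EOF.
    }
}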

View File

@ -1,8 +1,3 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
// ANTLR GENERATED CODE: DO NOT EDIT // ANTLR GENERATED CODE: DO NOT EDIT
package org.elasticsearch.xpack.sql.parser; package org.elasticsearch.xpack.sql.parser;
import org.antlr.v4.runtime.tree.ParseTreeListener; import org.antlr.v4.runtime.tree.ParseTreeListener;
@ -196,6 +191,16 @@ interface SqlBaseListener extends ParseTreeListener {
* @param ctx the parse tree
*/
void exitQueryNoWith(SqlBaseParser.QueryNoWithContext ctx);
/**
* Enter a parse tree produced by {@link SqlBaseParser#limitClause}.
* @param ctx the parse tree
*/
void enterLimitClause(SqlBaseParser.LimitClauseContext ctx);
/**
* Exit a parse tree produced by {@link SqlBaseParser#limitClause}.
* @param ctx the parse tree
*/
void exitLimitClause(SqlBaseParser.LimitClauseContext ctx);
/**
* Enter a parse tree produced by the {@code queryPrimaryDefault}
* labeled alternative in {@link SqlBaseParser#queryTerm}.
@ -514,6 +519,16 @@ interface SqlBaseListener extends ParseTreeListener {
* @param ctx the parse tree
*/
void exitPattern(SqlBaseParser.PatternContext ctx);
/**
* Enter a parse tree produced by {@link SqlBaseParser#patternEscape}.
* @param ctx the parse tree
*/
void enterPatternEscape(SqlBaseParser.PatternEscapeContext ctx);
/**
* Exit a parse tree produced by {@link SqlBaseParser#patternEscape}.
* @param ctx the parse tree
*/
void exitPatternEscape(SqlBaseParser.PatternEscapeContext ctx);
/**
* Enter a parse tree produced by the {@code valueExpressionDefault}
* labeled alternative in {@link SqlBaseParser#valueExpression}.
@ -611,17 +626,17 @@ interface SqlBaseListener extends ParseTreeListener {
*/
void exitStar(SqlBaseParser.StarContext ctx);
/**
* Enter a parse tree produced by the {@code functionCall}
* Enter a parse tree produced by the {@code function}
* labeled alternative in {@link SqlBaseParser#primaryExpression}.
* @param ctx the parse tree
*/
void enterFunctionCall(SqlBaseParser.FunctionCallContext ctx);
void enterFunction(SqlBaseParser.FunctionContext ctx);
/**
* Exit a parse tree produced by the {@code functionCall}
* Exit a parse tree produced by the {@code function}
* labeled alternative in {@link SqlBaseParser#primaryExpression}.
* @param ctx the parse tree
*/
void exitFunctionCall(SqlBaseParser.FunctionCallContext ctx);
void exitFunction(SqlBaseParser.FunctionContext ctx);
/**
* Enter a parse tree produced by the {@code subqueryExpression}
* labeled alternative in {@link SqlBaseParser#primaryExpression}.
@ -670,6 +685,66 @@ interface SqlBaseListener extends ParseTreeListener {
* @param ctx the parse tree
*/
void exitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx);
/**
* Enter a parse tree produced by {@link SqlBaseParser#castExpression}.
* @param ctx the parse tree
*/
void enterCastExpression(SqlBaseParser.CastExpressionContext ctx);
/**
* Exit a parse tree produced by {@link SqlBaseParser#castExpression}.
* @param ctx the parse tree
*/
void exitCastExpression(SqlBaseParser.CastExpressionContext ctx);
/**
* Enter a parse tree produced by {@link SqlBaseParser#castTemplate}.
* @param ctx the parse tree
*/
void enterCastTemplate(SqlBaseParser.CastTemplateContext ctx);
/**
* Exit a parse tree produced by {@link SqlBaseParser#castTemplate}.
* @param ctx the parse tree
*/
void exitCastTemplate(SqlBaseParser.CastTemplateContext ctx);
/**
* Enter a parse tree produced by {@link SqlBaseParser#extractExpression}.
* @param ctx the parse tree
*/
void enterExtractExpression(SqlBaseParser.ExtractExpressionContext ctx);
/**
* Exit a parse tree produced by {@link SqlBaseParser#extractExpression}.
* @param ctx the parse tree
*/
void exitExtractExpression(SqlBaseParser.ExtractExpressionContext ctx);
/**
* Enter a parse tree produced by {@link SqlBaseParser#extractTemplate}.
* @param ctx the parse tree
*/
void enterExtractTemplate(SqlBaseParser.ExtractTemplateContext ctx);
/**
* Exit a parse tree produced by {@link SqlBaseParser#extractTemplate}.
* @param ctx the parse tree
*/
void exitExtractTemplate(SqlBaseParser.ExtractTemplateContext ctx);
/**
* Enter a parse tree produced by {@link SqlBaseParser#functionExpression}.
* @param ctx the parse tree
*/
void enterFunctionExpression(SqlBaseParser.FunctionExpressionContext ctx);
/**
* Exit a parse tree produced by {@link SqlBaseParser#functionExpression}.
* @param ctx the parse tree
*/
void exitFunctionExpression(SqlBaseParser.FunctionExpressionContext ctx);
/**
* Enter a parse tree produced by {@link SqlBaseParser#functionTemplate}.
* @param ctx the parse tree
*/
void enterFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx);
/**
* Exit a parse tree produced by {@link SqlBaseParser#functionTemplate}.
* @param ctx the parse tree
*/
void exitFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx);
/**
* Enter a parse tree produced by the {@code nullLiteral}
* labeled alternative in {@link SqlBaseParser#constant}.
@ -730,6 +805,54 @@ interface SqlBaseListener extends ParseTreeListener {
* @param ctx the parse tree
*/
void exitParamLiteral(SqlBaseParser.ParamLiteralContext ctx);
/**
* Enter a parse tree produced by the {@code dateEscapedLiteral}
* labeled alternative in {@link SqlBaseParser#constant}.
* @param ctx the parse tree
*/
void enterDateEscapedLiteral(SqlBaseParser.DateEscapedLiteralContext ctx);
/**
* Exit a parse tree produced by the {@code dateEscapedLiteral}
* labeled alternative in {@link SqlBaseParser#constant}.
* @param ctx the parse tree
*/
void exitDateEscapedLiteral(SqlBaseParser.DateEscapedLiteralContext ctx);
/**
* Enter a parse tree produced by the {@code timeEscapedLiteral}
* labeled alternative in {@link SqlBaseParser#constant}.
* @param ctx the parse tree
*/
void enterTimeEscapedLiteral(SqlBaseParser.TimeEscapedLiteralContext ctx);
/**
* Exit a parse tree produced by the {@code timeEscapedLiteral}
* labeled alternative in {@link SqlBaseParser#constant}.
* @param ctx the parse tree
*/
void exitTimeEscapedLiteral(SqlBaseParser.TimeEscapedLiteralContext ctx);
/**
* Enter a parse tree produced by the {@code timestampEscapedLiteral}
* labeled alternative in {@link SqlBaseParser#constant}.
* @param ctx the parse tree
*/
void enterTimestampEscapedLiteral(SqlBaseParser.TimestampEscapedLiteralContext ctx);
/**
* Exit a parse tree produced by the {@code timestampEscapedLiteral}
* labeled alternative in {@link SqlBaseParser#constant}.
* @param ctx the parse tree
*/
void exitTimestampEscapedLiteral(SqlBaseParser.TimestampEscapedLiteralContext ctx);
/**
* Enter a parse tree produced by the {@code guidEscapedLiteral}
* labeled alternative in {@link SqlBaseParser#constant}.
* @param ctx the parse tree
*/
void enterGuidEscapedLiteral(SqlBaseParser.GuidEscapedLiteralContext ctx);
/**
* Exit a parse tree produced by the {@code guidEscapedLiteral}
* labeled alternative in {@link SqlBaseParser#constant}.
* @param ctx the parse tree
*/
void exitGuidEscapedLiteral(SqlBaseParser.GuidEscapedLiteralContext ctx);
/**
* Enter a parse tree produced by {@link SqlBaseParser#comparisonOperator}.
* @param ctx the parse tree
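The listener methods added above mirror the new grammar rules for the JDBC-style escape sequences ({limit ...}, {escape ...}, {fn ...}, {d ...}, {t ...}, {ts ...}, {guid ...}). A minimal sketch of how these hooks are consumed, assuming the ANTLR-generated SqlBaseBaseListener no-op base class and a tree produced by SqlBaseParser; EscapeLogger and the walk call are illustrative, not part of this change:

// Sketch only: SqlBaseBaseListener is ANTLR's generated no-op base class;
// EscapeLogger is a hypothetical consumer of the new callbacks.
class EscapeLogger extends SqlBaseBaseListener {
    @Override
    public void enterLimitClause(SqlBaseParser.LimitClauseContext ctx) {
        // Fires for the {limit n} escape handled by the new limitClause rule.
        System.out.println("{limit} escape: " + ctx.getText());
    }

    @Override
    public void enterDateEscapedLiteral(SqlBaseParser.DateEscapedLiteralContext ctx) {
        // Fires for JDBC-style {d 'yyyy-MM-dd'} literals.
        System.out.println("{d} escape: " + ctx.getText());
    }
}
// Usage: org.antlr.v4.runtime.tree.ParseTreeWalker.DEFAULT.walk(new EscapeLogger(), tree);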


@ -1,8 +1,3 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
// ANTLR GENERATED CODE: DO NOT EDIT
package org.elasticsearch.xpack.sql.parser;
import org.antlr.v4.runtime.tree.ParseTreeVisitor;
@ -123,6 +118,12 @@ interface SqlBaseVisitor<T> extends ParseTreeVisitor<T> {
* @return the visitor result
*/
T visitQueryNoWith(SqlBaseParser.QueryNoWithContext ctx);
/**
* Visit a parse tree produced by {@link SqlBaseParser#limitClause}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitLimitClause(SqlBaseParser.LimitClauseContext ctx);
/**
* Visit a parse tree produced by the {@code queryPrimaryDefault}
* labeled alternative in {@link SqlBaseParser#queryTerm}.
@ -311,6 +312,12 @@ interface SqlBaseVisitor<T> extends ParseTreeVisitor<T> {
* @return the visitor result
*/
T visitPattern(SqlBaseParser.PatternContext ctx);
/**
* Visit a parse tree produced by {@link SqlBaseParser#patternEscape}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitPatternEscape(SqlBaseParser.PatternEscapeContext ctx);
/**
* Visit a parse tree produced by the {@code valueExpressionDefault}
* labeled alternative in {@link SqlBaseParser#valueExpression}.
@ -368,12 +375,12 @@ interface SqlBaseVisitor<T> extends ParseTreeVisitor<T> {
*/
T visitStar(SqlBaseParser.StarContext ctx);
/**
* Visit a parse tree produced by the {@code functionCall}
* Visit a parse tree produced by the {@code function}
* labeled alternative in {@link SqlBaseParser#primaryExpression}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitFunctionCall(SqlBaseParser.FunctionCallContext ctx);
T visitFunction(SqlBaseParser.FunctionContext ctx);
/**
* Visit a parse tree produced by the {@code subqueryExpression}
* labeled alternative in {@link SqlBaseParser#primaryExpression}.
@ -402,6 +409,42 @@ interface SqlBaseVisitor<T> extends ParseTreeVisitor<T> {
* @return the visitor result
*/
T visitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx);
/**
* Visit a parse tree produced by {@link SqlBaseParser#castExpression}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitCastExpression(SqlBaseParser.CastExpressionContext ctx);
/**
* Visit a parse tree produced by {@link SqlBaseParser#castTemplate}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitCastTemplate(SqlBaseParser.CastTemplateContext ctx);
/**
* Visit a parse tree produced by {@link SqlBaseParser#extractExpression}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExtractExpression(SqlBaseParser.ExtractExpressionContext ctx);
/**
* Visit a parse tree produced by {@link SqlBaseParser#extractTemplate}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitExtractTemplate(SqlBaseParser.ExtractTemplateContext ctx);
/**
* Visit a parse tree produced by {@link SqlBaseParser#functionExpression}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitFunctionExpression(SqlBaseParser.FunctionExpressionContext ctx);
/**
* Visit a parse tree produced by {@link SqlBaseParser#functionTemplate}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx);
/**
* Visit a parse tree produced by the {@code nullLiteral}
* labeled alternative in {@link SqlBaseParser#constant}.
@ -437,6 +480,34 @@ interface SqlBaseVisitor<T> extends ParseTreeVisitor<T> {
* @return the visitor result
*/
T visitParamLiteral(SqlBaseParser.ParamLiteralContext ctx);
/**
* Visit a parse tree produced by the {@code dateEscapedLiteral}
* labeled alternative in {@link SqlBaseParser#constant}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitDateEscapedLiteral(SqlBaseParser.DateEscapedLiteralContext ctx);
/**
* Visit a parse tree produced by the {@code timeEscapedLiteral}
* labeled alternative in {@link SqlBaseParser#constant}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitTimeEscapedLiteral(SqlBaseParser.TimeEscapedLiteralContext ctx);
/**
* Visit a parse tree produced by the {@code timestampEscapedLiteral}
* labeled alternative in {@link SqlBaseParser#constant}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitTimestampEscapedLiteral(SqlBaseParser.TimestampEscapedLiteralContext ctx);
/**
* Visit a parse tree produced by the {@code guidEscapedLiteral}
* labeled alternative in {@link SqlBaseParser#constant}.
* @param ctx the parse tree
* @return the visitor result
*/
T visitGuidEscapedLiteral(SqlBaseParser.GuidEscapedLiteralContext ctx);
/**
* Visit a parse tree produced by {@link SqlBaseParser#comparisonOperator}.
* @param ctx the parse tree
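The visitor interface gains the same new rules. A sketch of a visitor that counts escaped literals in a tree, assuming the ANTLR-generated SqlBaseBaseVisitor (which supplies the protected defaultResult and aggregateResult hooks); EscapedLiteralCounter is illustrative only:

// Sketch only: counts escaped date/GUID literals found in a parse tree.
class EscapedLiteralCounter extends SqlBaseBaseVisitor<Integer> {
    @Override
    public Integer visitDateEscapedLiteral(SqlBaseParser.DateEscapedLiteralContext ctx) {
        return 1; // one {d '...'} escape
    }

    @Override
    public Integer visitGuidEscapedLiteral(SqlBaseParser.GuidEscapedLiteralContext ctx) {
        return 1; // one {guid '...'} escape
    }

    @Override
    protected Integer defaultResult() {
        return 0;
    }

    @Override
    protected Integer aggregateResult(Integer aggregate, Integer nextResult) {
        return aggregate + nextResult; // sum counts across children
    }
}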


@ -33,10 +33,13 @@ import java.util.BitSet;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.function.BiFunction;
import java.util.function.Function;
import static java.lang.String.format;
public class SqlParser {
private static final Logger log = Loggers.getLogger(SqlParser.class);
@ -102,16 +105,30 @@ public class SqlParser {
if (DEBUG) {
debug(parser);
tokenStream.fill();
for (Token t : tokenStream.getTokens()) {
String symbolicName = SqlBaseLexer.VOCABULARY.getSymbolicName(t.getType());
String literalName = SqlBaseLexer.VOCABULARY.getLiteralName(t.getType());
log.info(format(Locale.ROOT, " %-15s '%s'",
symbolicName == null ? literalName : symbolicName,
t.getText()));
}
}
ParserRuleContext tree = parseFunction.apply(parser);
if (DEBUG) {
log.info("Parse tree {} " + tree.toStringTree());
}
return visitor.apply(new AstBuilder(paramTokens), tree);
}
private void debug(SqlBaseParser parser) {
// when debugging, use the exact prediction mode (needed for diagnostics as well)
parser.getInterpreter().setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);
parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
parser.addParseListener(parser.new TraceListener());
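On the prediction-mode switch above: LL_EXACT_AMBIG_DETECTION performs exhaustive analysis and reports every ambiguity, which is useful for grammar diagnostics but slow, while SLL is ANTLR's fast mode. A standalone sketch of toggling between the two via the public ANTLR API (configureDebugMode is an illustrative helper, not part of this change):

import org.antlr.v4.runtime.atn.PredictionMode;

// Sketch only: switch between the fast SLL mode and exact ambiguity detection.
static void configureDebugMode(SqlBaseParser parser, boolean exactAmbiguities) {
    PredictionMode mode = exactAmbiguities
        ? PredictionMode.LL_EXACT_AMBIG_DETECTION // exhaustive, reports all ambiguities
        : PredictionMode.SLL;                     // fast path for normal parses
    parser.getInterpreter().setPredictionMode(mode);
}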


@ -159,4 +159,14 @@ public class VerifierErrorMessagesTests extends ESTestCase {
assertEquals("1:44: Cannot order by non-grouped column [SCORE()], expected [int]", assertEquals("1:44: Cannot order by non-grouped column [SCORE()], expected [int]",
verify("SELECT int FROM test GROUP BY int ORDER BY SCORE()")); verify("SELECT int FROM test GROUP BY int ORDER BY SCORE()"));
} }
public void testHavingOnColumn() {
assertEquals("1:42: Cannot filter HAVING on non-aggregate [int]; consider using WHERE instead",
verify("SELECT int FROM test GROUP BY int HAVING int > 2"));
}
public void testHavingOnScalar() {
assertEquals("1:42: Cannot filter HAVING on non-aggregate [int]; consider using WHERE instead",
verify("SELECT int FROM test GROUP BY int HAVING 2 < ABS(int)"));
}
}
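The message in both new tests points users at the rewrite that verifies cleanly. A hypothetical companion test, assuming an accept(...) helper that asserts successful verification (the helper name is an assumption, mirroring verify above):

public void testWhereInsteadOfHavingVerifies() {
    // Hypothetical: assumes an accept(...) helper that asserts the query
    // passes verification; the filter moves from HAVING to WHERE.
    accept("SELECT int FROM test WHERE int > 2 GROUP BY int");
}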


@ -0,0 +1,237 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.sql.parser;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
import org.elasticsearch.xpack.sql.expression.Expression;
import org.elasticsearch.xpack.sql.expression.Literal;
import org.elasticsearch.xpack.sql.expression.UnresolvedAttribute;
import org.elasticsearch.xpack.sql.expression.function.Function;
import org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction;
import org.elasticsearch.xpack.sql.expression.regex.Like;
import org.elasticsearch.xpack.sql.expression.regex.LikePattern;
import org.elasticsearch.xpack.sql.plan.logical.Limit;
import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
import org.elasticsearch.xpack.sql.plan.logical.With;
import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue;
import org.elasticsearch.xpack.sql.type.DataType;
import org.junit.Assert;
import java.util.List;
import java.util.Locale;
import static java.lang.String.format;
import static java.util.Arrays.asList;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
public class EscapedFunctionsTests extends ESTestCase {
private final SqlParser parser = new SqlParser();
private Literal dateLiteral(String date) {
Expression exp = parser.createExpression(format(Locale.ROOT, "{d '%s'}", date));
assertThat(exp, instanceOf(Literal.class));
return (Literal) exp;
}
private Literal timeLiteral(String date) {
Expression exp = parser.createExpression(format(Locale.ROOT, "{t '%s'}", date));
assertThat(exp, instanceOf(Literal.class));
return (Literal) exp;
}
private Literal timestampLiteral(String date) {
Expression exp = parser.createExpression(format(Locale.ROOT, "{ts '%s'}", date));
assertThat(exp, instanceOf(Literal.class));
return (Literal) exp;
}
private Literal guidLiteral(String date) {
Expression exp = parser.createExpression(format(Locale.ROOT, "{guid '%s'}", date));
assertThat(exp, instanceOf(Literal.class));
return (Literal) exp;
}
private Limit limit(int limit) {
LogicalPlan plan = parser.createStatement(format(Locale.ROOT, "SELECT * FROM emp {limit %d}", limit));
assertThat(plan, instanceOf(With.class));
With with = (With) plan;
Limit limitPlan = (Limit) (with.child());
assertThat(limitPlan.limit(), instanceOf(Literal.class));
return limitPlan;
}
private LikePattern likeEscape(String like, String character) {
Expression exp = parser.createExpression(format(Locale.ROOT, "exp LIKE '%s' {escape '%s'}", like, character));
assertThat(exp, instanceOf(Like.class));
return ((Like) exp).right();
}
private Function function(String name) {
Expression exp = parser.createExpression(format(Locale.ROOT, "{fn %s}", name));
assertThat(exp, instanceOf(Function.class));
return (Function) exp;
}
public void testFunctionNoArg() {
Function f = function("SCORE()");
assertEquals("SCORE", f.functionName());
}
public void testFunctionOneArg() {
Function f = function("ABS(foo)");
assertEquals("ABS", f.functionName());
assertEquals(1, f.arguments().size());
Expression arg = f.arguments().get(0);
assertThat(arg, instanceOf(UnresolvedAttribute.class));
UnresolvedAttribute ua = (UnresolvedAttribute) arg;
assertThat(ua.name(), is("foo"));
}
public void testFunctionOneArgFunction() {
Function f = function("ABS({fn SCORE()})");
assertEquals("ABS", f.functionName());
assertEquals(1, f.arguments().size());
Expression arg = f.arguments().get(0);
assertThat(arg, instanceOf(UnresolvedFunction.class));
UnresolvedFunction uf = (UnresolvedFunction) arg;
assertThat(uf.name(), is("SCORE"));
}
public void testFunctionFloorWithExtract() {
Function f = function("CAST({fn FLOOR({fn EXTRACT(YEAR FROM \"foo\")})} AS int)");
assertEquals("CAST", f.functionName());
assertEquals(1, f.arguments().size());
Expression arg = f.arguments().get(0);
assertThat(arg, instanceOf(UnresolvedFunction.class));
f = (Function) arg;
assertEquals("FLOOR", f.functionName());
assertEquals(1, f.arguments().size());
arg = f.arguments().get(0);
assertThat(arg, instanceOf(UnresolvedFunction.class));
UnresolvedFunction uf = (UnresolvedFunction) arg;
assertThat(uf.name(), is("YEAR"));
}
public void testFunctionWithFunctionWithArg() {
Function f = function("POWER(foo, {fn POWER({fn SCORE()}, {fN SCORE()})})");
assertEquals("POWER", f.functionName());
assertEquals(2, f.arguments().size());
Expression arg = f.arguments().get(1);
assertThat(arg, instanceOf(UnresolvedFunction.class));
UnresolvedFunction uf = (UnresolvedFunction) arg;
assertThat(uf.name(), is("POWER"));
assertEquals(2, uf.arguments().size());
List<Expression> args = uf.arguments();
arg = args.get(0);
assertThat(arg, instanceOf(UnresolvedFunction.class));
uf = (UnresolvedFunction) arg;
assertThat(uf.name(), is("SCORE"));
arg = args.get(1);
assertThat(arg, instanceOf(UnresolvedFunction.class));
uf = (UnresolvedFunction) arg;
assertThat(uf.name(), is("SCORE"));
}
public void testFunctionWithFunctionWithArgAndParams() {
Function f = (Function) parser.createExpression("POWER(?, {fn POWER({fn ABS(?)}, {fN ABS(?)})})",
asList(new SqlTypedParamValue(DataType.LONG, 1),
new SqlTypedParamValue(DataType.LONG, 1),
new SqlTypedParamValue(DataType.LONG, 1)));
assertEquals("POWER", f.functionName());
assertEquals(2, f.arguments().size());
Expression arg = f.arguments().get(1);
assertThat(arg, instanceOf(UnresolvedFunction.class));
UnresolvedFunction uf = (UnresolvedFunction) arg;
assertThat(uf.name(), is("POWER"));
assertEquals(2, uf.arguments().size());
List<Expression> args = uf.arguments();
arg = args.get(0);
assertThat(arg, instanceOf(UnresolvedFunction.class));
uf = (UnresolvedFunction) arg;
assertThat(uf.name(), is("ABS"));
arg = args.get(1);
assertThat(arg, instanceOf(UnresolvedFunction.class));
uf = (UnresolvedFunction) arg;
assertThat(uf.name(), is("ABS"));
}
public void testDateLiteral() {
Literal l = dateLiteral("2012-01-01");
assertThat(l.dataType(), is(DataType.DATE));
}
public void testDateLiteralValidation() {
ParsingException ex = expectThrows(ParsingException.class, () -> dateLiteral("2012-13-01"));
assertEquals("line 1:2: Invalid date received; Cannot parse \"2012-13-01\": Value 13 for monthOfYear must be in the range [1,12]",
ex.getMessage());
}
public void testTimeLiteralUnsupported() {
SqlIllegalArgumentException ex = expectThrows(SqlIllegalArgumentException.class, () -> timeLiteral("10:10:10"));
assertThat(ex.getMessage(), is("Time (only) literals are not supported; a date component is required as well"));
}
public void testTimeLiteralValidation() {
ParsingException ex = expectThrows(ParsingException.class, () -> timeLiteral("10:10:65"));
assertEquals("line 1:2: Invalid time received; Cannot parse \"10:10:65\": Value 65 for secondOfMinute must be in the range [0,59]",
ex.getMessage());
}
public void testTimestampLiteral() {
Literal l = timestampLiteral("2012-01-01 10:01:02.3456");
assertThat(l.dataType(), is(DataType.DATE));
}
public void testTimestampLiteralValidation() {
ParsingException ex = expectThrows(ParsingException.class, () -> timestampLiteral("2012-01-01T10:01:02.3456"));
assertEquals(
"line 1:2: Invalid timestamp received; Invalid format: \"2012-01-01T10:01:02.3456\" is malformed at \"T10:01:02.3456\"",
ex.getMessage());
}
public void testGUID() {
Literal l = guidLiteral("12345678-90ab-cdef-0123-456789abcdef");
assertThat(l.dataType(), is(DataType.KEYWORD));
l = guidLiteral("12345678-90AB-cdef-0123-456789ABCdef");
assertThat(l.dataType(), is(DataType.KEYWORD));
}
public void testGUIDValidationHexa() {
ParsingException ex = expectThrows(ParsingException.class, () -> guidLiteral("12345678-90ab-cdef-0123-456789abcdeH"));
assertEquals("line 1:8: Invalid GUID, expected hexadecimal at offset[35], found [H]", ex.getMessage());
}
public void testGUIDValidationGroups() {
ParsingException ex = expectThrows(ParsingException.class, () -> guidLiteral("12345678A90ab-cdef-0123-456789abcdeH"));
assertEquals("line 1:8: Invalid GUID, expected group separator at offset [8], found [A]", ex.getMessage());
}
public void testGUIDValidationLength() {
ParsingException ex = expectThrows(ParsingException.class, () -> guidLiteral("12345678A90"));
assertEquals("line 1:8: Invalid GUID, too short", ex.getMessage());
}
public void testLimit() {
Limit limit = limit(10);
Literal l = (Literal) limit.limit();
Assert.assertThat(l.value(), is(10));
}
public void testLikeEscape() {
LikePattern pattern = likeEscape("|%tring", "|");
assertThat(pattern.escape(), is('|'));
}
}
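Taken together, these tests document the supported JDBC escape syntax end to end. A short usage sketch against the SqlParser entry points exercised above (the demo class, table, and column names are placeholders; the same package as the parser is assumed):

// Sketch only: exercising the escape sequences covered by the tests above.
public class EscapeSyntaxDemo {
    public static void main(String[] args) {
        SqlParser parser = new SqlParser();
        System.out.println(parser.createExpression("{d '2012-01-01'}"));            // date literal
        System.out.println(parser.createExpression("{fn ABS(foo)}"));               // function escape
        System.out.println(parser.createStatement("SELECT * FROM emp {limit 10}")); // limit escape
    }
}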


@ -481,7 +481,7 @@ public class SlackMessageTests extends ESTestCase {
if (randomBoolean()) {
templateBuilder.setText(randomAlphaOfLength(10));
}
if (randomBoolean()) {
if (templateBuilder.text == null || randomBoolean()) {
int count = randomIntBetween(0, 3);
for (int i = 0; i < count; i++) {
Attachment.Template.Builder attachmentBuilder = createRandomAttachmentTemplateBuilder();
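A hedged reading of this last hunk: when setText(...) was skipped and text is still null, the builder now always enters the attachment branch, making a generated template with neither text nor attachments far less likely (count can still be zero, so the guarantee is not absolute). A tiny standalone illustration of the guard:

// Sketch only: Math.random() stands in for randomBoolean().
public class GuardDemo {
    public static void main(String[] args) {
        String text = null;                                   // setText(...) was skipped
        boolean addAttachments = text == null || Math.random() < 0.5;
        System.out.println(addAttachments);                   // always true when text == null
    }
}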