expectedParams = new HashMap<>();
String repository = randomIndicesNames(1, 1)[0];
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java
index 18a43ffa8d4..ce9091a91ff 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java
@@ -19,12 +19,8 @@
package org.elasticsearch.client;
-import org.apache.http.HttpEntity;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
-import org.apache.http.entity.ContentType;
-import org.apache.http.entity.StringEntity;
-import org.apache.http.nio.entity.NStringEntity;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.explain.ExplainRequest;
@@ -101,85 +97,106 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
@Before
public void indexDocuments() throws IOException {
- StringEntity doc1 = new StringEntity("{\"type\":\"type1\", \"num\":10, \"num2\":50}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/index/type/1", Collections.emptyMap(), doc1);
- StringEntity doc2 = new StringEntity("{\"type\":\"type1\", \"num\":20, \"num2\":40}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/index/type/2", Collections.emptyMap(), doc2);
- StringEntity doc3 = new StringEntity("{\"type\":\"type1\", \"num\":50, \"num2\":35}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/index/type/3", Collections.emptyMap(), doc3);
- StringEntity doc4 = new StringEntity("{\"type\":\"type2\", \"num\":100, \"num2\":10}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/index/type/4", Collections.emptyMap(), doc4);
- StringEntity doc5 = new StringEntity("{\"type\":\"type2\", \"num\":100, \"num2\":10}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/index/type/5", Collections.emptyMap(), doc5);
- client().performRequest(HttpPost.METHOD_NAME, "/index/_refresh");
+ {
+ Request doc1 = new Request(HttpPut.METHOD_NAME, "/index/type/1");
+ doc1.setJsonEntity("{\"type\":\"type1\", \"num\":10, \"num2\":50}");
+ client().performRequest(doc1);
+ Request doc2 = new Request(HttpPut.METHOD_NAME, "/index/type/2");
+ doc2.setJsonEntity("{\"type\":\"type1\", \"num\":20, \"num2\":40}");
+ client().performRequest(doc2);
+ Request doc3 = new Request(HttpPut.METHOD_NAME, "/index/type/3");
+ doc3.setJsonEntity("{\"type\":\"type1\", \"num\":50, \"num2\":35}");
+ client().performRequest(doc3);
+ Request doc4 = new Request(HttpPut.METHOD_NAME, "/index/type/4");
+ doc4.setJsonEntity("{\"type\":\"type2\", \"num\":100, \"num2\":10}");
+ client().performRequest(doc4);
+ Request doc5 = new Request(HttpPut.METHOD_NAME, "/index/type/5");
+ doc5.setJsonEntity("{\"type\":\"type2\", \"num\":100, \"num2\":10}");
+ client().performRequest(doc5);
+ }
+ {
+ Request doc1 = new Request(HttpPut.METHOD_NAME, "/index1/doc/1");
+ doc1.setJsonEntity("{\"field\":\"value1\", \"rating\": 7}");
+ client().performRequest(doc1);
+ Request doc2 = new Request(HttpPut.METHOD_NAME, "/index1/doc/2");
+ doc2.setJsonEntity("{\"field\":\"value2\"}");
+ client().performRequest(doc2);
+ }
- StringEntity doc = new StringEntity("{\"field\":\"value1\", \"rating\": 7}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/index1/doc/1", Collections.emptyMap(), doc);
- doc = new StringEntity("{\"field\":\"value2\"}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/index1/doc/2", Collections.emptyMap(), doc);
-
- StringEntity mappings = new StringEntity(
- "{" +
- " \"mappings\": {" +
- " \"doc\": {" +
- " \"properties\": {" +
- " \"rating\": {" +
- " \"type\": \"keyword\"" +
- " }" +
- " }" +
- " }" +
- " }" +
- "}}",
- ContentType.APPLICATION_JSON);
- client().performRequest("PUT", "/index2", Collections.emptyMap(), mappings);
- doc = new StringEntity("{\"field\":\"value1\", \"rating\": \"good\"}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/index2/doc/3", Collections.emptyMap(), doc);
- doc = new StringEntity("{\"field\":\"value2\"}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/index2/doc/4", Collections.emptyMap(), doc);
-
- doc = new StringEntity("{\"field\":\"value1\"}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/index3/doc/5", Collections.emptyMap(), doc);
- doc = new StringEntity("{\"field\":\"value2\"}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/index3/doc/6", Collections.emptyMap(), doc);
-
- mappings = new StringEntity(
- "{" +
+ {
+ Request create = new Request("PUT", "/index2");
+ create.setJsonEntity(
+ "{" +
" \"mappings\": {" +
" \"doc\": {" +
" \"properties\": {" +
- " \"field1\": {" +
- " \"type\": \"keyword\"," +
- " \"store\": true" +
- " }," +
- " \"field2\": {" +
- " \"type\": \"keyword\"," +
- " \"store\": true" +
+ " \"rating\": {" +
+ " \"type\": \"keyword\"" +
" }" +
" }" +
" }" +
" }" +
- "}}",
- ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/index4", Collections.emptyMap(), mappings);
- doc = new StringEntity("{\"field1\":\"value1\", \"field2\":\"value2\"}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/index4/doc/1", Collections.emptyMap(), doc);
- StringEntity aliasFilter = new StringEntity(
- "{" +
- " \"actions\" : [" +
- " {" +
- " \"add\" : {" +
- " \"index\" : \"index4\"," +
- " \"alias\" : \"alias4\"," +
- " \"filter\" : { \"term\" : { \"field2\" : \"value1\" } }" +
- " }" +
- " }" +
- " ]" +
- "}",
- ContentType.APPLICATION_JSON);
- client().performRequest(HttpPost.METHOD_NAME, "/_aliases", Collections.emptyMap(), aliasFilter);
+ "}");
+ client().performRequest(create);
+ Request doc3 = new Request(HttpPut.METHOD_NAME, "/index2/doc/3");
+ doc3.setJsonEntity("{\"field\":\"value1\", \"rating\": \"good\"}");
+ client().performRequest(doc3);
+ Request doc4 = new Request(HttpPut.METHOD_NAME, "/index2/doc/4");
+ doc4.setJsonEntity("{\"field\":\"value2\"}");
+ client().performRequest(doc4);
+ }
- client().performRequest(HttpPost.METHOD_NAME, "/index1,index2,index3,index4/_refresh");
+ {
+ Request doc5 = new Request(HttpPut.METHOD_NAME, "/index3/doc/5");
+ doc5.setJsonEntity("{\"field\":\"value1\"}");
+ client().performRequest(doc5);
+ Request doc6 = new Request(HttpPut.METHOD_NAME, "/index3/doc/6");
+ doc6.setJsonEntity("{\"field\":\"value2\"}");
+ client().performRequest(doc6);
+ }
+
+ {
+ Request create = new Request(HttpPut.METHOD_NAME, "/index4");
+ create.setJsonEntity(
+ "{" +
+ " \"mappings\": {" +
+ " \"doc\": {" +
+ " \"properties\": {" +
+ " \"field1\": {" +
+ " \"type\": \"keyword\"," +
+ " \"store\": true" +
+ " }," +
+ " \"field2\": {" +
+ " \"type\": \"keyword\"," +
+ " \"store\": true" +
+ " }" +
+ " }" +
+ " }" +
+ " }" +
+ "}");
+ client().performRequest(create);
+ Request doc1 = new Request(HttpPut.METHOD_NAME, "/index4/doc/1");
+ doc1.setJsonEntity("{\"field1\":\"value1\", \"field2\":\"value2\"}");
+ client().performRequest(doc1);
+
+ Request createFilteredAlias = new Request(HttpPost.METHOD_NAME, "/_aliases");
+ createFilteredAlias.setJsonEntity(
+ "{" +
+ " \"actions\" : [" +
+ " {" +
+ " \"add\" : {" +
+ " \"index\" : \"index4\"," +
+ " \"alias\" : \"alias4\"," +
+ " \"filter\" : { \"term\" : { \"field2\" : \"value1\" } }" +
+ " }" +
+ " }" +
+ " ]" +
+ "}");
+ client().performRequest(createFilteredAlias);
+ }
+
+ client().performRequest(new Request(HttpPost.METHOD_NAME, "/_refresh"));
}
public void testSearchNoQuery() throws IOException {
@@ -377,7 +394,9 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
public void testSearchWithParentJoin() throws IOException {
final String indexName = "child_example";
- StringEntity parentMapping = new StringEntity("{\n" +
+ Request createIndex = new Request(HttpPut.METHOD_NAME, "/" + indexName);
+ createIndex.setJsonEntity(
+ "{\n" +
" \"mappings\": {\n" +
" \"qa\" : {\n" +
" \"properties\" : {\n" +
@@ -388,9 +407,11 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
" }\n" +
" }\n" +
" }" +
- "}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/" + indexName, Collections.emptyMap(), parentMapping);
- StringEntity questionDoc = new StringEntity("{\n" +
+ "}");
+ client().performRequest(createIndex);
+ Request questionDoc = new Request(HttpPut.METHOD_NAME, "/" + indexName + "/qa/1");
+ questionDoc.setJsonEntity(
+ "{\n" +
" \"body\": \"I have Windows 2003 server and i bought a new Windows 2008 server...\",\n" +
" \"title\": \"Whats the best way to file transfer my site from server to a newer one?\",\n" +
" \"tags\": [\n" +
@@ -399,9 +420,12 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
" \"file-transfer\"\n" +
" ],\n" +
" \"qa_join_field\" : \"question\"\n" +
- "}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/" + indexName + "/qa/1", Collections.emptyMap(), questionDoc);
- StringEntity answerDoc1 = new StringEntity("{\n" +
+ "}");
+ client().performRequest(questionDoc);
+ Request answerDoc1 = new Request(HttpPut.METHOD_NAME, "/" + indexName + "/qa/2");
+ answerDoc1.addParameter("routing", "1");
+ answerDoc1.setJsonEntity(
+ "{\n" +
" \"owner\": {\n" +
" \"location\": \"Norfolk, United Kingdom\",\n" +
" \"display_name\": \"Sam\",\n" +
@@ -413,9 +437,12 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
" \"parent\" : \"1\"\n" +
" },\n" +
" \"creation_date\": \"2009-05-04T13:45:37.030\"\n" +
- "}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/" + indexName + "/qa/2", Collections.singletonMap("routing", "1"), answerDoc1);
- StringEntity answerDoc2 = new StringEntity("{\n" +
+ "}");
+ client().performRequest(answerDoc1);
+ Request answerDoc2 = new Request(HttpPut.METHOD_NAME, "/" + indexName + "/qa/3");
+ answerDoc2.addParameter("routing", "1");
+ answerDoc2.setJsonEntity(
+ "{\n" +
" \"owner\": {\n" +
" \"location\": \"Norfolk, United Kingdom\",\n" +
" \"display_name\": \"Troll\",\n" +
@@ -427,9 +454,9 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
" \"parent\" : \"1\"\n" +
" },\n" +
" \"creation_date\": \"2009-05-05T13:45:37.030\"\n" +
- "}", ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "/" + indexName + "/qa/3", Collections.singletonMap("routing", "1"), answerDoc2);
- client().performRequest(HttpPost.METHOD_NAME, "/_refresh");
+ "}");
+ client().performRequest(answerDoc2);
+ client().performRequest(new Request(HttpPost.METHOD_NAME, "/_refresh"));
TermsAggregationBuilder leafTermAgg = new TermsAggregationBuilder("top-names", ValueType.STRING)
.field("owner.display_name.keyword").size(10);
@@ -506,9 +533,10 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
}
public void testSearchWithWeirdScriptFields() throws Exception {
- HttpEntity entity = new NStringEntity("{ \"field\":\"value\"}", ContentType.APPLICATION_JSON);
- client().performRequest("PUT", "test/type/1", Collections.emptyMap(), entity);
- client().performRequest("POST", "/test/_refresh");
+ Request doc = new Request("PUT", "test/type/1");
+ doc.setJsonEntity("{\"field\":\"value\"}");
+ client().performRequest(doc);
+ client().performRequest(new Request("POST", "/test/_refresh"));
{
SearchRequest searchRequest = new SearchRequest("test").source(SearchSourceBuilder.searchSource()
@@ -547,13 +575,13 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
}
public void testSearchScroll() throws Exception {
-
for (int i = 0; i < 100; i++) {
XContentBuilder builder = jsonBuilder().startObject().field("field", i).endObject();
- HttpEntity entity = new NStringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON);
- client().performRequest(HttpPut.METHOD_NAME, "test/type1/" + Integer.toString(i), Collections.emptyMap(), entity);
+ Request doc = new Request(HttpPut.METHOD_NAME, "/test/type1/" + Integer.toString(i));
+ doc.setJsonEntity(Strings.toString(builder));
+ client().performRequest(doc);
}
- client().performRequest(HttpPost.METHOD_NAME, "/test/_refresh");
+ client().performRequest(new Request(HttpPost.METHOD_NAME, "/test/_refresh"));
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().size(35).sort("field", SortOrder.ASC);
SearchRequest searchRequest = new SearchRequest("test").scroll(TimeValue.timeValueMinutes(2)).source(searchSourceBuilder);
@@ -878,11 +906,11 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
assertToXContentEquivalent(expectedSource, actualSource, XContentType.JSON);
}
-
-
+
+
public void testMultiSearchTemplate() throws Exception {
MultiSearchTemplateRequest multiSearchTemplateRequest = new MultiSearchTemplateRequest();
-
+
SearchTemplateRequest goodRequest = new SearchTemplateRequest();
goodRequest.setRequest(new SearchRequest("index"));
goodRequest.setScriptType(ScriptType.INLINE);
@@ -900,8 +928,8 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
goodRequest.setExplain(true);
goodRequest.setProfile(true);
multiSearchTemplateRequest.add(goodRequest);
-
-
+
+
SearchTemplateRequest badRequest = new SearchTemplateRequest();
badRequest.setRequest(new SearchRequest("index"));
badRequest.setScriptType(ScriptType.INLINE);
@@ -910,17 +938,17 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
scriptParams.put("number", 10);
badRequest.setScriptParams(scriptParams);
- multiSearchTemplateRequest.add(badRequest);
-
+ multiSearchTemplateRequest.add(badRequest);
+
MultiSearchTemplateResponse multiSearchTemplateResponse =
- execute(multiSearchTemplateRequest, highLevelClient()::multiSearchTemplate,
+ execute(multiSearchTemplateRequest, highLevelClient()::multiSearchTemplate,
highLevelClient()::multiSearchTemplateAsync);
-
+
Item[] responses = multiSearchTemplateResponse.getResponses();
-
+
assertEquals(2, responses.length);
-
-
+
+
assertNull(responses[0].getResponse().getSource());
SearchResponse goodResponse =responses[0].getResponse().getResponse();
assertNotNull(goodResponse);
@@ -930,18 +958,18 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
assertThat(goodResponse.getHits().getMaxScore(), greaterThan(0f));
SearchHit hit = goodResponse.getHits().getHits()[0];
assertNotNull(hit.getExplanation());
- assertFalse(goodResponse.getProfileResults().isEmpty());
-
-
+ assertFalse(goodResponse.getProfileResults().isEmpty());
+
+
assertNull(responses[0].getResponse().getSource());
assertThat(responses[1].isFailure(), Matchers.is(true));
- assertNotNull(responses[1].getFailureMessage());
+ assertNotNull(responses[1].getFailureMessage());
assertThat(responses[1].getFailureMessage(), containsString("json_parse_exception"));
}
-
+
public void testMultiSearchTemplateAllBad() throws Exception {
MultiSearchTemplateRequest multiSearchTemplateRequest = new MultiSearchTemplateRequest();
-
+
SearchTemplateRequest badRequest1 = new SearchTemplateRequest();
badRequest1.setRequest(new SearchRequest("index"));
badRequest1.setScriptType(ScriptType.INLINE);
@@ -957,8 +985,8 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
scriptParams.put("number", "BAD NUMBER");
badRequest1.setScriptParams(scriptParams);
multiSearchTemplateRequest.add(badRequest1);
-
-
+
+
SearchTemplateRequest badRequest2 = new SearchTemplateRequest();
badRequest2.setRequest(new SearchRequest("index"));
badRequest2.setScriptType(ScriptType.INLINE);
@@ -967,13 +995,13 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
scriptParams.put("number", "BAD NUMBER");
badRequest2.setScriptParams(scriptParams);
- multiSearchTemplateRequest.add(badRequest2);
-
- // The whole HTTP request should fail if no nested search requests are valid
+ multiSearchTemplateRequest.add(badRequest2);
+
+ // The whole HTTP request should fail if no nested search requests are valid
ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class,
- () -> execute(multiSearchTemplateRequest, highLevelClient()::multiSearchTemplate,
+ () -> execute(multiSearchTemplateRequest, highLevelClient()::multiSearchTemplate,
highLevelClient()::multiSearchTemplateAsync));
-
+
assertEquals(RestStatus.BAD_REQUEST, exception.status());
assertThat(exception.getMessage(), containsString("no requests added"));
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java
index 7ec2ee80f04..45f9b5bbb0b 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java
@@ -28,6 +28,9 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequ
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse;
+import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
@@ -43,6 +46,7 @@ import java.util.stream.Collectors;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
public class SnapshotIT extends ESRestHighLevelClientTestCase {
@@ -173,6 +177,34 @@ public class SnapshotIT extends ESRestHighLevelClientTestCase {
contains("test_snapshot1", "test_snapshot2"));
}
+ public void testSnapshotsStatus() throws IOException {
+ String testRepository = "test";
+ String testSnapshot = "snapshot";
+ String testIndex = "test_index";
+
+ PutRepositoryResponse putRepositoryResponse = createTestRepository(testRepository, FsRepository.TYPE, "{\"location\": \".\"}");
+ assertTrue(putRepositoryResponse.isAcknowledged());
+
+ createIndex(testIndex, Settings.EMPTY);
+
+ CreateSnapshotRequest createSnapshotRequest = new CreateSnapshotRequest(testRepository, testSnapshot);
+ createSnapshotRequest.indices(testIndex);
+ createSnapshotRequest.waitForCompletion(true);
+ CreateSnapshotResponse createSnapshotResponse = createTestSnapshot(createSnapshotRequest);
+ // check that the request went ok without parsing JSON here. When using the high level client, check acknowledgement instead.
+ assertEquals(RestStatus.OK, createSnapshotResponse.status());
+
+ SnapshotsStatusRequest request = new SnapshotsStatusRequest();
+ request.repository(testRepository);
+ request.snapshots(new String[]{testSnapshot});
+ SnapshotsStatusResponse response = execute(request, highLevelClient().snapshot()::status,
+ highLevelClient().snapshot()::statusAsync);
+ assertThat(response.getSnapshots().size(), equalTo(1));
+ assertThat(response.getSnapshots().get(0).getSnapshot().getRepository(), equalTo(testRepository));
+ assertThat(response.getSnapshots().get(0).getSnapshot().getSnapshotId().getName(), equalTo(testSnapshot));
+ assertThat(response.getSnapshots().get(0).getIndices().containsKey(testIndex), is(true));
+ }
+
public void testDeleteSnapshot() throws IOException {
String repository = "test_repository";
String snapshot = "test_snapshot";
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java
index b8a6b7d2d8a..9dad115643c 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java
@@ -19,8 +19,6 @@
package org.elasticsearch.client.documentation;
-import org.apache.http.entity.ContentType;
-import org.apache.http.nio.entity.NStringEntity;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.DocWriteRequest;
@@ -66,7 +64,6 @@ import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
-import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
@@ -756,7 +753,9 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
public void testGet() throws Exception {
RestHighLevelClient client = highLevelClient();
{
- String mappings = "{\n" +
+ Request createIndex = new Request("PUT", "/posts");
+ createIndex.setJsonEntity(
+ "{\n" +
" \"mappings\" : {\n" +
" \"doc\" : {\n" +
" \"properties\" : {\n" +
@@ -767,10 +766,8 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
" }\n" +
" }\n" +
" }\n" +
- "}";
-
- NStringEntity entity = new NStringEntity(mappings, ContentType.APPLICATION_JSON);
- Response response = client().performRequest("PUT", "/posts", Collections.emptyMap(), entity);
+ "}");
+ Response response = client().performRequest(createIndex);
assertEquals(200, response.getStatusLine().getStatusCode());
IndexRequest indexRequest = new IndexRequest("posts", "doc", "1")
@@ -1071,21 +1068,21 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
RestHighLevelClient client = highLevelClient();
{
- String mappings = "{\n" +
- " \"mappings\" : {\n" +
- " \"type\" : {\n" +
- " \"properties\" : {\n" +
- " \"foo\" : {\n" +
- " \"type\": \"text\",\n" +
- " \"store\": true\n" +
- " }\n" +
- " }\n" +
- " }\n" +
- " }\n" +
- "}";
-
- NStringEntity entity = new NStringEntity(mappings, ContentType.APPLICATION_JSON);
- Response response = client().performRequest("PUT", "/index", Collections.emptyMap(), entity);
+ Request createIndex = new Request("PUT", "/index");
+ createIndex.setJsonEntity(
+ "{\n" +
+ " \"mappings\" : {\n" +
+ " \"type\" : {\n" +
+ " \"properties\" : {\n" +
+ " \"foo\" : {\n" +
+ " \"type\": \"text\",\n" +
+ " \"store\": true\n" +
+ " }\n" +
+ " }\n" +
+ " }\n" +
+ " }\n" +
+ "}");
+ Response response = client().performRequest(createIndex);
assertEquals(200, response.getStatusLine().getStatusCode());
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java
index 48d01963e23..403ebc7d774 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java
@@ -37,11 +37,16 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest;
import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStats;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStatus;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest;
+import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestHighLevelClient;
+import org.elasticsearch.cluster.SnapshotsInProgress;
import org.elasticsearch.cluster.metadata.RepositoryMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
@@ -84,8 +89,8 @@ import static org.hamcrest.Matchers.equalTo;
public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase {
private static final String repositoryName = "test_repository";
-
private static final String snapshotName = "test_snapshot";
+ private static final String indexName = "test_index";
public void testSnapshotCreateRepository() throws IOException {
RestHighLevelClient client = highLevelClient();
@@ -466,6 +471,7 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase
RestHighLevelClient client = highLevelClient();
createTestRepositories();
+ createTestIndex();
createTestSnapshots();
// tag::get-snapshots-request
@@ -543,10 +549,84 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase
}
}
+ public void testSnapshotSnapshotsStatus() throws IOException {
+ RestHighLevelClient client = highLevelClient();
+ createTestRepositories();
+ createTestIndex();
+ createTestSnapshots();
+
+ // tag::snapshots-status-request
+ SnapshotsStatusRequest request = new SnapshotsStatusRequest();
+ // end::snapshots-status-request
+
+ // tag::snapshots-status-request-repository
+ request.repository(repositoryName); // <1>
+ // end::snapshots-status-request-repository
+ // tag::snapshots-status-request-snapshots
+ String[] snapshots = new String[] {snapshotName};
+ request.snapshots(snapshots); // <1>
+ // end::snapshots-status-request-snapshots
+ // tag::snapshots-status-request-ignoreUnavailable
+ request.ignoreUnavailable(true); // <1>
+ // end::snapshots-status-request-ignoreUnavailable
+ // tag::snapshots-status-request-masterTimeout
+ request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1>
+ request.masterNodeTimeout("1m"); // <2>
+ // end::snapshots-status-request-masterTimeout
+
+ // tag::snapshots-status-execute
+ SnapshotsStatusResponse response = client.snapshot().status(request, RequestOptions.DEFAULT);
+ // end::snapshots-status-execute
+
+ // tag::snapshots-status-response
+ List<SnapshotStatus> snapshotStatusesResponse = response.getSnapshots();
+ SnapshotStatus snapshotStatus = snapshotStatusesResponse.get(0); // <1>
+ SnapshotsInProgress.State snapshotState = snapshotStatus.getState(); // <2>
+ SnapshotStats shardStats = snapshotStatus.getIndices().get(indexName).getShards().get(0).getStats(); // <3>
+ // end::snapshots-status-response
+ assertThat(snapshotStatusesResponse.size(), equalTo(1));
+ assertThat(snapshotStatusesResponse.get(0).getSnapshot().getRepository(), equalTo(repositoryName));
+ assertThat(snapshotStatusesResponse.get(0).getSnapshot().getSnapshotId().getName(), equalTo(snapshotName));
+ assertThat(snapshotState.completed(), equalTo(true));
+ }
+
+ public void testSnapshotSnapshotsStatusAsync() throws InterruptedException {
+ RestHighLevelClient client = highLevelClient();
+ {
+ SnapshotsStatusRequest request = new SnapshotsStatusRequest();
+
+ // tag::snapshots-status-execute-listener
+ ActionListener<SnapshotsStatusResponse> listener =
+ new ActionListener<SnapshotsStatusResponse>() {
+ @Override
+ public void onResponse(SnapshotsStatusResponse snapshotsStatusResponse) {
+ // <1>
+ }
+
+ @Override
+ public void onFailure(Exception e) {
+ // <2>
+ }
+ };
+ // end::snapshots-status-execute-listener
+
+ // Replace the empty listener with a blocking listener in test
+ final CountDownLatch latch = new CountDownLatch(1);
+ listener = new LatchedActionListener<>(listener, latch);
+
+ // tag::snapshots-status-execute-async
+ client.snapshot().statusAsync(request, RequestOptions.DEFAULT, listener); // <1>
+ // end::snapshots-status-execute-async
+
+ assertTrue(latch.await(30L, TimeUnit.SECONDS));
+ }
+ }
+
public void testSnapshotDeleteSnapshot() throws IOException {
RestHighLevelClient client = highLevelClient();
createTestRepositories();
+ createTestIndex();
createTestSnapshots();
// tag::delete-snapshot-request
@@ -608,9 +688,14 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase
assertTrue(highLevelClient().snapshot().createRepository(request, RequestOptions.DEFAULT).isAcknowledged());
}
+ private void createTestIndex() throws IOException {
+ createIndex(indexName, Settings.EMPTY);
+ }
+
private void createTestSnapshots() throws IOException {
Request createSnapshot = new Request("put", String.format(Locale.ROOT, "_snapshot/%s/%s", repositoryName, snapshotName));
createSnapshot.addParameter("wait_for_completion", "true");
+ createSnapshot.setJsonEntity("{\"indices\":\"" + indexName + "\"}");
Response response = highLevelClient().getLowLevelClient().performRequest(createSnapshot);
// check that the request went ok without parsing JSON here. When using the high level client, check acknowledgement instead.
assertEquals(200, response.getStatusLine().getStatusCode());
diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java
index 199b7542e62..93f8481bea6 100644
--- a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java
+++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java
@@ -76,7 +76,7 @@ public class RestClientBuilderIntegTests extends RestClientTestCase {
try {
try (RestClient client = buildRestClient()) {
try {
- client.performRequest("GET", "/");
+ client.performRequest(new Request("GET", "/"));
fail("connection should have been rejected due to SSL handshake");
} catch (Exception e) {
assertThat(e.getMessage(), containsString("General SSLEngine problem"));
@@ -85,7 +85,7 @@ public class RestClientBuilderIntegTests extends RestClientTestCase {
SSLContext.setDefault(getSslContext());
try (RestClient client = buildRestClient()) {
- Response response = client.performRequest("GET", "/");
+ Response response = client.performRequest(new Request("GET", "/"));
assertEquals(200, response.getStatusLine().getStatusCode());
}
} finally {
diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java
index 114d34c73da..6b5bb3c98ee 100644
--- a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java
+++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java
@@ -256,35 +256,51 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
public void testEncodeParams() throws IOException {
{
- Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "this/is/the/routing"));
+ Request request = new Request("PUT", "/200");
+ request.addParameter("routing", "this/is/the/routing");
+ Response response = restClient.performRequest(request);
assertEquals(pathPrefix + "/200?routing=this%2Fis%2Fthe%2Frouting", response.getRequestLine().getUri());
}
{
- Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "this|is|the|routing"));
+ Request request = new Request("PUT", "/200");
+ request.addParameter("routing", "this|is|the|routing");
+ Response response = restClient.performRequest(request);
assertEquals(pathPrefix + "/200?routing=this%7Cis%7Cthe%7Crouting", response.getRequestLine().getUri());
}
{
- Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "routing#1"));
+ Request request = new Request("PUT", "/200");
+ request.addParameter("routing", "routing#1");
+ Response response = restClient.performRequest(request);
assertEquals(pathPrefix + "/200?routing=routing%231", response.getRequestLine().getUri());
}
{
- Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "中文"));
+ Request request = new Request("PUT", "/200");
+ request.addParameter("routing", "中文");
+ Response response = restClient.performRequest(request);
assertEquals(pathPrefix + "/200?routing=%E4%B8%AD%E6%96%87", response.getRequestLine().getUri());
}
{
- Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "foo bar"));
+ Request request = new Request("PUT", "/200");
+ request.addParameter("routing", "foo bar");
+ Response response = restClient.performRequest(request);
assertEquals(pathPrefix + "/200?routing=foo+bar", response.getRequestLine().getUri());
}
{
- Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "foo+bar"));
+ Request request = new Request("PUT", "/200");
+ request.addParameter("routing", "foo+bar");
+ Response response = restClient.performRequest(request);
assertEquals(pathPrefix + "/200?routing=foo%2Bbar", response.getRequestLine().getUri());
}
{
- Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "foo/bar"));
+ Request request = new Request("PUT", "/200");
+ request.addParameter("routing", "foo/bar");
+ Response response = restClient.performRequest(request);
assertEquals(pathPrefix + "/200?routing=foo%2Fbar", response.getRequestLine().getUri());
}
{
- Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "foo^bar"));
+ Request request = new Request("PUT", "/200");
+ request.addParameter("routing", "foo^bar");
+ Response response = restClient.performRequest(request);
assertEquals(pathPrefix + "/200?routing=foo%5Ebar", response.getRequestLine().getUri());
}
}
@@ -341,14 +357,14 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
public void testUrlWithoutLeadingSlash() throws Exception {
if (pathPrefix.length() == 0) {
try {
- restClient.performRequest("GET", "200");
+ restClient.performRequest(new Request("GET", "200"));
fail("request should have failed");
} catch (ResponseException e) {
assertEquals(404, e.getResponse().getStatusLine().getStatusCode());
}
} else {
{
- Response response = restClient.performRequest("GET", "200");
+ Response response = restClient.performRequest(new Request("GET", "200"));
//a trailing slash gets automatically added if a pathPrefix is configured
assertEquals(200, response.getStatusLine().getStatusCode());
}
@@ -357,7 +373,7 @@ public class RestClientSingleHostIntegTests extends RestClientTestCase {
try (RestClient restClient = RestClient.builder(
new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort()))
.setPathPrefix(pathPrefix.substring(1)).build()) {
- Response response = restClient.performRequest("GET", "200");
+ Response response = restClient.performRequest(new Request("GET", "200"));
//a trailing slash gets automatically added if a pathPrefix is configured
assertEquals(200, response.getStatusLine().getStatusCode());
}
diff --git a/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java b/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java
index d347353a1fb..ce2e0907560 100644
--- a/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java
+++ b/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java
@@ -267,7 +267,7 @@ public class RestClientDocumentation {
}
{
//tag::rest-client-response2
- Response response = restClient.performRequest("GET", "/");
+ Response response = restClient.performRequest(new Request("GET", "/"));
RequestLine requestLine = response.getRequestLine(); // <1>
HttpHost host = response.getHost(); // <2>
int statusCode = response.getStatusLine().getStatusCode(); // <3>
diff --git a/docs/java-rest/high-level/snapshot/snapshots_status.asciidoc b/docs/java-rest/high-level/snapshot/snapshots_status.asciidoc
new file mode 100644
index 00000000000..8f91d774f4e
--- /dev/null
+++ b/docs/java-rest/high-level/snapshot/snapshots_status.asciidoc
@@ -0,0 +1,97 @@
+[[java-rest-high-snapshot-snapshots-status]]
+=== Snapshots Status API
+
+The Snapshots Status API allows you to retrieve detailed information about snapshots in progress.
+
+[[java-rest-high-snapshot-snapshots-status-request]]
+==== Snapshots Status Request
+
+A `SnapshotsStatusRequest`:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-request]
+--------------------------------------------------
+
+==== Required Arguments
+The following arguments must be provided:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-request-repository]
+--------------------------------------------------
+<1> Sets the repository to check for snapshot statuses
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-request-snapshots]
+--------------------------------------------------
+<1> The list of snapshot names to check the status of
+
+==== Optional Arguments
+The following arguments can optionally be provided:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-request-ignoreUnavailable]
+--------------------------------------------------
+<1> By default, the command fails if some of the snapshots are unavailable. Setting the `ignore_unavailable`
+flag to true will instead return the status of all snapshots that are currently available.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-request-masterTimeout]
+--------------------------------------------------
+<1> Timeout to connect to the master node as a `TimeValue`
+<2> Timeout to connect to the master node as a `String`
+
+[[java-rest-high-snapshot-snapshots-status-sync]]
+==== Synchronous Execution
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-execute]
+--------------------------------------------------
+
+[[java-rest-high-snapshot-snapshots-status-async]]
+==== Asynchronous Execution
+
+The asynchronous execution of retrieving snapshot statuses requires both the
+`SnapshotsStatusRequest` instance and an `ActionListener` instance to be
+passed to the asynchronous method:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-execute-async]
+--------------------------------------------------
+<1> The `SnapshotsStatusRequest` to execute and the `ActionListener`
+to use when the execution completes
+
+The asynchronous method does not block and returns immediately. Once it is
+completed the `ActionListener` is called back using the `onResponse` method
+if the execution successfully completed or using the `onFailure` method if
+it failed.
+
+A typical listener for `SnapshotsStatusResponse` looks like:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-execute-listener]
+--------------------------------------------------
+<1> Called when the execution is successfully completed. The response is
+provided as an argument
+<2> Called in case of a failure. The raised exception is provided as an argument
+
+[[java-rest-high-snapshot-snapshots-status-response]]
+==== Snapshots Status Response
+
+The returned `SnapshotsStatusResponse` allows you to retrieve information about the
+executed operation as follows:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-response]
+--------------------------------------------------
+<1> Response contains a list of snapshot statuses
+<2> Each status contains information about the snapshot
+<3> Example of reading snapshot statistics about a specific index and shard
diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc
index e69f53eb4ba..cf38040e865 100644
--- a/docs/java-rest/high-level/supported-apis.asciidoc
+++ b/docs/java-rest/high-level/supported-apis.asciidoc
@@ -154,6 +154,7 @@ The Java High Level REST Client supports the following Snapshot APIs:
* <>
* <>
* <>
+* <>
* <>
include::snapshot/get_repository.asciidoc[]
@@ -162,6 +163,7 @@ include::snapshot/delete_repository.asciidoc[]
include::snapshot/verify_repository.asciidoc[]
include::snapshot/create_snapshot.asciidoc[]
include::snapshot/get_snapshots.asciidoc[]
+include::snapshot/snapshots_status.asciidoc[]
include::snapshot/delete_snapshot.asciidoc[]
== Tasks APIs
diff --git a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc
index c2d1614ad6e..efbd8ef7389 100644
--- a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc
+++ b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc
@@ -33,7 +33,7 @@ Available expressions for interval: `year` (`1y`), `quarter` (`1q`), `month` (`1
Time values can also be specified via abbreviations supported by <> parsing.
Note that fractional time values are not supported, but you can address this by shifting to another
time unit (e.g., `1.5h` could instead be specified as `90m`). Also note that time intervals larger than
-than days do not support arbitrary values but can only be one unit large (e.g. `1y` is valid, `2y` is not).
+days do not support arbitrary values but can only be one unit large (e.g. `1y` is valid, `2y` is not).
[source,js]
--------------------------------------------------
diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc
index 39006d1ab53..a29a743fed8 100755
--- a/docs/reference/getting-started.asciidoc
+++ b/docs/reference/getting-started.asciidoc
@@ -104,6 +104,11 @@ With that out of the way, let's get started with the fun part...
== Installation
+You can skip installation completely by using our hosted
+Elasticsearch Service on https://www.elastic.co/cloud[Elastic Cloud], which is
+available on AWS and GCP. You can
+https://www.elastic.co/cloud/elasticsearch-service/signup[try out the hosted service] for free.
+
Elasticsearch requires at least Java 8. Specifically as of this writing, it is recommended that you use the Oracle JDK version {jdk}. Java installation varies from platform to platform so we won't go into those details here. Oracle's recommended installation documentation can be found on http://docs.oracle.com/javase/8/docs/technotes/guides/install/install_overview.html[Oracle's website]. Suffice to say, before you install Elasticsearch, please check your Java version first by running (and then install/upgrade accordingly if needed):
[source,sh]
diff --git a/docs/reference/setup/install.asciidoc b/docs/reference/setup/install.asciidoc
index 783cb804e7a..7675e5ad146 100644
--- a/docs/reference/setup/install.asciidoc
+++ b/docs/reference/setup/install.asciidoc
@@ -1,6 +1,11 @@
[[install-elasticsearch]]
== Installing Elasticsearch
+Elasticsearch can be run on your own hardware or using our hosted
+Elasticsearch Service on https://www.elastic.co/cloud[Elastic Cloud], which is
+available on AWS and GCP. You can
+https://www.elastic.co/cloud/elasticsearch-service/signup[try out the hosted service] for free.
+
Elasticsearch is provided in the following package formats:
[horizontal]
@@ -38,7 +43,7 @@ Elasticsearch on Windows. MSIs may be downloaded from the Elasticsearch website.
`docker`::
Images are available for running Elasticsearch as Docker containers. They may be
-downloaded from the Elastic Docker Registry.
+downloaded from the Elastic Docker Registry.
+
{ref}/docker.html[Install {es} with Docker]
diff --git a/docs/reference/setup/install/docker.asciidoc b/docs/reference/setup/install/docker.asciidoc
index b18f7c57a16..523217b921a 100644
--- a/docs/reference/setup/install/docker.asciidoc
+++ b/docs/reference/setup/install/docker.asciidoc
@@ -8,8 +8,6 @@ A list of all published Docker images and tags can be found in
https://www.docker.elastic.co[www.docker.elastic.co]. The source code can be found
on https://github.com/elastic/elasticsearch-docker/tree/{branch}[GitHub].
-==== Image types
-
These images are free to use under the Elastic license. They contain open source
and free commercial features and access to paid commercial features.
{xpack-ref}/license-management.html[Start a 30-day trial] to try out all of the
@@ -17,9 +15,6 @@ paid commercial features. See the
https://www.elastic.co/subscriptions[Subscriptions] page for information about
Elastic license levels.
-Alternatively, you can download `-oss` images, which contain only features that
-are available under the Apache 2.0 license.
-
==== Pulling the image
Obtaining {es} for Docker is as simple as issuing a +docker pull+ command
@@ -34,14 +29,17 @@ endif::[]
ifeval::["{release-state}"!="unreleased"]
-Docker images can be retrieved with the following commands:
+For example, the Docker image can be retrieved with the following command:
["source","sh",subs="attributes"]
--------------------------------------------
docker pull {docker-repo}:{version}
-docker pull {docker-repo}-oss:{version}
--------------------------------------------
+Alternatively, you can download other Docker images that contain only features
+that are available under the Apache 2.0 license from
+https://www.docker.elastic.co[www.docker.elastic.co].
+
endif::[]
[[docker-cli-run]]
diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java
index b44eaa3bfa3..0d6253c88f9 100644
--- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java
@@ -32,6 +32,8 @@ import org.elasticsearch.ingest.AbstractProcessor;
import org.elasticsearch.ingest.ConfigurationUtils;
import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.Processor;
+import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.script.TemplateScript;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
@@ -42,21 +44,22 @@ public final class DateIndexNameProcessor extends AbstractProcessor {
public static final String TYPE = "date_index_name";
private final String field;
- private final String indexNamePrefix;
- private final String dateRounding;
- private final String indexNameFormat;
+ private final TemplateScript.Factory indexNamePrefixTemplate;
+ private final TemplateScript.Factory dateRoundingTemplate;
+ private final TemplateScript.Factory indexNameFormatTemplate;
private final DateTimeZone timezone;
private final List> dateFormats;
DateIndexNameProcessor(String tag, String field, List> dateFormats, DateTimeZone timezone,
- String indexNamePrefix, String dateRounding, String indexNameFormat) {
+ TemplateScript.Factory indexNamePrefixTemplate, TemplateScript.Factory dateRoundingTemplate,
+ TemplateScript.Factory indexNameFormatTemplate) {
super(tag);
this.field = field;
this.timezone = timezone;
this.dateFormats = dateFormats;
- this.indexNamePrefix = indexNamePrefix;
- this.dateRounding = dateRounding;
- this.indexNameFormat = indexNameFormat;
+ this.indexNamePrefixTemplate = indexNamePrefixTemplate;
+ this.dateRoundingTemplate = dateRoundingTemplate;
+ this.indexNameFormatTemplate = indexNameFormatTemplate;
}
@Override
@@ -83,6 +86,9 @@ public final class DateIndexNameProcessor extends AbstractProcessor {
if (dateTime == null) {
throw new IllegalArgumentException("unable to parse date [" + date + "]", lastException);
}
+ String indexNamePrefix = ingestDocument.renderTemplate(indexNamePrefixTemplate);
+ String indexNameFormat = ingestDocument.renderTemplate(indexNameFormatTemplate);
+ String dateRounding = ingestDocument.renderTemplate(dateRoundingTemplate);
DateTimeFormatter formatter = DateTimeFormat.forPattern(indexNameFormat);
StringBuilder builder = new StringBuilder()
@@ -106,16 +112,16 @@ public final class DateIndexNameProcessor extends AbstractProcessor {
return field;
}
- String getIndexNamePrefix() {
- return indexNamePrefix;
+ TemplateScript.Factory getIndexNamePrefixTemplate() {
+ return indexNamePrefixTemplate;
}
- String getDateRounding() {
- return dateRounding;
+ TemplateScript.Factory getDateRoundingTemplate() {
+ return dateRoundingTemplate;
}
- String getIndexNameFormat() {
- return indexNameFormat;
+ TemplateScript.Factory getIndexNameFormatTemplate() {
+ return indexNameFormatTemplate;
}
DateTimeZone getTimezone() {
@@ -128,6 +134,12 @@ public final class DateIndexNameProcessor extends AbstractProcessor {
public static final class Factory implements Processor.Factory {
+ private final ScriptService scriptService;
+
+ public Factory(ScriptService scriptService) {
+ this.scriptService = scriptService;
+ }
+
@Override
public DateIndexNameProcessor create(Map registry, String tag,
Map config) throws Exception {
@@ -154,9 +166,16 @@ public final class DateIndexNameProcessor extends AbstractProcessor {
String field = ConfigurationUtils.readStringProperty(TYPE, tag, config, "field");
String indexNamePrefix = ConfigurationUtils.readStringProperty(TYPE, tag, config, "index_name_prefix", "");
+ TemplateScript.Factory indexNamePrefixTemplate =
+ ConfigurationUtils.compileTemplate(TYPE, tag, "index_name_prefix", indexNamePrefix, scriptService);
String dateRounding = ConfigurationUtils.readStringProperty(TYPE, tag, config, "date_rounding");
+ TemplateScript.Factory dateRoundingTemplate =
+ ConfigurationUtils.compileTemplate(TYPE, tag, "date_rounding", dateRounding, scriptService);
String indexNameFormat = ConfigurationUtils.readStringProperty(TYPE, tag, config, "index_name_format", "yyyy-MM-dd");
- return new DateIndexNameProcessor(tag, field, dateFormats, timezone, indexNamePrefix, dateRounding, indexNameFormat);
+ TemplateScript.Factory indexNameFormatTemplate =
+ ConfigurationUtils.compileTemplate(TYPE, tag, "index_name_format", indexNameFormat, scriptService);
+ return new DateIndexNameProcessor(tag, field, dateFormats, timezone, indexNamePrefixTemplate,
+ dateRoundingTemplate, indexNameFormatTemplate);
}
}
diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java
index d9878cae9e2..bc475a2a005 100644
--- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java
@@ -73,7 +73,7 @@ public class IngestCommonPlugin extends Plugin implements ActionPlugin, IngestPl
processors.put(GsubProcessor.TYPE, new GsubProcessor.Factory());
processors.put(FailProcessor.TYPE, new FailProcessor.Factory(parameters.scriptService));
processors.put(ForEachProcessor.TYPE, new ForEachProcessor.Factory());
- processors.put(DateIndexNameProcessor.TYPE, new DateIndexNameProcessor.Factory());
+ processors.put(DateIndexNameProcessor.TYPE, new DateIndexNameProcessor.Factory(parameters.scriptService));
processors.put(SortProcessor.TYPE, new SortProcessor.Factory());
processors.put(GrokProcessor.TYPE, new GrokProcessor.Factory(GROK_PATTERNS, createGrokThreadWatchdog(parameters)));
processors.put(ScriptProcessor.TYPE, new ScriptProcessor.Factory(parameters.scriptService));
@@ -97,12 +97,12 @@ public class IngestCommonPlugin extends Plugin implements ActionPlugin, IngestPl
Supplier nodesInCluster) {
return Arrays.asList(new GrokProcessorGetAction.RestAction(settings, restController));
}
-
+
@Override
public List> getSettings() {
return Arrays.asList(WATCHDOG_INTERVAL, WATCHDOG_MAX_EXECUTION_TIME);
}
-
+
private static ThreadWatchdog createGrokThreadWatchdog(Processor.Parameters parameters) {
long intervalMillis = WATCHDOG_INTERVAL.get(parameters.env.settings()).getMillis();
long maxExecutionTimeMillis = WATCHDOG_MAX_EXECUTION_TIME.get(parameters.env.settings()).getMillis();
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameFactoryTests.java
index 3b9e2121c95..2735cf55776 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameFactoryTests.java
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameFactoryTests.java
@@ -20,18 +20,20 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.ingest.TestTemplateService;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matchers;
import org.joda.time.DateTimeZone;
import java.util.Arrays;
+import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
public class DateIndexNameFactoryTests extends ESTestCase {
public void testDefaults() throws Exception {
- DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory();
+ DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory(TestTemplateService.instance());
Map config = new HashMap<>();
config.put("field", "_field");
config.put("date_rounding", "y");
@@ -39,14 +41,14 @@ public class DateIndexNameFactoryTests extends ESTestCase {
DateIndexNameProcessor processor = factory.create(null, null, config);
assertThat(processor.getDateFormats().size(), Matchers.equalTo(1));
assertThat(processor.getField(), Matchers.equalTo("_field"));
- assertThat(processor.getIndexNamePrefix(), Matchers.equalTo(""));
- assertThat(processor.getDateRounding(), Matchers.equalTo("y"));
- assertThat(processor.getIndexNameFormat(), Matchers.equalTo("yyyy-MM-dd"));
+ assertThat(processor.getIndexNamePrefixTemplate().newInstance(Collections.emptyMap()).execute(), Matchers.equalTo(""));
+ assertThat(processor.getDateRoundingTemplate().newInstance(Collections.emptyMap()).execute(), Matchers.equalTo("y"));
+ assertThat(processor.getIndexNameFormatTemplate().newInstance(Collections.emptyMap()).execute(), Matchers.equalTo("yyyy-MM-dd"));
assertThat(processor.getTimezone(), Matchers.equalTo(DateTimeZone.UTC));
}
public void testSpecifyOptionalSettings() throws Exception {
- DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory();
+ DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory(TestTemplateService.instance());
Map config = new HashMap<>();
config.put("field", "_field");
config.put("index_name_prefix", "_prefix");
@@ -63,7 +65,7 @@ public class DateIndexNameFactoryTests extends ESTestCase {
config.put("index_name_format", "yyyyMMdd");
processor = factory.create(null, null, config);
- assertThat(processor.getIndexNameFormat(), Matchers.equalTo("yyyyMMdd"));
+ assertThat(processor.getIndexNameFormatTemplate().newInstance(Collections.emptyMap()).execute(), Matchers.equalTo("yyyyMMdd"));
config = new HashMap<>();
config.put("field", "_field");
@@ -80,11 +82,11 @@ public class DateIndexNameFactoryTests extends ESTestCase {
config.put("date_rounding", "y");
processor = factory.create(null, null, config);
- assertThat(processor.getIndexNamePrefix(), Matchers.equalTo("_prefix"));
+ assertThat(processor.getIndexNamePrefixTemplate().newInstance(Collections.emptyMap()).execute(), Matchers.equalTo("_prefix"));
}
public void testRequiredFields() throws Exception {
- DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory();
+ DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory(TestTemplateService.instance());
Map config = new HashMap<>();
config.put("date_rounding", "y");
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, config));
@@ -95,5 +97,4 @@ public class DateIndexNameFactoryTests extends ESTestCase {
e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, config));
assertThat(e.getMessage(), Matchers.equalTo("[date_rounding] required property is missing"));
}
-
}
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java
index eba37dc7421..c97da116e34 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java
@@ -19,11 +19,14 @@
package org.elasticsearch.ingest.common;
import org.elasticsearch.ingest.IngestDocument;
+import org.elasticsearch.ingest.TestTemplateService;
import org.elasticsearch.test.ESTestCase;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
+import org.joda.time.format.DateTimeFormat;
import java.util.Collections;
+import java.util.List;
import java.util.Locale;
import java.util.function.Function;
@@ -33,11 +36,8 @@ public class DateIndexNameProcessorTests extends ESTestCase {
public void testJodaPattern() throws Exception {
Function function = DateFormat.Joda.getFunction("yyyy-MM-dd'T'HH:mm:ss.SSSZ", DateTimeZone.UTC, Locale.ROOT);
- DateIndexNameProcessor processor = new DateIndexNameProcessor(
- "_tag", "_field", Collections.singletonList(function), DateTimeZone.UTC,
- "events-", "y", "yyyyMMdd"
- );
-
+ DateIndexNameProcessor processor = createProcessor("_field", Collections.singletonList(function),
+ DateTimeZone.UTC, "events-", "y", "yyyyMMdd");
IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null,
Collections.singletonMap("_field", "2016-04-25T12:24:20.101Z"));
processor.execute(document);
@@ -46,7 +46,7 @@ public class DateIndexNameProcessorTests extends ESTestCase {
public void testTAI64N()throws Exception {
Function function = DateFormat.Tai64n.getFunction(null, DateTimeZone.UTC, null);
- DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function),
+ DateIndexNameProcessor dateProcessor = createProcessor("_field", Collections.singletonList(function),
DateTimeZone.UTC, "events-", "m", "yyyyMMdd");
IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null,
Collections.singletonMap("_field", (randomBoolean() ? "@" : "") + "4000000050d506482dbdf024"));
@@ -56,7 +56,7 @@ public class DateIndexNameProcessorTests extends ESTestCase {
public void testUnixMs()throws Exception {
Function function = DateFormat.UnixMs.getFunction(null, DateTimeZone.UTC, null);
- DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function),
+ DateIndexNameProcessor dateProcessor = createProcessor("_field", Collections.singletonList(function),
DateTimeZone.UTC, "events-", "m", "yyyyMMdd");
IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null,
Collections.singletonMap("_field", "1000500"));
@@ -71,7 +71,7 @@ public class DateIndexNameProcessorTests extends ESTestCase {
public void testUnix()throws Exception {
Function function = DateFormat.Unix.getFunction(null, DateTimeZone.UTC, null);
- DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function),
+ DateIndexNameProcessor dateProcessor = createProcessor("_field", Collections.singletonList(function),
DateTimeZone.UTC, "events-", "m", "yyyyMMdd");
IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null,
Collections.singletonMap("_field", "1000.5"));
@@ -79,4 +79,33 @@ public class DateIndexNameProcessorTests extends ESTestCase {
assertThat(document.getSourceAndMetadata().get("_index"), equalTo(""));
}
+ public void testTemplatedFields() throws Exception {
+ String indexNamePrefix = randomAlphaOfLength(10);
+ String dateRounding = randomFrom("y", "M", "w", "d", "h", "m", "s");
+ String indexNameFormat = randomFrom("yyyy-MM-dd'T'HH:mm:ss.SSSZ", "yyyyMMdd", "MM/dd/yyyy");
+ String date = Integer.toString(randomInt());
+ Function dateTimeFunction = DateFormat.Unix.getFunction(null, DateTimeZone.UTC, null);
+
+ DateIndexNameProcessor dateProcessor = createProcessor("_field",
+ Collections.singletonList(dateTimeFunction), DateTimeZone.UTC, indexNamePrefix,
+ dateRounding, indexNameFormat);
+
+ IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null,
+ Collections.singletonMap("_field", date));
+ dateProcessor.execute(document);
+
+ assertThat(document.getSourceAndMetadata().get("_index"),
+ equalTo("<"+indexNamePrefix+"{"+DateTimeFormat.forPattern(indexNameFormat)
+ .print(dateTimeFunction.apply(date))+"||/"+dateRounding+"{"+indexNameFormat+"|UTC}}>"));
+ }
+
+ private DateIndexNameProcessor createProcessor(String field, List> dateFormats,
+ DateTimeZone timezone, String indexNamePrefix, String dateRounding,
+ String indexNameFormat) {
+ return new DateIndexNameProcessor(randomAlphaOfLength(10), field, dateFormats, timezone,
+ new TestTemplateService.MockTemplateScript.Factory(indexNamePrefix),
+ new TestTemplateService.MockTemplateScript.Factory(dateRounding),
+ new TestTemplateService.MockTemplateScript.Factory(indexNameFormat)
+ );
+ }
}
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java
index 8fba759aa16..43a5f9245b1 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java
@@ -24,9 +24,10 @@ import org.elasticsearch.ingest.RandomDocumentPicks;
import org.elasticsearch.ingest.TestTemplateService;
import org.elasticsearch.script.TemplateScript;
import org.elasticsearch.test.ESTestCase;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
@@ -36,19 +37,21 @@ import java.util.Map;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
-import static org.joda.time.DateTimeZone.UTC;
public class DateProcessorTests extends ESTestCase {
+
private TemplateScript.Factory templatize(Locale locale) {
return new TestTemplateService.MockTemplateScript.Factory(locale.getLanguage());
}
- private TemplateScript.Factory templatize(DateTimeZone timezone) {
- return new TestTemplateService.MockTemplateScript.Factory(timezone.getID());
+ private TemplateScript.Factory templatize(ZoneId timezone) {
+ // ZoneOffset.UTC.getId() returns "Z", which joda time cannot parse, so write "UTC" instead
+ String id = timezone.equals(ZoneOffset.UTC) ? "UTC" : timezone.getId();
+ return new TestTemplateService.MockTemplateScript.Factory(id);
}
public void testJodaPattern() {
DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10),
- templatize(DateTimeZone.forID("Europe/Amsterdam")), templatize(Locale.ENGLISH),
+ templatize(ZoneId.of("Europe/Amsterdam")), templatize(Locale.ENGLISH),
"date_as_string", Collections.singletonList("yyyy dd MM hh:mm:ss"), "date_as_date");
Map<String, Object> document = new HashMap<>();
document.put("date_as_string", "2010 12 06 11:05:15");
@@ -63,7 +66,7 @@ public class DateProcessorTests extends ESTestCase {
matchFormats.add("dd/MM/yyyy");
matchFormats.add("dd-MM-yyyy");
DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10),
- templatize(DateTimeZone.forID("Europe/Amsterdam")), templatize(Locale.ENGLISH),
+ templatize(ZoneId.of("Europe/Amsterdam")), templatize(Locale.ENGLISH),
"date_as_string", matchFormats, "date_as_date");
Map<String, Object> document = new HashMap<>();
@@ -98,7 +101,7 @@ public class DateProcessorTests extends ESTestCase {
public void testInvalidJodaPattern() {
try {
DateProcessor processor = new DateProcessor(randomAlphaOfLength(10),
- templatize(UTC), templatize(randomLocale(random())),
+ templatize(ZoneOffset.UTC), templatize(randomLocale(random())),
"date_as_string", Collections.singletonList("invalid pattern"), "date_as_date");
Map<String, Object> document = new HashMap<>();
document.put("date_as_string", "2010");
@@ -112,7 +115,7 @@ public class DateProcessorTests extends ESTestCase {
public void testJodaPatternLocale() {
DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10),
- templatize(DateTimeZone.forID("Europe/Amsterdam")), templatize(Locale.ITALIAN),
+ templatize(ZoneId.of("Europe/Amsterdam")), templatize(Locale.ITALIAN),
"date_as_string", Collections.singletonList("yyyy dd MMM"), "date_as_date");
Map<String, Object> document = new HashMap<>();
document.put("date_as_string", "2010 12 giugno");
@@ -123,18 +126,18 @@ public class DateProcessorTests extends ESTestCase {
public void testJodaPatternDefaultYear() {
DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10),
- templatize(DateTimeZone.forID("Europe/Amsterdam")), templatize(Locale.ENGLISH),
+ templatize(ZoneId.of("Europe/Amsterdam")), templatize(Locale.ENGLISH),
"date_as_string", Collections.singletonList("dd/MM"), "date_as_date");
Map<String, Object> document = new HashMap<>();
document.put("date_as_string", "12/06");
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
dateProcessor.execute(ingestDocument);
assertThat(ingestDocument.getFieldValue("date_as_date", String.class),
- equalTo(DateTime.now().getYear() + "-06-12T00:00:00.000+02:00"));
+ equalTo(ZonedDateTime.now().getYear() + "-06-12T00:00:00.000+02:00"));
}
public void testTAI64N() {
- DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), templatize(DateTimeZone.forOffsetHours(2)),
+ DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), templatize(ZoneOffset.ofHours(2)),
templatize(randomLocale(random())),
"date_as_string", Collections.singletonList("TAI64N"), "date_as_date");
Map<String, Object> document = new HashMap<>();
@@ -146,8 +149,8 @@ public class DateProcessorTests extends ESTestCase {
}
public void testUnixMs() {
- DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), templatize(UTC), templatize(randomLocale(random())),
- "date_as_string", Collections.singletonList("UNIX_MS"), "date_as_date");
+ DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), templatize(ZoneOffset.UTC),
+ templatize(randomLocale(random())), "date_as_string", Collections.singletonList("UNIX_MS"), "date_as_date");
Map<String, Object> document = new HashMap<>();
document.put("date_as_string", "1000500");
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
@@ -162,7 +165,7 @@ public class DateProcessorTests extends ESTestCase {
}
public void testUnix() {
- DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), templatize(UTC),
+ DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), templatize(ZoneOffset.UTC),
templatize(randomLocale(random())),
"date_as_string", Collections.singletonList("UNIX"), "date_as_date");
Map<String, Object> document = new HashMap<>();
@@ -186,7 +189,7 @@ public class DateProcessorTests extends ESTestCase {
public void testInvalidLocale() {
DateProcessor processor = new DateProcessor(randomAlphaOfLength(10),
- templatize(UTC), new TestTemplateService.MockTemplateScript.Factory("invalid_locale"),
+ templatize(ZoneOffset.UTC), new TestTemplateService.MockTemplateScript.Factory("invalid_locale"),
"date_as_string", Collections.singletonList("yyyy"), "date_as_date");
Map<String, Object> document = new HashMap<>();
document.put("date_as_string", "2010");
diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ManyDocumentsIT.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ManyDocumentsIT.java
index e9082c96fd1..6aa1046492c 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ManyDocumentsIT.java
+++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ManyDocumentsIT.java
@@ -19,19 +19,13 @@
package org.elasticsearch.index.reindex;
-import org.apache.http.entity.ContentType;
-import org.apache.http.entity.StringEntity;
-import org.elasticsearch.client.Response;
-import org.elasticsearch.common.xcontent.XContentHelper;
-import org.elasticsearch.common.xcontent.json.JsonXContent;
+import org.elasticsearch.client.Request;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.junit.Before;
import java.io.IOException;
import java.util.Map;
-import static java.util.Collections.emptyMap;
-import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.hasEntry;
/**
@@ -50,48 +44,69 @@ public class ManyDocumentsIT extends ESRestTestCase {
bulk.append("{\"index\":{}}\n");
bulk.append("{\"test\":\"test\"}\n");
}
- client().performRequest("POST", "/test/test/_bulk", singletonMap("refresh", "true"),
- new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON));
+ Request request = new Request("POST", "/test/test/_bulk");
+ request.addParameter("refresh", "true");
+ request.setJsonEntity(bulk.toString());
+ client().performRequest(request);
}
public void testReindex() throws IOException {
- Map<String, Object> response = toMap(client().performRequest("POST", "/_reindex", emptyMap(), new StringEntity(
- "{\"source\":{\"index\":\"test\"}, \"dest\":{\"index\":\"des\"}}",
- ContentType.APPLICATION_JSON)));
+ Request request = new Request("POST", "/_reindex");
+ request.setJsonEntity(
+ "{\n" +
+ " \"source\":{\n" +
+ " \"index\":\"test\"\n" +
+ " },\n" +
+ " \"dest\":{\n" +
+ " \"index\":\"des\"\n" +
+ " }\n" +
+ "}");
+ Map<String, Object> response = entityAsMap(client().performRequest(request));
assertThat(response, hasEntry("total", count));
assertThat(response, hasEntry("created", count));
}
public void testReindexFromRemote() throws IOException {
- Map<?, ?> nodesInfo = toMap(client().performRequest("GET", "/_nodes/http"));
+ Map<?, ?> nodesInfo = entityAsMap(client().performRequest(new Request("GET", "/_nodes/http")));
nodesInfo = (Map<?, ?>) nodesInfo.get("nodes");
Map<?, ?> nodeInfo = (Map<?, ?>) nodesInfo.values().iterator().next();
Map<?, ?> http = (Map<?, ?>) nodeInfo.get("http");
String remote = "http://"+ http.get("publish_address");
- Map<String, Object> response = toMap(client().performRequest("POST", "/_reindex", emptyMap(), new StringEntity(
- "{\"source\":{\"index\":\"test\",\"remote\":{\"host\":\"" + remote + "\"}}, \"dest\":{\"index\":\"des\"}}",
- ContentType.APPLICATION_JSON)));
+ Request request = new Request("POST", "/_reindex");
+ request.setJsonEntity(
+ "{\n" +
+ " \"source\":{\n" +
+ " \"index\":\"test\",\n" +
+ " \"remote\":{\n" +
+ " \"host\":\"" + remote + "\"\n" +
+ " }\n" +
+ " },\n" +
+ " \"dest\":{\n" +
+ " \"index\":\"des\"\n" +
+ " }\n" +
+ "}");
+ Map<String, Object> response = entityAsMap(client().performRequest(request));
assertThat(response, hasEntry("total", count));
assertThat(response, hasEntry("created", count));
}
public void testUpdateByQuery() throws IOException {
- Map<String, Object> response = toMap(client().performRequest("POST", "/test/_update_by_query"));
+ Map<String, Object> response = entityAsMap(client().performRequest(new Request("POST", "/test/_update_by_query")));
assertThat(response, hasEntry("total", count));
assertThat(response, hasEntry("updated", count));
}
public void testDeleteByQuery() throws IOException {
- Map<String, Object> response = toMap(client().performRequest("POST", "/test/_delete_by_query", emptyMap(), new StringEntity(
- "{\"query\":{\"match_all\":{}}}",
- ContentType.APPLICATION_JSON)));
+ Request request = new Request("POST", "/test/_delete_by_query");
+ request.setJsonEntity(
+ "{\n" +
+ " \"query\":{\n" +
+ " \"match_all\": {}\n" +
+ " }\n" +
+ "}");
+ Map<String, Object> response = entityAsMap(client().performRequest(request));
assertThat(response, hasEntry("total", count));
assertThat(response, hasEntry("deleted", count));
}
-
- static Map<String, Object> toMap(Response response) throws IOException {
- return XContentHelper.convertToMap(JsonXContent.jsonXContent, response.getEntity().getContent(), false);
- }
-
}
diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/ReindexFromOldRemoteIT.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/ReindexFromOldRemoteIT.java
index 5d359053a66..9feed83595f 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/ReindexFromOldRemoteIT.java
+++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/ReindexFromOldRemoteIT.java
@@ -19,25 +19,24 @@
package org.elasticsearch.index.reindex.remote;
-import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
-import org.apache.http.entity.ContentType;
-import org.apache.http.entity.StringEntity;
import org.apache.http.util.EntityUtils;
+import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
-import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.test.rest.ESRestTestCase;
import java.io.IOException;
-import java.util.Map;
-import java.util.TreeMap;
-import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.containsString;
public class ReindexFromOldRemoteIT extends ESRestTestCase {
+ /**
+ * Number of documents to test when reindexing from an old version.
+ */
+ private static final int DOCS = 5;
+
private void oldEsTestCase(String portPropertyName, String requestsPerSecond) throws IOException {
boolean enabled = Booleans.parseBoolean(System.getProperty("tests.fromOld"));
assumeTrue("test is disabled, probably because this is windows", enabled);
@@ -45,17 +44,19 @@ public class ReindexFromOldRemoteIT extends ESRestTestCase {
int oldEsPort = Integer.parseInt(System.getProperty(portPropertyName));
try (RestClient oldEs = RestClient.builder(new HttpHost("127.0.0.1", oldEsPort)).build()) {
try {
- HttpEntity entity = new StringEntity("{\"settings\":{\"number_of_shards\": 1}}", ContentType.APPLICATION_JSON);
- oldEs.performRequest("PUT", "/test", singletonMap("refresh", "true"), entity);
+ Request createIndex = new Request("PUT", "/test");
+ createIndex.setJsonEntity("{\"settings\":{\"number_of_shards\": 1}}");
+ oldEs.performRequest(createIndex);
- entity = new StringEntity("{\"test\":\"test\"}", ContentType.APPLICATION_JSON);
- oldEs.performRequest("PUT", "/test/doc/testdoc1", singletonMap("refresh", "true"), entity);
- oldEs.performRequest("PUT", "/test/doc/testdoc2", singletonMap("refresh", "true"), entity);
- oldEs.performRequest("PUT", "/test/doc/testdoc3", singletonMap("refresh", "true"), entity);
- oldEs.performRequest("PUT", "/test/doc/testdoc4", singletonMap("refresh", "true"), entity);
- oldEs.performRequest("PUT", "/test/doc/testdoc5", singletonMap("refresh", "true"), entity);
+ for (int i = 0; i < DOCS; i++) {
+ Request doc = new Request("PUT", "/test/doc/testdoc" + i);
+ doc.addParameter("refresh", "true");
+ doc.setJsonEntity("{\"test\":\"test\"}");
+ oldEs.performRequest(doc);
+ }
- entity = new StringEntity(
+ Request reindex = new Request("POST", "/_reindex");
+ reindex.setJsonEntity(
"{\n"
+ " \"source\":{\n"
+ " \"index\": \"test\",\n"
@@ -67,36 +68,23 @@ public class ReindexFromOldRemoteIT extends ESRestTestCase {
+ " \"dest\": {\n"
+ " \"index\": \"test\"\n"
+ " }\n"
- + "}",
- ContentType.APPLICATION_JSON);
- Map<String, String> params = new TreeMap<>();
- params.put("refresh", "true");
- params.put("pretty", "true");
+ + "}");
+ reindex.addParameter("refresh", "true");
+ reindex.addParameter("pretty", "true");
if (requestsPerSecond != null) {
- params.put("requests_per_second", requestsPerSecond);
+ reindex.addParameter("requests_per_second", requestsPerSecond);
}
- client().performRequest("POST", "/_reindex", params, entity);
+ client().performRequest(reindex);
- Response response = client().performRequest("POST", "test/_search", singletonMap("pretty", "true"));
+ Request search = new Request("POST", "/test/_search");
+ search.addParameter("pretty", "true");
+ Response response = client().performRequest(search);
String result = EntityUtils.toString(response.getEntity());
- assertThat(result, containsString("\"_id\" : \"testdoc1\""));
- } finally {
- try {
- oldEs.performRequest("DELETE", "/test");
- } catch (ResponseException e) {
- /* Try not to throw ResponseException for as it'll eat the
- * real exception. This is because the rest client throws
- * exceptions in a "funny" way that isn't compatible with
- * `suppressed`. In the case of 404s we'll just log something
- * and move on because that just means that a previous
- * failure caused the index not to be created. */
- if (e.getResponse().getStatusLine().getStatusCode() == 404) {
- logger.warn("old index not deleted because it doesn't exist");
- } else {
- logger.error("failed to remove old index", e);
- fail("failed to remove old index, see log");
- }
+ for (int i = 0; i < DOCS; i++) {
+ assertThat(result, containsString("\"_id\" : \"testdoc" + i + "\""));
}
+ } finally {
+ oldEs.performRequest(new Request("DELETE", "/test"));
}
}
}
diff --git a/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java b/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java
index f33fa98f0e3..65d9b87b07d 100644
--- a/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java
+++ b/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java
@@ -24,6 +24,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.apache.http.HttpEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.nio.entity.NStringEntity;
+import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.PathUtils;
@@ -44,7 +45,6 @@ import java.net.URL;
import java.util.List;
import java.util.Map;
-import static java.util.Collections.emptyMap;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
@@ -70,8 +70,10 @@ public class RepositoryURLClientYamlTestSuiteIT extends ESClientYamlSuiteTestCas
**/
@Before
public void registerRepositories() throws IOException {
- Response clusterSettingsResponse = client().performRequest("GET", "/_cluster/settings?include_defaults=true" +
- "&filter_path=defaults.path.repo,defaults.repositories.url.allowed_urls");
+ Request clusterSettingsRequest = new Request("GET", "/_cluster/settings");
+ clusterSettingsRequest.addParameter("include_defaults", "true");
+ clusterSettingsRequest.addParameter("filter_path", "defaults.path.repo,defaults.repositories.url.allowed_urls");
+ Response clusterSettingsResponse = client().performRequest(clusterSettingsRequest);
Map<String, Object> clusterSettings = entityAsMap(clusterSettingsResponse);
@SuppressWarnings("unchecked")
@@ -83,13 +85,17 @@ public class RepositoryURLClientYamlTestSuiteIT extends ESClientYamlSuiteTestCas
final URI pathRepoUri = PathUtils.get(pathRepo).toUri().normalize();
// Create a FS repository using the path.repo location
- Response createFsRepositoryResponse = client().performRequest("PUT", "_snapshot/repository-fs", emptyMap(),
- buildRepositorySettings(FsRepository.TYPE, Settings.builder().put("location", pathRepo).build()));
+ Request createFsRepositoryRequest = new Request("PUT", "/_snapshot/repository-fs");
+ createFsRepositoryRequest.setEntity(buildRepositorySettings(FsRepository.TYPE,
+ Settings.builder().put("location", pathRepo).build()));
+ Response createFsRepositoryResponse = client().performRequest(createFsRepositoryRequest);
assertThat(createFsRepositoryResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
// Create a URL repository using the file://{path.repo} URL
- Response createFileRepositoryResponse = client().performRequest("PUT", "_snapshot/repository-file", emptyMap(),
- buildRepositorySettings(URLRepository.TYPE, Settings.builder().put("url", pathRepoUri.toString()).build()));
+ Request createFileRepositoryRequest = new Request("PUT", "/_snapshot/repository-file");
+ createFileRepositoryRequest.setEntity(buildRepositorySettings(URLRepository.TYPE,
+ Settings.builder().put("url", pathRepoUri.toString()).build()));
+ Response createFileRepositoryResponse = client().performRequest(createFileRepositoryRequest);
assertThat(createFileRepositoryResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
// Create a URL repository using the http://{fixture} URL
@@ -99,8 +105,10 @@ public class RepositoryURLClientYamlTestSuiteIT extends ESClientYamlSuiteTestCas
try {
InetAddress inetAddress = InetAddress.getByName(new URL(allowedUrl).getHost());
if (inetAddress.isAnyLocalAddress() || inetAddress.isLoopbackAddress()) {
- Response createUrlRepositoryResponse = client().performRequest("PUT", "_snapshot/repository-url", emptyMap(),
- buildRepositorySettings(URLRepository.TYPE, Settings.builder().put("url", allowedUrl).build()));
+ Request createUrlRepositoryRequest = new Request("PUT", "/_snapshot/repository-url");
+ createUrlRepositoryRequest.setEntity(buildRepositorySettings(URLRepository.TYPE,
+ Settings.builder().put("url", allowedUrl).build()));
+ Response createUrlRepositoryResponse = client().performRequest(createUrlRepositoryRequest);
assertThat(createUrlRepositoryResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
break;
}
@@ -126,4 +134,3 @@ public class RepositoryURLClientYamlTestSuiteIT extends ESClientYamlSuiteTestCas
}
}
}
-
diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle
index dc2140a6086..5af0a412b4c 100644
--- a/plugins/repository-s3/build.gradle
+++ b/plugins/repository-s3/build.gradle
@@ -92,23 +92,26 @@ String s3TemporaryBasePath = System.getenv("amazon_s3_base_path_temporary")
// If all these variables are missing then we are testing against the internal fixture instead, which has the following
// credentials hard-coded in.
-if (!s3PermanentAccessKey && !s3PermanentSecretKey && !s3PermanentBucket && !s3PermanentBasePath
- && !s3TemporaryAccessKey && !s3TemporarySecretKey && !s3TemporaryBucket && !s3TemporaryBasePath && !s3TemporarySessionToken) {
-
+if (!s3PermanentAccessKey && !s3PermanentSecretKey && !s3PermanentBucket && !s3PermanentBasePath) {
s3PermanentAccessKey = 's3_integration_test_permanent_access_key'
s3PermanentSecretKey = 's3_integration_test_permanent_secret_key'
s3PermanentBucket = 'permanent-bucket-test'
s3PermanentBasePath = 'integration_test'
+ useFixture = true
+
+} else if (!s3PermanentAccessKey || !s3PermanentSecretKey || !s3PermanentBucket || !s3PermanentBasePath) {
+ throw new IllegalArgumentException("not all options specified to run against external S3 service")
+}
+
+if (!s3TemporaryAccessKey && !s3TemporarySecretKey && !s3TemporaryBucket && !s3TemporaryBasePath && !s3TemporarySessionToken) {
s3TemporaryAccessKey = 's3_integration_test_temporary_access_key'
s3TemporarySecretKey = 's3_integration_test_temporary_secret_key'
s3TemporaryBucket = 'temporary-bucket-test'
s3TemporaryBasePath = 'integration_test'
s3TemporarySessionToken = 's3_integration_test_temporary_session_token'
- useFixture = true
-} else if (!s3PermanentAccessKey || !s3PermanentSecretKey || !s3PermanentBucket || !s3PermanentBasePath
- || !s3TemporaryAccessKey || !s3TemporarySecretKey || !s3TemporaryBucket || !s3TemporaryBasePath || !s3TemporarySessionToken) {
+} else if (!s3TemporaryAccessKey || !s3TemporarySecretKey || !s3TemporaryBucket || !s3TemporaryBasePath || !s3TemporarySessionToken) {
throw new IllegalArgumentException("not all options specified to run against external S3 service")
}
@@ -296,6 +299,13 @@ processTestResources {
MavenFilteringHack.filter(it, expansions)
}
+project.afterEvaluate {
+ if (useFixture == false) {
+ // 30_repository_temporary_credentials is not ready for CI yet
+ integTestRunner.systemProperty 'tests.rest.blacklist', 'repository_s3/30_repository_temporary_credentials/*'
+ }
+}
+
integTestCluster {
keystoreSetting 's3.client.integration_test_permanent.access_key', s3PermanentAccessKey
keystoreSetting 's3.client.integration_test_permanent.secret_key', s3PermanentSecretKey
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatus.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatus.java
index 39abd8613ca..834e238e4a0 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatus.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatus.java
@@ -19,16 +19,27 @@
package org.elasticsearch.action.admin.cluster.snapshots.status;
+import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.support.broadcast.BroadcastShardResponse;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentParserUtils;
+import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus;
import java.io.IOException;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
+
public class SnapshotIndexShardStatus extends BroadcastShardResponse implements ToXContentFragment {
private SnapshotIndexShardStage stage = SnapshotIndexShardStage.INIT;
@@ -80,6 +91,14 @@ public class SnapshotIndexShardStatus extends BroadcastShardResponse implements
this.nodeId = nodeId;
}
+ SnapshotIndexShardStatus(ShardId shardId, SnapshotIndexShardStage stage, SnapshotStats stats, String nodeId, String failure) {
+ super(shardId);
+ this.stage = stage;
+ this.stats = stats;
+ this.nodeId = nodeId;
+ this.failure = failure;
+ }
+
/**
* Returns snapshot stage
*/
@@ -143,7 +162,7 @@ public class SnapshotIndexShardStatus extends BroadcastShardResponse implements
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(Integer.toString(getShardId().getId()));
builder.field(Fields.STAGE, getStage());
- stats.toXContent(builder, params);
+ builder.field(SnapshotStats.Fields.STATS, stats, params);
if (getNodeId() != null) {
builder.field(Fields.NODE, getNodeId());
}
@@ -153,4 +172,72 @@ public class SnapshotIndexShardStatus extends BroadcastShardResponse implements
builder.endObject();
return builder;
}
+
+ static final ObjectParser.NamedObjectParser<SnapshotIndexShardStatus, String> PARSER;
+ static {
+ ConstructingObjectParser<SnapshotIndexShardStatus, ShardId> innerParser = new ConstructingObjectParser<>(
+ "snapshot_index_shard_status", true,
+ (Object[] parsedObjects, ShardId shard) -> {
+ int i = 0;
+ String rawStage = (String) parsedObjects[i++];
+ String nodeId = (String) parsedObjects[i++];
+ String failure = (String) parsedObjects[i++];
+ SnapshotStats stats = (SnapshotStats) parsedObjects[i];
+
+ SnapshotIndexShardStage stage;
+ try {
+ stage = SnapshotIndexShardStage.valueOf(rawStage);
+ } catch (IllegalArgumentException iae) {
+ throw new ElasticsearchParseException(
+ "failed to parse snapshot index shard status [{}][{}], unknown stage [{}]",
+ shard.getIndex().getName(), shard.getId(), rawStage);
+ }
+ return new SnapshotIndexShardStatus(shard, stage, stats, nodeId, failure);
+ }
+ );
+ innerParser.declareString(constructorArg(), new ParseField(Fields.STAGE));
+ innerParser.declareString(optionalConstructorArg(), new ParseField(Fields.NODE));
+ innerParser.declareString(optionalConstructorArg(), new ParseField(Fields.REASON));
+ innerParser.declareObject(constructorArg(), (p, c) -> SnapshotStats.fromXContent(p), new ParseField(SnapshotStats.Fields.STATS));
+ PARSER = (p, indexId, shardName) -> {
+ // Combine the index name in the context with the shard name passed in for the named object parser
+ // into a ShardId to pass as context for the inner parser.
+ int shard;
+ try {
+ shard = Integer.parseInt(shardName);
+ } catch (NumberFormatException nfe) {
+ throw new ElasticsearchParseException(
+ "failed to parse snapshot index shard status [{}], expected numeric shard id but got [{}]", indexId, shardName);
+ }
+ ShardId shardId = new ShardId(new Index(indexId, IndexMetaData.INDEX_UUID_NA_VALUE), shard);
+ return innerParser.parse(p, shardId);
+ };
+ }
+
+ public static SnapshotIndexShardStatus fromXContent(XContentParser parser, String indexId) throws IOException {
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
+ return PARSER.parse(parser, indexId, parser.currentName());
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ SnapshotIndexShardStatus that = (SnapshotIndexShardStatus) o;
+
+ if (stage != that.stage) return false;
+ if (stats != null ? !stats.equals(that.stats) : that.stats != null) return false;
+ if (nodeId != null ? !nodeId.equals(that.nodeId) : that.nodeId != null) return false;
+ return failure != null ? failure.equals(that.failure) : that.failure == null;
+ }
+
+ @Override
+ public int hashCode() {
+ int result = stage != null ? stage.hashCode() : 0;
+ result = 31 * result + (stats != null ? stats.hashCode() : 0);
+ result = 31 * result + (nodeId != null ? nodeId.hashCode() : 0);
+ result = 31 * result + (failure != null ? failure.hashCode() : 0);
+ return result;
+ }
}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatus.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatus.java
index 1605e41dc61..ba858495980 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatus.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatus.java
@@ -19,17 +19,24 @@
package org.elasticsearch.action.admin.cluster.snapshots.status;
-import org.elasticsearch.common.xcontent.ToXContent.Params;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentParserUtils;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
+import java.util.List;
import java.util.Map;
+import static java.util.Collections.emptyMap;
import static java.util.Collections.unmodifiableMap;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
/**
* Represents snapshot status of all shards in the index
@@ -57,6 +64,14 @@ public class SnapshotIndexStatus implements Iterable<SnapshotIndexShardStatus>,
this.indexShards = unmodifiableMap(indexShards);
}
+ public SnapshotIndexStatus(String index, Map<Integer, SnapshotIndexShardStatus> indexShards, SnapshotShardsStats shardsStats,
+ SnapshotStats stats) {
+ this.index = index;
+ this.indexShards = indexShards;
+ this.shardsStats = shardsStats;
+ this.stats = stats;
+ }
+
/**
* Returns the index name
*/
@@ -97,8 +112,8 @@ public class SnapshotIndexStatus implements Iterable<SnapshotIndexShardStatus>,
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(getIndex());
- shardsStats.toXContent(builder, params);
- stats.toXContent(builder, params);
+ builder.field(SnapshotShardsStats.Fields.SHARDS_STATS, shardsStats, params);
+ builder.field(SnapshotStats.Fields.STATS, stats, params);
builder.startObject(Fields.SHARDS);
for (SnapshotIndexShardStatus shard : indexShards.values()) {
shard.toXContent(builder, params);
@@ -107,4 +122,61 @@ public class SnapshotIndexStatus implements Iterable,
builder.endObject();
return builder;
}
+
+    /**
+     * Parser for one index-status entry. The enclosing JSON object keys each entry by its index
+     * name, so a {@link ObjectParser.NamedObjectParser} is used and the field name is threaded
+     * through as the constructor context.
+     */
+    static final ObjectParser.NamedObjectParser<SnapshotIndexStatus, Void> PARSER;
+    static {
+        ConstructingObjectParser<SnapshotIndexStatus, String> innerParser = new ConstructingObjectParser<>(
+            "snapshot_index_status", true,
+            (Object[] parsedObjects, String index) -> {
+                int i = 0;
+                SnapshotShardsStats shardsStats = ((SnapshotShardsStats) parsedObjects[i++]);
+                SnapshotStats stats = ((SnapshotStats) parsedObjects[i++]);
+                @SuppressWarnings("unchecked") List<SnapshotIndexShardStatus> shardStatuses =
+                    (List<SnapshotIndexShardStatus>) parsedObjects[i];
+
+                // Re-key the parsed shard statuses by shard id; a missing or empty list becomes an empty map.
+                final Map<Integer, SnapshotIndexShardStatus> indexShards;
+                if (shardStatuses == null || shardStatuses.isEmpty()) {
+                    indexShards = emptyMap();
+                } else {
+                    indexShards = new HashMap<>(shardStatuses.size());
+                    for (SnapshotIndexShardStatus shardStatus : shardStatuses) {
+                        indexShards.put(shardStatus.getShardId().getId(), shardStatus);
+                    }
+                }
+                return new SnapshotIndexStatus(index, indexShards, shardsStats, stats);
+            });
+        innerParser.declareObject(constructorArg(), (p, c) -> SnapshotShardsStats.PARSER.apply(p, null),
+            new ParseField(SnapshotShardsStats.Fields.SHARDS_STATS));
+        innerParser.declareObject(constructorArg(), (p, c) -> SnapshotStats.fromXContent(p),
+            new ParseField(SnapshotStats.Fields.STATS));
+        innerParser.declareNamedObjects(constructorArg(), SnapshotIndexShardStatus.PARSER, new ParseField(Fields.SHARDS));
+        PARSER = ((p, c, name) -> innerParser.apply(p, name));
+    }
+
+    /**
+     * Reads a {@link SnapshotIndexStatus} from x-content. The parser must be positioned on the
+     * field name holding the index name, which becomes {@link #getIndex()}.
+     */
+    public static SnapshotIndexStatus fromXContent(XContentParser parser) throws IOException {
+        XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
+        return PARSER.parse(parser, null, parser.currentName());
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) {
+            return true;
+        }
+        if (o == null || getClass() != o.getClass()) {
+            return false;
+        }
+        SnapshotIndexStatus other = (SnapshotIndexStatus) o;
+        // Null-safe field-by-field comparison.
+        return (index == null ? other.index == null : index.equals(other.index))
+            && (indexShards == null ? other.indexShards == null : indexShards.equals(other.indexShards))
+            && (shardsStats == null ? other.shardsStats == null : shardsStats.equals(other.shardsStats))
+            && (stats == null ? other.stats == null : stats.equals(other.stats));
+    }
+
+    @Override
+    public int hashCode() {
+        int result = index == null ? 0 : index.hashCode();
+        result = 31 * result + (indexShards == null ? 0 : indexShards.hashCode());
+        result = 31 * result + (shardsStats == null ? 0 : shardsStats.hashCode());
+        result = 31 * result + (stats == null ? 0 : stats.hashCode());
+        return result;
+    }
}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStats.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStats.java
index c74dd5af1ee..c0ac432292d 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStats.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStats.java
@@ -19,17 +19,22 @@
package org.elasticsearch.action.admin.cluster.snapshots.status;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.ToXContentFragment;
+import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Collection;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+
/**
* Status of a snapshot shards
*/
-public class SnapshotShardsStats implements ToXContentFragment {
+public class SnapshotShardsStats implements ToXContentObject {
private int initializingShards;
private int startedShards;
@@ -63,6 +68,16 @@ public class SnapshotShardsStats implements ToXContentFragment {
}
}
+    /**
+     * Constructs shard stats from explicit per-state counters; used when the object is rebuilt
+     * from parsed x-content instead of being aggregated from live shard statuses.
+     */
+    public SnapshotShardsStats(int initializingShards, int startedShards, int finalizingShards, int doneShards, int failedShards,
+                               int totalShards) {
+        this.initializingShards = initializingShards;
+        this.startedShards = startedShards;
+        this.finalizingShards = finalizingShards;
+        this.doneShards = doneShards;
+        this.failedShards = failedShards;
+        this.totalShards = totalShards;
+    }
+
/**
* Number of shards with the snapshot in the initializing stage
*/
@@ -117,15 +132,68 @@ public class SnapshotShardsStats implements ToXContentFragment {
@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
- builder.startObject(Fields.SHARDS_STATS);
- builder.field(Fields.INITIALIZING, getInitializingShards());
- builder.field(Fields.STARTED, getStartedShards());
- builder.field(Fields.FINALIZING, getFinalizingShards());
- builder.field(Fields.DONE, getDoneShards());
- builder.field(Fields.FAILED, getFailedShards());
- builder.field(Fields.TOTAL, getTotalShards());
+ builder.startObject();
+ {
+ builder.field(Fields.INITIALIZING, getInitializingShards());
+ builder.field(Fields.STARTED, getStartedShards());
+ builder.field(Fields.FINALIZING, getFinalizingShards());
+ builder.field(Fields.DONE, getDoneShards());
+ builder.field(Fields.FAILED, getFailedShards());
+ builder.field(Fields.TOTAL, getTotalShards());
+ }
builder.endObject();
return builder;
}
+    /** Parses the {@code shards_stats} object back into a {@link SnapshotShardsStats}. */
+    static final ConstructingObjectParser<SnapshotShardsStats, Void> PARSER = new ConstructingObjectParser<>(
+        Fields.SHARDS_STATS, true,
+        (Object[] parsedObjects) -> {
+            int i = 0;
+            int initializingShards = (int) parsedObjects[i++];
+            int startedShards = (int) parsedObjects[i++];
+            int finalizingShards = (int) parsedObjects[i++];
+            int doneShards = (int) parsedObjects[i++];
+            int failedShards = (int) parsedObjects[i++];
+            int totalShards = (int) parsedObjects[i];
+            return new SnapshotShardsStats(initializingShards, startedShards, finalizingShards, doneShards, failedShards, totalShards);
+        }
+    );
+    static {
+        PARSER.declareInt(constructorArg(), new ParseField(Fields.INITIALIZING));
+        PARSER.declareInt(constructorArg(), new ParseField(Fields.STARTED));
+        PARSER.declareInt(constructorArg(), new ParseField(Fields.FINALIZING));
+        PARSER.declareInt(constructorArg(), new ParseField(Fields.DONE));
+        PARSER.declareInt(constructorArg(), new ParseField(Fields.FAILED));
+        PARSER.declareInt(constructorArg(), new ParseField(Fields.TOTAL));
+    }
+
+    /** Reads a {@link SnapshotShardsStats} from x-content positioned on its object. */
+    public static SnapshotShardsStats fromXContent(XContentParser parser) throws IOException {
+        return PARSER.apply(parser, null);
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) {
+            return true;
+        }
+        if (o == null || getClass() != o.getClass()) {
+            return false;
+        }
+        SnapshotShardsStats other = (SnapshotShardsStats) o;
+        // All fields are primitive ints; compare them directly.
+        return initializingShards == other.initializingShards
+            && startedShards == other.startedShards
+            && finalizingShards == other.finalizingShards
+            && doneShards == other.doneShards
+            && failedShards == other.failedShards
+            && totalShards == other.totalShards;
+    }
+
+    @Override
+    public int hashCode() {
+        // Standard 31-multiplier rolling hash over all counters, in declaration order.
+        int result = initializingShards;
+        for (int counter : new int[] {startedShards, finalizingShards, doneShards, failedShards, totalShards}) {
+            result = 31 * result + counter;
+        }
+        return result;
+    }
}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStats.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStats.java
index 76f6b219184..6cb56bd88dc 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStats.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStats.java
@@ -26,12 +26,14 @@ import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.ToXContentFragment;
+import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentParserUtils;
import java.io.IOException;
-public class SnapshotStats implements Streamable, ToXContentFragment {
+public class SnapshotStats implements Streamable, ToXContentObject {
private long startTime;
private long time;
@@ -176,35 +178,132 @@ public class SnapshotStats implements Streamable, ToXContentFragment {
@Override
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
- builder.startObject(Fields.STATS)
- // incremental starts
- .startObject(Fields.INCREMENTAL)
- .field(Fields.FILE_COUNT, getIncrementalFileCount())
- .humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getIncrementalSize()))
- // incremental ends
- .endObject();
+ builder.startObject();
+ {
+ builder.startObject(Fields.INCREMENTAL);
+ {
+ builder.field(Fields.FILE_COUNT, getIncrementalFileCount());
+ builder.humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getIncrementalSize()));
+ }
+ builder.endObject();
- if (getProcessedFileCount() != getIncrementalFileCount()) {
- // processed starts
- builder.startObject(Fields.PROCESSED)
- .field(Fields.FILE_COUNT, getProcessedFileCount())
- .humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getProcessedSize()))
- // processed ends
- .endObject();
+ if (getProcessedFileCount() != getIncrementalFileCount()) {
+ builder.startObject(Fields.PROCESSED);
+ {
+ builder.field(Fields.FILE_COUNT, getProcessedFileCount());
+ builder.humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getProcessedSize()));
+ }
+ builder.endObject();
+ }
+
+ builder.startObject(Fields.TOTAL);
+ {
+ builder.field(Fields.FILE_COUNT, getTotalFileCount());
+ builder.humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getTotalSize()));
+ }
+ builder.endObject();
+
+ // timings stats
+ builder.field(Fields.START_TIME_IN_MILLIS, getStartTime());
+ builder.humanReadableField(Fields.TIME_IN_MILLIS, Fields.TIME, new TimeValue(getTime()));
}
- // total starts
- builder.startObject(Fields.TOTAL)
- .field(Fields.FILE_COUNT, getTotalFileCount())
- .humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getTotalSize()))
- // total ends
- .endObject();
- // timings stats
- builder.field(Fields.START_TIME_IN_MILLIS, getStartTime())
- .humanReadableField(Fields.TIME_IN_MILLIS, Fields.TIME, new TimeValue(getTime()));
-
return builder.endObject();
}
+    /**
+     * Parses a {@link SnapshotStats} from x-content. Parsed by hand instead of with an
+     * {@code ObjectParser} because there is an impedance mismatch between how the object has
+     * historically been written as JSON (nested {@code incremental}/{@code processed}/{@code total}
+     * sub-objects plus flat timing fields) and how it is structured in Java. Unknown fields are
+     * skipped so the parser stays lenient.
+     */
+    public static SnapshotStats fromXContent(XContentParser parser) throws IOException {
+        XContentParser.Token token = parser.currentToken();
+        if (token == null) {
+            token = parser.nextToken();
+        }
+        XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation);
+        long startTime = 0;
+        long time = 0;
+        int incrementalFileCount = 0;
+        int totalFileCount = 0;
+        int processedFileCount = 0;
+        long incrementalSize = 0;
+        long totalSize = 0;
+        long processedSize = 0;
+        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+            XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
+            String currentName = parser.currentName();
+            token = parser.nextToken();
+            if (currentName.equals(Fields.INCREMENTAL)) {
+                long[] fileCountAndSize = parseFileCountAndSize(parser, token);
+                incrementalFileCount = (int) fileCountAndSize[0];
+                incrementalSize = fileCountAndSize[1];
+            } else if (currentName.equals(Fields.PROCESSED)) {
+                long[] fileCountAndSize = parseFileCountAndSize(parser, token);
+                processedFileCount = (int) fileCountAndSize[0];
+                processedSize = fileCountAndSize[1];
+            } else if (currentName.equals(Fields.TOTAL)) {
+                long[] fileCountAndSize = parseFileCountAndSize(parser, token);
+                totalFileCount = (int) fileCountAndSize[0];
+                totalSize = fileCountAndSize[1];
+            } else if (currentName.equals(Fields.START_TIME_IN_MILLIS)) {
+                XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation);
+                startTime = parser.longValue();
+            } else if (currentName.equals(Fields.TIME_IN_MILLIS)) {
+                XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation);
+                time = parser.longValue();
+            } else {
+                // Unknown field, skip
+                if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.START_ARRAY) {
+                    parser.skipChildren();
+                }
+            }
+        }
+        return new SnapshotStats(startTime, time, incrementalFileCount, totalFileCount, processedFileCount, incrementalSize, totalSize,
+            processedSize);
+    }
+
+    /**
+     * Parses one of the nested {@code incremental}/{@code processed}/{@code total} objects, which
+     * all share the same {@code file_count}/{@code size_in_bytes} layout. {@code token} must be the
+     * token following the object's field name. Returns {@code [fileCount, sizeInBytes]}; unknown
+     * sub fields are skipped.
+     */
+    private static long[] parseFileCountAndSize(XContentParser parser, XContentParser.Token token) throws IOException {
+        XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation);
+        long fileCount = 0;
+        long sizeInBytes = 0;
+        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+            XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
+            String innerName = parser.currentName();
+            token = parser.nextToken();
+            if (innerName.equals(Fields.FILE_COUNT)) {
+                XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation);
+                fileCount = parser.intValue();
+            } else if (innerName.equals(Fields.SIZE_IN_BYTES)) {
+                XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation);
+                sizeInBytes = parser.longValue();
+            } else {
+                // Unknown sub field, skip
+                if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.START_ARRAY) {
+                    parser.skipChildren();
+                }
+            }
+        }
+        return new long[] {fileCount, sizeInBytes};
+    }
+
void add(SnapshotStats stats) {
incrementalFileCount += stats.incrementalFileCount;
totalFileCount += stats.totalFileCount;
@@ -229,4 +328,34 @@ public class SnapshotStats implements Streamable, ToXContentFragment {
time = endTime - startTime;
}
}
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) {
+            return true;
+        }
+        if (o == null || getClass() != o.getClass()) {
+            return false;
+        }
+        SnapshotStats other = (SnapshotStats) o;
+        // All fields are primitives; compare them directly.
+        return startTime == other.startTime
+            && time == other.time
+            && incrementalFileCount == other.incrementalFileCount
+            && totalFileCount == other.totalFileCount
+            && processedFileCount == other.processedFileCount
+            && incrementalSize == other.incrementalSize
+            && totalSize == other.totalSize
+            && processedSize == other.processedSize;
+    }
+
+    @Override
+    public int hashCode() {
+        // Long.hashCode(x) is exactly (int) (x ^ (x >>> 32)), so these values match the classic form.
+        int result = Long.hashCode(startTime);
+        result = 31 * result + Long.hashCode(time);
+        result = 31 * result + incrementalFileCount;
+        result = 31 * result + totalFileCount;
+        result = 31 * result + processedFileCount;
+        result = 31 * result + Long.hashCode(incrementalSize);
+        result = 31 * result + Long.hashCode(totalSize);
+        result = 31 * result + Long.hashCode(processedSize);
+        return result;
+    }
}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java
index f7545ea0236..618bb54c901 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java
@@ -20,15 +20,21 @@
package org.elasticsearch.action.admin.cluster.snapshots.status;
import org.elasticsearch.Version;
+import org.elasticsearch.cluster.SnapshotsInProgress;
import org.elasticsearch.cluster.SnapshotsInProgress.State;
+import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.snapshots.Snapshot;
+import org.elasticsearch.snapshots.SnapshotId;
import java.io.IOException;
import java.util.ArrayList;
@@ -40,7 +46,11 @@ import java.util.Map;
import java.util.Objects;
import java.util.Set;
+import static java.util.Collections.emptyList;
+import static java.util.Collections.emptyMap;
import static java.util.Collections.unmodifiableMap;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
/**
* Status of a snapshot
@@ -72,6 +82,18 @@ public class SnapshotStatus implements ToXContentObject, Streamable {
updateShardStats();
}
+    /**
+     * Constructs a fully populated status; used when rebuilding an instance from parsed x-content,
+     * where the aggregate shard/snapshot stats have already been parsed rather than derived via
+     * {@code updateShardStats()}.
+     */
+    private SnapshotStatus(Snapshot snapshot, State state, List<SnapshotIndexShardStatus> shards,
+                           Map<String, SnapshotIndexStatus> indicesStatus, SnapshotShardsStats shardsStats,
+                           SnapshotStats stats, Boolean includeGlobalState) {
+        this.snapshot = snapshot;
+        this.state = state;
+        this.shards = shards;
+        this.indicesStatus = indicesStatus;
+        this.shardsStats = shardsStats;
+        this.stats = stats;
+        this.includeGlobalState = includeGlobalState;
+    }
+
+
SnapshotStatus() {
}
@@ -207,8 +229,8 @@ public class SnapshotStatus implements ToXContentObject, Streamable {
if (includeGlobalState != null) {
builder.field(INCLUDE_GLOBAL_STATE, includeGlobalState);
}
- shardsStats.toXContent(builder, params);
- stats.toXContent(builder, params);
+ builder.field(SnapshotShardsStats.Fields.SHARDS_STATS, shardsStats, params);
+ builder.field(SnapshotStats.Fields.STATS, stats, params);
builder.startObject(INDICES);
for (SnapshotIndexStatus indexStatus : getIndices().values()) {
indexStatus.toXContent(builder, params);
@@ -218,6 +240,52 @@ public class SnapshotStatus implements ToXContentObject, Streamable {
return builder;
}
+    /** Parses a single snapshot status entry of a status response. */
+    static final ConstructingObjectParser<SnapshotStatus, Void> PARSER = new ConstructingObjectParser<>(
+        "snapshot_status", true,
+        (Object[] parsedObjects) -> {
+            int i = 0;
+            String name = (String) parsedObjects[i++];
+            String repository = (String) parsedObjects[i++];
+            String uuid = (String) parsedObjects[i++];
+            String rawState = (String) parsedObjects[i++];
+            Boolean includeGlobalState = (Boolean) parsedObjects[i++];
+            SnapshotStats stats = ((SnapshotStats) parsedObjects[i++]);
+            SnapshotShardsStats shardsStats = ((SnapshotShardsStats) parsedObjects[i++]);
+            @SuppressWarnings("unchecked") List<SnapshotIndexStatus> indices = ((List<SnapshotIndexStatus>) parsedObjects[i]);
+
+            Snapshot snapshot = new Snapshot(repository, new SnapshotId(name, uuid));
+            SnapshotsInProgress.State state = SnapshotsInProgress.State.valueOf(rawState);
+            // Rebuild both the per-index map and the flat shard list from the parsed index statuses.
+            Map<String, SnapshotIndexStatus> indicesStatus;
+            List<SnapshotIndexShardStatus> shards;
+            if (indices == null || indices.isEmpty()) {
+                indicesStatus = emptyMap();
+                shards = emptyList();
+            } else {
+                indicesStatus = new HashMap<>(indices.size());
+                shards = new ArrayList<>();
+                for (SnapshotIndexStatus index : indices) {
+                    indicesStatus.put(index.getIndex(), index);
+                    shards.addAll(index.getShards().values());
+                }
+            }
+            return new SnapshotStatus(snapshot, state, shards, indicesStatus, shardsStats, stats, includeGlobalState);
+        });
+    static {
+        PARSER.declareString(constructorArg(), new ParseField(SNAPSHOT));
+        PARSER.declareString(constructorArg(), new ParseField(REPOSITORY));
+        PARSER.declareString(constructorArg(), new ParseField(UUID));
+        PARSER.declareString(constructorArg(), new ParseField(STATE));
+        PARSER.declareBoolean(optionalConstructorArg(), new ParseField(INCLUDE_GLOBAL_STATE));
+        PARSER.declareField(constructorArg(), SnapshotStats::fromXContent, new ParseField(SnapshotStats.Fields.STATS),
+            ObjectParser.ValueType.OBJECT);
+        PARSER.declareObject(constructorArg(), SnapshotShardsStats.PARSER, new ParseField(SnapshotShardsStats.Fields.SHARDS_STATS));
+        PARSER.declareNamedObjects(constructorArg(), SnapshotIndexStatus.PARSER, new ParseField(INDICES));
+    }
+
+    /** Reads a {@link SnapshotStatus} from x-content positioned on its object. */
+    public static SnapshotStatus fromXContent(XContentParser parser) throws IOException {
+        return PARSER.parse(parser, null);
+    }
+
private void updateShardStats() {
stats = new SnapshotStats();
shardsStats = new SnapshotShardsStats(shards);
@@ -225,4 +293,31 @@ public class SnapshotStatus implements ToXContentObject, Streamable {
stats.add(shard.getStats());
}
}
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) {
+            return true;
+        }
+        if (o == null || getClass() != o.getClass()) {
+            return false;
+        }
+        SnapshotStatus other = (SnapshotStatus) o;
+        // state is an enum, so reference comparison is sufficient; all other fields are null-safe.
+        return (snapshot == null ? other.snapshot == null : snapshot.equals(other.snapshot))
+            && state == other.state
+            && (indicesStatus == null ? other.indicesStatus == null : indicesStatus.equals(other.indicesStatus))
+            && (shardsStats == null ? other.shardsStats == null : shardsStats.equals(other.shardsStats))
+            && (stats == null ? other.stats == null : stats.equals(other.stats))
+            && (includeGlobalState == null ? other.includeGlobalState == null
+                                           : includeGlobalState.equals(other.includeGlobalState));
+    }
+
+    @Override
+    public int hashCode() {
+        int result = snapshot == null ? 0 : snapshot.hashCode();
+        result = 31 * result + (state == null ? 0 : state.hashCode());
+        result = 31 * result + (indicesStatus == null ? 0 : indicesStatus.hashCode());
+        result = 31 * result + (shardsStats == null ? 0 : shardsStats.hashCode());
+        result = 31 * result + (stats == null ? 0 : stats.hashCode());
+        result = 31 * result + (includeGlobalState == null ? 0 : includeGlobalState.hashCode());
+        return result;
+    }
}
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java
index d44a490680c..ef1435e4108 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java
@@ -20,16 +20,21 @@
package org.elasticsearch.action.admin.cluster.snapshots.status;
import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+
/**
* Snapshot status response
*/
@@ -85,4 +90,33 @@ public class SnapshotsStatusResponse extends ActionResponse implements ToXConten
return builder;
}
+    /** Parses the top-level {@code {"snapshots": [...]}} response body. */
+    private static final ConstructingObjectParser<SnapshotsStatusResponse, Void> PARSER = new ConstructingObjectParser<>(
+        "snapshots_status_response", true,
+        (Object[] parsedObjects) -> {
+            @SuppressWarnings("unchecked") List<SnapshotStatus> snapshots = (List<SnapshotStatus>) parsedObjects[0];
+            return new SnapshotsStatusResponse(snapshots);
+        }
+    );
+    static {
+        PARSER.declareObjectArray(constructorArg(), SnapshotStatus.PARSER, new ParseField("snapshots"));
+    }
+
+    /** Reads a {@link SnapshotsStatusResponse} from x-content positioned on its object. */
+    public static SnapshotsStatusResponse fromXContent(XContentParser parser) throws IOException {
+        return PARSER.parse(parser, null);
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) {
+            return true;
+        }
+        if (o == null || getClass() != o.getClass()) {
+            return false;
+        }
+        SnapshotsStatusResponse other = (SnapshotsStatusResponse) o;
+        return snapshots == null ? other.snapshots == null : snapshots.equals(other.snapshots);
+    }
+
+    @Override
+    public int hashCode() {
+        return snapshots == null ? 0 : snapshots.hashCode();
+    }
}
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java
index d978e214fc9..90380205012 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java
@@ -23,7 +23,6 @@ import com.carrotsearch.hppc.LongArrayList;
import com.carrotsearch.hppc.cursors.IntObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
-
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.rollover.RolloverInfo;
import org.elasticsearch.action.support.ActiveShardCount;
@@ -56,10 +55,11 @@ import org.elasticsearch.index.Index;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.rest.RestStatus;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
@@ -1345,7 +1345,7 @@ public class IndexMetaData implements Diffable, ToXContentFragmen
}
Long creationDate = settings.getAsLong(SETTING_CREATION_DATE, null);
if (creationDate != null) {
- DateTime creationDateTime = new DateTime(creationDate, DateTimeZone.UTC);
+ ZonedDateTime creationDateTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(creationDate), ZoneOffset.UTC);
builder.put(SETTING_CREATION_DATE_STRING, creationDateTime.toString());
}
return builder.build();
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java
index be9db5262b0..b19d65090c6 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java
@@ -73,11 +73,10 @@ import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.InvalidIndexNameException;
import org.elasticsearch.indices.cluster.IndicesClusterStateService.AllocatedIndices.IndexRemovalReason;
import org.elasticsearch.threadpool.ThreadPool;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
import java.io.UnsupportedEncodingException;
import java.nio.file.Path;
+import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -383,7 +382,7 @@ public class MetaDataCreateIndexService extends AbstractComponent {
}
if (indexSettingsBuilder.get(SETTING_CREATION_DATE) == null) {
- indexSettingsBuilder.put(SETTING_CREATION_DATE, new DateTime(DateTimeZone.UTC).getMillis());
+ indexSettingsBuilder.put(SETTING_CREATION_DATE, Instant.now().toEpochMilli());
}
indexSettingsBuilder.put(IndexMetaData.SETTING_INDEX_PROVIDED_NAME, request.getProvidedName());
indexSettingsBuilder.put(SETTING_INDEX_UUID, UUIDs.randomBase64UUID());
diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java
index e781c979ed9..eef2ab55587 100644
--- a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java
+++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java
@@ -52,12 +52,6 @@ import static java.time.temporal.ChronoField.SECOND_OF_MINUTE;
public class DateFormatters {
- private static final DateTimeFormatter TIME_ZONE_FORMATTER = new DateTimeFormatterBuilder()
- .optionalStart().appendZoneId().optionalEnd()
- .optionalStart().appendOffset("+HHmm", "Z").optionalEnd()
- .optionalStart().appendOffset("+HH:mm", "Z").optionalEnd()
- .toFormatter(Locale.ROOT);
-
private static final DateTimeFormatter TIME_ZONE_FORMATTER_ZONE_ID = new DateTimeFormatterBuilder()
.appendZoneId()
.toFormatter(Locale.ROOT);
@@ -70,12 +64,80 @@ public class DateFormatters {
.appendOffset("+HH:mm", "Z")
.toFormatter(Locale.ROOT);
+ private static final DateTimeFormatter TIME_ZONE_FORMATTER = new DateTimeFormatterBuilder()
+ .optionalStart().appendZoneId().optionalEnd()
+ .optionalStart().appendOffset("+HHmm", "Z").optionalEnd()
+ .optionalStart().appendOffset("+HH:mm", "Z").optionalEnd()
+ .toFormatter(Locale.ROOT);
+
private static final DateTimeFormatter OPTIONAL_TIME_ZONE_FORMATTER = new DateTimeFormatterBuilder()
.optionalStart()
.append(TIME_ZONE_FORMATTER)
.optionalEnd()
.toFormatter(Locale.ROOT);
+ private static final DateTimeFormatter STRICT_YEAR_MONTH_DAY_FORMATTER = new DateTimeFormatterBuilder()
+ .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
+ .appendLiteral("-")
+ .appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE)
+ .appendLiteral('-')
+ .appendValue(DAY_OF_MONTH, 2, 2, SignStyle.NOT_NEGATIVE)
+ .toFormatter(Locale.ROOT);
+
+ private static final DateTimeFormatter STRICT_HOUR_MINUTE_SECOND_FORMATTER = new DateTimeFormatterBuilder()
+ .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
+ .appendLiteral(':')
+ .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
+ .appendLiteral(':')
+ .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
+ .toFormatter(Locale.ROOT);
+
+ private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER_1 = new DateTimeFormatterBuilder()
+ .append(STRICT_YEAR_MONTH_DAY_FORMATTER)
+ .optionalStart()
+ .appendLiteral('T')
+ .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER)
+ .optionalStart()
+ .appendFraction(MILLI_OF_SECOND, 3, 3, true)
+ .optionalEnd()
+ .optionalStart()
+ .append(TIME_ZONE_FORMATTER_WITHOUT_COLON)
+ .optionalEnd()
+ .optionalEnd()
+ .toFormatter(Locale.ROOT);
+
+ private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER_2 = new DateTimeFormatterBuilder()
+ .append(STRICT_YEAR_MONTH_DAY_FORMATTER)
+ .optionalStart()
+ .appendLiteral('T')
+ .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER)
+ .optionalStart()
+ .appendFraction(MILLI_OF_SECOND, 3, 3, true)
+ .optionalEnd()
+ .optionalStart()
+ .append(TIME_ZONE_FORMATTER_WITH_COLON)
+ .optionalEnd()
+ .optionalEnd()
+ .toFormatter(Locale.ROOT);
+
+ private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER_3 = new DateTimeFormatterBuilder()
+ .append(STRICT_YEAR_MONTH_DAY_FORMATTER)
+ .optionalStart()
+ .appendLiteral('T')
+ .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER)
+ .optionalStart()
+ .appendFraction(MILLI_OF_SECOND, 3, 3, true)
+ .optionalEnd()
+ .optionalStart()
+ .append(TIME_ZONE_FORMATTER_ZONE_ID)
+ .optionalEnd()
+ .optionalEnd()
+ .toFormatter(Locale.ROOT);
+
+ private static final CompoundDateTimeFormatter STRICT_DATE_OPTIONAL_TIME =
+ new CompoundDateTimeFormatter(STRICT_DATE_OPTIONAL_TIME_FORMATTER_1, STRICT_DATE_OPTIONAL_TIME_FORMATTER_2,
+ STRICT_DATE_OPTIONAL_TIME_FORMATTER_3);
+
private static final DateTimeFormatter BASIC_TIME_NO_MILLIS_FORMATTER = new DateTimeFormatterBuilder()
.appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
.appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
@@ -258,7 +320,8 @@ public class DateFormatters {
.append(OPTIONAL_TIME_ZONE_FORMATTER)
.toFormatter(Locale.ROOT));
- private static final CompoundDateTimeFormatter DATE_OPTIONAL_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder()
+ private static final CompoundDateTimeFormatter DATE_OPTIONAL_TIME = new CompoundDateTimeFormatter(STRICT_DATE_OPTIONAL_TIME.printer,
+ new DateTimeFormatterBuilder()
.append(DATE_FORMATTER)
.parseLenient()
.optionalStart()
@@ -560,14 +623,6 @@ public class DateFormatters {
private static final CompoundDateTimeFormatter STRICT_DATE_HOUR_MINUTE = new CompoundDateTimeFormatter(
DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm", Locale.ROOT));
- private static final DateTimeFormatter STRICT_YEAR_MONTH_DAY_FORMATTER = new DateTimeFormatterBuilder()
- .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
- .appendLiteral("-")
- .appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE)
- .appendLiteral('-')
- .appendValue(DAY_OF_MONTH, 2, 2, SignStyle.NOT_NEGATIVE)
- .toFormatter(Locale.ROOT);
-
private static final CompoundDateTimeFormatter STRICT_YEAR_MONTH_DAY = new CompoundDateTimeFormatter(STRICT_YEAR_MONTH_DAY_FORMATTER);
private static final CompoundDateTimeFormatter STRICT_YEAR_MONTH = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder()
@@ -580,14 +635,6 @@ public class DateFormatters {
.appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
.toFormatter(Locale.ROOT));
- private static final DateTimeFormatter STRICT_HOUR_MINUTE_SECOND_FORMATTER = new DateTimeFormatterBuilder()
- .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE)
- .appendLiteral(':')
- .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE)
- .appendLiteral(':')
- .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE)
- .toFormatter(Locale.ROOT);
-
private static final CompoundDateTimeFormatter STRICT_HOUR_MINUTE_SECOND =
new CompoundDateTimeFormatter(STRICT_HOUR_MINUTE_SECOND_FORMATTER);
@@ -601,18 +648,6 @@ public class DateFormatters {
.append(OPTIONAL_TIME_ZONE_FORMATTER)
.toFormatter(Locale.ROOT));
- private static final CompoundDateTimeFormatter STRICT_DATE_OPTIONAL_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder()
- .append(STRICT_YEAR_MONTH_DAY_FORMATTER)
- .optionalStart()
- .appendLiteral('T')
- .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER)
- .optionalStart()
- .appendFraction(MILLI_OF_SECOND, 3, 3, true)
- .optionalEnd()
- .append(OPTIONAL_TIME_ZONE_FORMATTER)
- .optionalEnd()
- .toFormatter(Locale.ROOT));
-
private static final CompoundDateTimeFormatter STRICT_ORDINAL_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter(
new DateTimeFormatterBuilder()
.appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
@@ -918,8 +953,8 @@ public class DateFormatters {
return forPattern(formats[0], locale);
} else {
Collection parsers = new LinkedHashSet<>(formats.length);
- for (int i = 0; i < formats.length; i++) {
- CompoundDateTimeFormatter dateTimeFormatter = forPattern(formats[i], locale);
+ for (String format : formats) {
+ CompoundDateTimeFormatter dateTimeFormatter = forPattern(format, locale);
try {
parsers.addAll(Arrays.asList(dateTimeFormatter.parsers));
} catch (IllegalArgumentException e) {
diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java
index 52da10a3785..3a76c7ca0c9 100644
--- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java
+++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java
@@ -45,9 +45,10 @@ import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.RestResponse;
import org.elasticsearch.rest.action.RestActionListener;
import org.elasticsearch.rest.action.RestResponseListener;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
+import java.time.Instant;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
@@ -379,7 +380,7 @@ public class RestIndicesAction extends AbstractCatAction {
table.addCell(primaryStats.getDocs() == null ? null : primaryStats.getDocs().getDeleted());
table.addCell(indexMetaData.getCreationDate());
- table.addCell(new DateTime(indexMetaData.getCreationDate(), DateTimeZone.UTC));
+ table.addCell(ZonedDateTime.ofInstant(Instant.ofEpochMilli(indexMetaData.getCreationDate()), ZoneOffset.UTC));
table.addCell(totalStats.getStore() == null ? null : totalStats.getStore().size());
table.addCell(primaryStats.getStore() == null ? null : primaryStats.getStore().size());
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatusTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatusTests.java
new file mode 100644
index 00000000000..490319ef840
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatusTests.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.admin.cluster.snapshots.status;
+
+import java.io.IOException;
+import java.util.function.Predicate;
+
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentParserUtils;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+public class SnapshotIndexShardStatusTests extends AbstractXContentTestCase {
+
+ @Override
+ protected SnapshotIndexShardStatus createTestInstance() {
+ return createForIndex(randomAlphaOfLength(10));
+ }
+
+ protected SnapshotIndexShardStatus createForIndex(String indexName) {
+ ShardId shardId = new ShardId(new Index(indexName, IndexMetaData.INDEX_UUID_NA_VALUE), randomIntBetween(0, 500));
+ SnapshotIndexShardStage stage = randomFrom(SnapshotIndexShardStage.values());
+ SnapshotStats stats = new SnapshotStatsTests().createTestInstance();
+ String nodeId = randomAlphaOfLength(20);
+ String failure = null;
+ if (rarely()) {
+ failure = randomAlphaOfLength(200);
+ }
+ return new SnapshotIndexShardStatus(shardId, stage, stats, nodeId, failure);
+ }
+
+ @Override
+ protected Predicate getRandomFieldsExcludeFilter() {
+ // Do not place random fields in the root object since its fields correspond to shard names.
+ return String::isEmpty;
+ }
+
+ @Override
+ protected SnapshotIndexShardStatus doParseInstance(XContentParser parser) throws IOException {
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser::getTokenLocation);
+ SnapshotIndexShardStatus status = SnapshotIndexShardStatus.fromXContent(parser, parser.currentName());
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser::getTokenLocation);
+ return status;
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+}
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatusTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatusTests.java
new file mode 100644
index 00000000000..92eb355f3a6
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatusTests.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.admin.cluster.snapshots.status;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.function.Predicate;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentParserUtils;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+
+public class SnapshotIndexStatusTests extends AbstractXContentTestCase {
+
+ @Override
+ protected SnapshotIndexStatus createTestInstance() {
+ String index = randomAlphaOfLength(10);
+ List shardStatuses = new ArrayList<>();
+ SnapshotIndexShardStatusTests builder = new SnapshotIndexShardStatusTests();
+        for (int idx = 0, numShards = randomIntBetween(0, 10); idx < numShards; idx++) {
+ shardStatuses.add(builder.createForIndex(index));
+ }
+ return new SnapshotIndexStatus(index, shardStatuses);
+ }
+
+ @Override
+ protected Predicate getRandomFieldsExcludeFilter() {
+ // Do not place random fields in the root object or the shards field since their fields correspond to names.
+ return (s) -> s.isEmpty() || s.endsWith("shards");
+ }
+
+ @Override
+ protected SnapshotIndexStatus doParseInstance(XContentParser parser) throws IOException {
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser::getTokenLocation);
+ SnapshotIndexStatus status = SnapshotIndexStatus.fromXContent(parser);
+ XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser::getTokenLocation);
+ return status;
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+}
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStatsTests.java
new file mode 100644
index 00000000000..ac00896983d
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStatsTests.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.admin.cluster.snapshots.status;
+
+import java.io.IOException;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+public class SnapshotShardsStatsTests extends AbstractXContentTestCase {
+
+ @Override
+ protected SnapshotShardsStats createTestInstance() {
+ int initializingShards = randomInt();
+ int startedShards = randomInt();
+ int finalizingShards = randomInt();
+ int doneShards = randomInt();
+ int failedShards = randomInt();
+ int totalShards = randomInt();
+ return new SnapshotShardsStats(initializingShards, startedShards, finalizingShards, doneShards, failedShards, totalShards);
+ }
+
+ @Override
+ protected SnapshotShardsStats doParseInstance(XContentParser parser) throws IOException {
+ return SnapshotShardsStats.fromXContent(parser);
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+}
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatsTests.java
new file mode 100644
index 00000000000..2822a9661fd
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatsTests.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.admin.cluster.snapshots.status;
+
+import java.io.IOException;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+public class SnapshotStatsTests extends AbstractXContentTestCase {
+
+ @Override
+ protected SnapshotStats createTestInstance() {
+ long startTime = randomNonNegativeLong();
+ long time = randomNonNegativeLong();
+ int incrementalFileCount = randomIntBetween(0, Integer.MAX_VALUE);
+ int totalFileCount = randomIntBetween(0, Integer.MAX_VALUE);
+ int processedFileCount = randomIntBetween(0, Integer.MAX_VALUE);
+ long incrementalSize = ((long)randomIntBetween(0, Integer.MAX_VALUE)) * 2;
+ long totalSize = ((long)randomIntBetween(0, Integer.MAX_VALUE)) * 2;
+ long processedSize = ((long)randomIntBetween(0, Integer.MAX_VALUE)) * 2;
+ return new SnapshotStats(startTime, time, incrementalFileCount, totalFileCount,
+ processedFileCount, incrementalSize, totalSize, processedSize);
+ }
+
+ @Override
+ protected SnapshotStats doParseInstance(XContentParser parser) throws IOException {
+ return SnapshotStats.fromXContent(parser);
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+}
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatusTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatusTests.java
index 3ece0f9f107..dbd45640c7b 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatusTests.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatusTests.java
@@ -21,16 +21,19 @@ package org.elasticsearch.action.admin.cluster.snapshots.status;
import org.elasticsearch.cluster.SnapshotsInProgress;
import org.elasticsearch.common.UUIDs;
+import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.snapshots.Snapshot;
import org.elasticsearch.snapshots.SnapshotId;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.AbstractXContentTestCase;
+import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
+import java.util.function.Predicate;
-public class SnapshotStatusTests extends ESTestCase {
+public class SnapshotStatusTests extends AbstractXContentTestCase {
public void testToString() throws Exception {
@@ -146,4 +149,39 @@ public class SnapshotStatusTests extends ESTestCase {
"}";
assertEquals(expected, status.toString());
}
+
+ @Override
+ protected SnapshotStatus createTestInstance() {
+ SnapshotsInProgress.State state = randomFrom(SnapshotsInProgress.State.values());
+ String uuid = UUIDs.randomBase64UUID();
+ SnapshotId id = new SnapshotId("test-snap", uuid);
+ Snapshot snapshot = new Snapshot("test-repo", id);
+
+ SnapshotIndexShardStatusTests builder = new SnapshotIndexShardStatusTests();
+ builder.createTestInstance();
+
+ List snapshotIndexShardStatuses = new ArrayList<>();
+        for (int idx = 0, numShards = randomIntBetween(0, 10); idx < numShards; idx++) {
+ SnapshotIndexShardStatus snapshotIndexShardStatus = builder.createTestInstance();
+ snapshotIndexShardStatuses.add(snapshotIndexShardStatus);
+ }
+ boolean includeGlobalState = randomBoolean();
+ return new SnapshotStatus(snapshot, state, snapshotIndexShardStatuses, includeGlobalState);
+ }
+
+ @Override
+ protected Predicate getRandomFieldsExcludeFilter() {
+ // Do not place random fields in the indices field or shards field since their fields correspond to names.
+ return (s) -> s.endsWith("shards") || s.endsWith("indices");
+ }
+
+ @Override
+ protected SnapshotStatus doParseInstance(XContentParser parser) throws IOException {
+ return SnapshotStatus.fromXContent(parser);
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
}
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponseTests.java
new file mode 100644
index 00000000000..d1ad028296d
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponseTests.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.admin.cluster.snapshots.status;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.function.Predicate;
+
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+public class SnapshotsStatusResponseTests extends AbstractXContentTestCase {
+
+ @Override
+ protected SnapshotsStatusResponse doParseInstance(XContentParser parser) throws IOException {
+ return SnapshotsStatusResponse.fromXContent(parser);
+ }
+
+ @Override
+ protected Predicate getRandomFieldsExcludeFilter() {
+ // Do not place random fields in the indices field or shards field since their fields correspond to names.
+ return (s) -> s.endsWith("shards") || s.endsWith("indices");
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+
+ @Override
+ protected SnapshotsStatusResponse createTestInstance() {
+ SnapshotStatusTests statusBuilder = new SnapshotStatusTests();
+ List snapshotStatuses = new ArrayList<>();
+        for (int idx = 0, numStatuses = randomIntBetween(0, 5); idx < numStatuses; idx++) {
+ snapshotStatuses.add(statusBuilder.createTestInstance());
+ }
+ return new SnapshotsStatusResponse(snapshotStatuses);
+ }
+}
diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java
index 4d86dbbc51f..5379769e819 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java
@@ -25,16 +25,16 @@ import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-import org.joda.time.format.DateTimeFormat;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
@@ -197,8 +197,8 @@ public class RolloverIT extends ESIntegTestCase {
}
public void testRolloverWithDateMath() {
- DateTime now = new DateTime(DateTimeZone.UTC);
- String index = "test-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(now) + "-1";
+ ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
+ String index = "test-" + DateFormatters.forPattern("YYYY.MM.dd").format(now) + "-1";
String dateMathExp = "";
assertAcked(prepareCreate(dateMathExp).addAlias(new Alias("test_alias")).get());
ensureGreen(index);
@@ -212,14 +212,14 @@ public class RolloverIT extends ESIntegTestCase {
ensureGreen(index);
RolloverResponse response = client().admin().indices().prepareRolloverIndex("test_alias").get();
assertThat(response.getOldIndex(), equalTo(index));
- assertThat(response.getNewIndex(), equalTo("test-" + DateTimeFormat.forPattern("YYYY.MM").print(now) + "-000002"));
+ assertThat(response.getNewIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM").format(now) + "-000002"));
assertThat(response.isDryRun(), equalTo(false));
assertThat(response.isRolledOver(), equalTo(true));
assertThat(response.getConditionStatus().size(), equalTo(0));
response = client().admin().indices().prepareRolloverIndex("test_alias").get();
- assertThat(response.getOldIndex(), equalTo("test-" + DateTimeFormat.forPattern("YYYY.MM").print(now) + "-000002"));
- assertThat(response.getNewIndex(), equalTo("test-" + DateTimeFormat.forPattern("YYYY.MM").print(now) + "-000003"));
+ assertThat(response.getOldIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM").format(now) + "-000002"));
+ assertThat(response.getNewIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM").format(now) + "-000003"));
assertThat(response.isDryRun(), equalTo(false));
assertThat(response.isRolledOver(), equalTo(true));
assertThat(response.getConditionStatus().size(), equalTo(0));
@@ -232,8 +232,8 @@ public class RolloverIT extends ESIntegTestCase {
IndexMetaData.SETTING_INDEX_PROVIDED_NAME));
response = client().admin().indices().prepareRolloverIndex("test_alias").setNewIndexName("").get();
- assertThat(response.getOldIndex(), equalTo("test-" + DateTimeFormat.forPattern("YYYY.MM").print(now) + "-000003"));
- assertThat(response.getNewIndex(), equalTo("test-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(now) + "-000004"));
+ assertThat(response.getOldIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM").format(now) + "-000003"));
+ assertThat(response.getNewIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM.dd").format(now) + "-000004"));
assertThat(response.isDryRun(), equalTo(false));
assertThat(response.isRolledOver(), equalTo(true));
assertThat(response.getConditionStatus().size(), equalTo(0));
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/HumanReadableIndexSettingsTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/HumanReadableIndexSettingsTests.java
index 9be087e0e5d..83c615e4882 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/HumanReadableIndexSettingsTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/HumanReadableIndexSettingsTests.java
@@ -22,8 +22,10 @@ package org.elasticsearch.cluster.metadata;
import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
+
+import java.time.Instant;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import static org.elasticsearch.test.VersionUtils.randomVersion;
@@ -42,6 +44,7 @@ public class HumanReadableIndexSettingsTests extends ESTestCase {
assertEquals(versionCreated.toString(), humanSettings.get(IndexMetaData.SETTING_VERSION_CREATED_STRING, null));
assertEquals(versionUpgraded.toString(), humanSettings.get(IndexMetaData.SETTING_VERSION_UPGRADED_STRING, null));
- assertEquals(new DateTime(created, DateTimeZone.UTC).toString(), humanSettings.get(IndexMetaData.SETTING_CREATION_DATE_STRING, null));
+ ZonedDateTime creationDate = ZonedDateTime.ofInstant(Instant.ofEpochMilli(created), ZoneOffset.UTC);
+ assertEquals(creationDate.toString(), humanSettings.get(IndexMetaData.SETTING_CREATION_DATE_STRING, null));
}
}
diff --git a/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java b/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java
index 7c6f0872288..d6f733d7c1c 100644
--- a/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java
+++ b/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java
@@ -23,7 +23,9 @@ import org.elasticsearch.common.time.CompoundDateTimeFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.test.ESTestCase;
import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;
+import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
@@ -354,11 +356,109 @@ public class JavaJodaTimeDuellingTests extends ESTestCase {
assertParseException("2012-W1-1", "strict_weekyear_week_day");
}
+ public void testSamePrinterOutput() {
+ int year = randomIntBetween(1970, 2030);
+ int month = randomIntBetween(1, 12);
+ int day = randomIntBetween(1, 28);
+ int hour = randomIntBetween(0, 23);
+ int minute = randomIntBetween(0, 59);
+ int second = randomIntBetween(0, 59);
+
+ ZonedDateTime javaDate = ZonedDateTime.of(year, month, day, hour, minute, second, 0, ZoneOffset.UTC);
+ DateTime jodaDate = new DateTime(year, month, day, hour, minute, second, DateTimeZone.UTC);
+
+ assertSamePrinterOutput("basicDate", javaDate, jodaDate);
+ assertSamePrinterOutput("basicDateTime", javaDate, jodaDate);
+ assertSamePrinterOutput("basicDateTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("basicOrdinalDate", javaDate, jodaDate);
+ assertSamePrinterOutput("basicOrdinalDateTime", javaDate, jodaDate);
+ assertSamePrinterOutput("basicOrdinalDateTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("basicTime", javaDate, jodaDate);
+ assertSamePrinterOutput("basicTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("basicTTime", javaDate, jodaDate);
+ assertSamePrinterOutput("basicTTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("basicWeekDate", javaDate, jodaDate);
+ assertSamePrinterOutput("basicWeekDateTime", javaDate, jodaDate);
+ assertSamePrinterOutput("basicWeekDateTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("date", javaDate, jodaDate);
+ assertSamePrinterOutput("dateHour", javaDate, jodaDate);
+ assertSamePrinterOutput("dateHourMinute", javaDate, jodaDate);
+ assertSamePrinterOutput("dateHourMinuteSecond", javaDate, jodaDate);
+ assertSamePrinterOutput("dateHourMinuteSecondFraction", javaDate, jodaDate);
+ assertSamePrinterOutput("dateHourMinuteSecondMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("dateOptionalTime", javaDate, jodaDate);
+ assertSamePrinterOutput("dateTime", javaDate, jodaDate);
+ assertSamePrinterOutput("dateTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("hour", javaDate, jodaDate);
+ assertSamePrinterOutput("hourMinute", javaDate, jodaDate);
+ assertSamePrinterOutput("hourMinuteSecond", javaDate, jodaDate);
+ assertSamePrinterOutput("hourMinuteSecondFraction", javaDate, jodaDate);
+ assertSamePrinterOutput("hourMinuteSecondMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("ordinalDate", javaDate, jodaDate);
+ assertSamePrinterOutput("ordinalDateTime", javaDate, jodaDate);
+ assertSamePrinterOutput("ordinalDateTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("time", javaDate, jodaDate);
+ assertSamePrinterOutput("timeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("tTime", javaDate, jodaDate);
+ assertSamePrinterOutput("tTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("weekDate", javaDate, jodaDate);
+ assertSamePrinterOutput("weekDateTime", javaDate, jodaDate);
+ assertSamePrinterOutput("weekDateTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("weekyear", javaDate, jodaDate);
+ assertSamePrinterOutput("weekyearWeek", javaDate, jodaDate);
+ assertSamePrinterOutput("weekyearWeekDay", javaDate, jodaDate);
+ assertSamePrinterOutput("year", javaDate, jodaDate);
+ assertSamePrinterOutput("yearMonth", javaDate, jodaDate);
+ assertSamePrinterOutput("yearMonthDay", javaDate, jodaDate);
+ assertSamePrinterOutput("epoch_second", javaDate, jodaDate);
+ assertSamePrinterOutput("epoch_millis", javaDate, jodaDate);
+ assertSamePrinterOutput("strictBasicWeekDate", javaDate, jodaDate);
+ assertSamePrinterOutput("strictBasicWeekDateTime", javaDate, jodaDate);
+ assertSamePrinterOutput("strictBasicWeekDateTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("strictDate", javaDate, jodaDate);
+ assertSamePrinterOutput("strictDateHour", javaDate, jodaDate);
+ assertSamePrinterOutput("strictDateHourMinute", javaDate, jodaDate);
+ assertSamePrinterOutput("strictDateHourMinuteSecond", javaDate, jodaDate);
+ assertSamePrinterOutput("strictDateHourMinuteSecondFraction", javaDate, jodaDate);
+ assertSamePrinterOutput("strictDateHourMinuteSecondMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("strictDateOptionalTime", javaDate, jodaDate);
+ assertSamePrinterOutput("strictDateTime", javaDate, jodaDate);
+ assertSamePrinterOutput("strictDateTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("strictHour", javaDate, jodaDate);
+ assertSamePrinterOutput("strictHourMinute", javaDate, jodaDate);
+ assertSamePrinterOutput("strictHourMinuteSecond", javaDate, jodaDate);
+ assertSamePrinterOutput("strictHourMinuteSecondFraction", javaDate, jodaDate);
+ assertSamePrinterOutput("strictHourMinuteSecondMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("strictOrdinalDate", javaDate, jodaDate);
+ assertSamePrinterOutput("strictOrdinalDateTime", javaDate, jodaDate);
+ assertSamePrinterOutput("strictOrdinalDateTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("strictTime", javaDate, jodaDate);
+ assertSamePrinterOutput("strictTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("strictTTime", javaDate, jodaDate);
+ assertSamePrinterOutput("strictTTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("strictWeekDate", javaDate, jodaDate);
+ assertSamePrinterOutput("strictWeekDateTime", javaDate, jodaDate);
+ assertSamePrinterOutput("strictWeekDateTimeNoMillis", javaDate, jodaDate);
+ assertSamePrinterOutput("strictWeekyear", javaDate, jodaDate);
+ assertSamePrinterOutput("strictWeekyearWeek", javaDate, jodaDate);
+ assertSamePrinterOutput("strictWeekyearWeekDay", javaDate, jodaDate);
+ assertSamePrinterOutput("strictYear", javaDate, jodaDate);
+ assertSamePrinterOutput("strictYearMonth", javaDate, jodaDate);
+ assertSamePrinterOutput("strictYearMonthDay", javaDate, jodaDate);
+ }
+
public void testSeveralTimeFormats() {
assertSameDate("2018-12-12", "year_month_day||ordinal_date");
assertSameDate("2018-128", "year_month_day||ordinal_date");
}
+ private void assertSamePrinterOutput(String format, ZonedDateTime javaDate, DateTime jodaDate) {
+ assertThat(jodaDate.getMillis(), is(javaDate.toInstant().toEpochMilli()));
+ String javaTimeOut = DateFormatters.forPattern(format).format(javaDate);
+ String jodaTimeOut = Joda.forPattern(format).printer().print(jodaDate);
+ assertThat(javaTimeOut, is(jodaTimeOut));
+ }
+
private void assertSameDate(String input, String format) {
FormatDateTimeFormatter jodaFormatter = Joda.forPattern(format);
DateTime jodaDateTime = jodaFormatter.parser().parseDateTime(input);
diff --git a/server/src/test/java/org/elasticsearch/explain/ExplainActionIT.java b/server/src/test/java/org/elasticsearch/explain/ExplainActionIT.java
index 6d8e1a41c5b..229cb99fbfb 100644
--- a/server/src/test/java/org/elasticsearch/explain/ExplainActionIT.java
+++ b/server/src/test/java/org/elasticsearch/explain/ExplainActionIT.java
@@ -28,12 +28,12 @@ import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.test.ESIntegTestCase;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-import org.joda.time.format.ISODateTimeFormat;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
@@ -249,8 +249,9 @@ public class ExplainActionIT extends ESIntegTestCase {
public void testExplainDateRangeInQueryString() {
createIndex("test");
- String aMonthAgo = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).minusMonths(1));
- String aMonthFromNow = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).plusMonths(1));
+ ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
+ String aMonthAgo = DateTimeFormatter.ISO_LOCAL_DATE.format(now.minusMonths(1));
+ String aMonthFromNow = DateTimeFormatter.ISO_LOCAL_DATE.format(now.plusMonths(1));
client().prepareIndex("test", "type", "1").setSource("past", aMonthAgo, "future", aMonthFromNow).get();
diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java
index 70a633f02f4..0f12305f239 100644
--- a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java
+++ b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java
@@ -30,11 +30,11 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInter
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;
import org.elasticsearch.test.ESIntegTestCase;
-import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
-import org.joda.time.chrono.ISOChronology;
-import org.joda.time.format.DateTimeFormat;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
import java.util.List;
import static org.elasticsearch.search.aggregations.AggregationBuilders.dateHistogram;
@@ -255,7 +255,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
.setSettings(settings).get());
assertAcked(client.admin().indices().prepareCreate("index-3").addMapping("type", "d", "type=date")
.setSettings(settings).get());
- DateTime now = new DateTime(ISOChronology.getInstanceUTC());
+ ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
indexRandom(true, client.prepareIndex("index-1", "type", "1").setSource("d", now),
client.prepareIndex("index-1", "type", "2").setSource("d", now.minusDays(1)),
client.prepareIndex("index-1", "type", "3").setSource("d", now.minusDays(2)),
@@ -456,9 +456,9 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
.setSettings(settings)
.addAlias(new Alias("last_week").filter(QueryBuilders.rangeQuery("created_at").gte("now-7d/d")))
.get());
- DateTime now = new DateTime(DateTimeZone.UTC);
+ ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
client.prepareIndex("index", "type", "1").setRouting("1").setSource("created_at",
- DateTimeFormat.forPattern("YYYY-MM-dd").print(now)).get();
+ DateTimeFormatter.ISO_LOCAL_DATE.format(now)).get();
refresh();
assertThat(client.admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java
index af1104879e9..4a85c2c1453 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java
@@ -22,10 +22,10 @@ package org.elasticsearch.search.aggregations.bucket;
import com.carrotsearch.hppc.LongHashSet;
import com.carrotsearch.hppc.LongSet;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
-
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
@@ -40,10 +40,9 @@ import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.test.ESIntegTestCase;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-import org.joda.time.format.DateTimeFormat;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
@@ -123,8 +122,9 @@ public class MinDocCountIT extends AbstractTermsTestCase {
longTerm = randomInt(cardinality * 2);
} while (!longTerms.add(longTerm));
double doubleTerm = longTerm * Math.PI;
- String dateTerm = DateTimeFormat.forPattern("yyyy-MM-dd")
- .print(new DateTime(2014, 1, ((int) longTerm % 20) + 1, 0, 0, DateTimeZone.UTC));
+
+ ZonedDateTime time = ZonedDateTime.of(2014, 1, ((int) longTerm % 20) + 1, 0, 0, 0, 0, ZoneOffset.UTC);
+ String dateTerm = DateFormatters.forPattern("yyyy-MM-dd").format(time);
final int frequency = randomBoolean() ? 1 : randomIntBetween(2, 20);
for (int j = 0; j < frequency; ++j) {
indexRequests.add(client().prepareIndex("idx", "type").setSource(jsonBuilder()
diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
index 35c5a19cc2e..e5af22cd2ae 100644
--- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
+++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
@@ -63,10 +63,10 @@ import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.MockKeywordPlugin;
import org.hamcrest.Matcher;
import org.hamcrest.Matchers;
-import org.joda.time.DateTime;
-import org.joda.time.chrono.ISOChronology;
import java.io.IOException;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
@@ -2865,7 +2865,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
"field", "type=text,store=true,term_vector=with_positions_offsets")
.setSettings(Settings.builder().put("index.number_of_replicas", 0).put("index.number_of_shards", 2))
.get());
- DateTime now = new DateTime(ISOChronology.getInstanceUTC());
+ ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
indexRandom(true, client().prepareIndex("index-1", "type", "1").setSource("d", now, "field", "hello world"),
client().prepareIndex("index-1", "type", "2").setSource("d", now.minusDays(1), "field", "hello"),
client().prepareIndex("index-1", "type", "3").setSource("d", now.minusDays(2), "field", "world"));
diff --git a/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java b/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java
index ab5387b6e3f..452c00b9906 100644
--- a/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java
+++ b/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java
@@ -28,8 +28,8 @@ import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.document.DocumentField;
-import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
@@ -48,8 +48,9 @@ import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
-import org.joda.time.ReadableDateTime;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
@@ -546,6 +547,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).execute().actionGet();
+ ZonedDateTime date = ZonedDateTime.of(2012, 3, 22, 0, 0, 0, 0, ZoneOffset.UTC);
client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
.field("byte_field", (byte) 1)
.field("short_field", (short) 2)
@@ -553,7 +555,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
.field("long_field", 4L)
.field("float_field", 5.0f)
.field("double_field", 6.0d)
- .field("date_field", Joda.forPattern("dateOptionalTime").printer().print(new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC)))
+ .field("date_field", DateFormatters.forPattern("dateOptionalTime").format(date))
.field("boolean_field", true)
.field("binary_field", Base64.getEncoder().encodeToString("testing text".getBytes("UTF-8")))
.endObject()).execute().actionGet();
@@ -578,7 +580,6 @@ public class SearchFieldsIT extends ESIntegTestCase {
assertThat(fields, equalTo(newHashSet("byte_field", "short_field", "integer_field", "long_field",
"float_field", "double_field", "date_field", "boolean_field", "binary_field")));
-
SearchHit searchHit = searchResponse.getHits().getAt(0);
assertThat(searchHit.getFields().get("byte_field").getValue().toString(), equalTo("1"));
assertThat(searchHit.getFields().get("short_field").getValue().toString(), equalTo("2"));
@@ -586,7 +587,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
assertThat(searchHit.getFields().get("long_field").getValue(), equalTo((Object) 4L));
assertThat(searchHit.getFields().get("float_field").getValue(), equalTo((Object) 5.0f));
assertThat(searchHit.getFields().get("double_field").getValue(), equalTo((Object) 6.0d));
- String dateTime = Joda.forPattern("dateOptionalTime").printer().print(new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC));
+ String dateTime = DateFormatters.forPattern("dateOptionalTime").format(date);
assertThat(searchHit.getFields().get("date_field").getValue(), equalTo((Object) dateTime));
assertThat(searchHit.getFields().get("boolean_field").getValue(), equalTo((Object) Boolean.TRUE));
assertThat(searchHit.getFields().get("binary_field").getValue(), equalTo(new BytesArray("testing text" .getBytes("UTF8"))));
@@ -756,7 +757,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).execute().actionGet();
- ReadableDateTime date = new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC);
+ ZonedDateTime date = ZonedDateTime.of(2012, 3, 22, 0, 0, 0, 0, ZoneOffset.UTC);
client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
.field("text_field", "foo")
.field("keyword_field", "foo")
@@ -766,7 +767,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
.field("long_field", 4L)
.field("float_field", 5.0f)
.field("double_field", 6.0d)
- .field("date_field", Joda.forPattern("dateOptionalTime").printer().print(date))
+ .field("date_field", DateFormatters.forPattern("dateOptionalTime").format(date))
.field("boolean_field", true)
.field("binary_field", new byte[] {42, 100})
.field("ip_field", "::1")
@@ -802,7 +803,8 @@ public class SearchFieldsIT extends ESIntegTestCase {
assertThat(searchResponse.getHits().getAt(0).getFields().get("long_field").getValue(), equalTo((Object) 4L));
assertThat(searchResponse.getHits().getAt(0).getFields().get("float_field").getValue(), equalTo((Object) 5.0));
assertThat(searchResponse.getHits().getAt(0).getFields().get("double_field").getValue(), equalTo((Object) 6.0d));
- assertThat(searchResponse.getHits().getAt(0).getFields().get("date_field").getValue(), equalTo(date));
+ assertThat(searchResponse.getHits().getAt(0).getFields().get("date_field").getValue(),
+ equalTo(new DateTime(date.toInstant().toEpochMilli(), DateTimeZone.UTC)));
assertThat(searchResponse.getHits().getAt(0).getFields().get("boolean_field").getValue(), equalTo((Object) true));
assertThat(searchResponse.getHits().getAt(0).getFields().get("text_field").getValue(), equalTo("foo"));
assertThat(searchResponse.getHits().getAt(0).getFields().get("keyword_field").getValue(), equalTo("foo"));
@@ -839,7 +841,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
assertThat(searchResponse.getHits().getAt(0).getFields().get("float_field").getValue(), equalTo((Object) 5.0));
assertThat(searchResponse.getHits().getAt(0).getFields().get("double_field").getValue(), equalTo((Object) 6.0d));
assertThat(searchResponse.getHits().getAt(0).getFields().get("date_field").getValue(),
- equalTo(Joda.forPattern("dateOptionalTime").printer().print(date)));
+ equalTo(DateFormatters.forPattern("dateOptionalTime").format(date)));
assertThat(searchResponse.getHits().getAt(0).getFields().get("boolean_field").getValue(), equalTo((Object) true));
assertThat(searchResponse.getHits().getAt(0).getFields().get("text_field").getValue(), equalTo("foo"));
assertThat(searchResponse.getHits().getAt(0).getFields().get("keyword_field").getValue(), equalTo("foo"));
@@ -869,7 +871,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
assertThat(searchResponse.getHits().getAt(0).getFields().get("float_field").getValue(), equalTo("5.0"));
assertThat(searchResponse.getHits().getAt(0).getFields().get("double_field").getValue(), equalTo("6.0"));
assertThat(searchResponse.getHits().getAt(0).getFields().get("date_field").getValue(),
- equalTo(Joda.forPattern("epoch_millis").printer().print(date)));
+ equalTo(DateFormatters.forPattern("epoch_millis").format(date)));
}
public void testScriptFields() throws Exception {
diff --git a/server/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java b/server/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java
index d6acdf11cb2..a21893db392 100644
--- a/server/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java
+++ b/server/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java
@@ -43,9 +43,9 @@ import org.elasticsearch.search.SearchHits;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.VersionUtils;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -562,27 +562,27 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
}
public void testDateWithoutOrigin() throws Exception {
- DateTime dt = new DateTime(DateTimeZone.UTC);
+ ZonedDateTime dt = ZonedDateTime.now(ZoneOffset.UTC);
assertAcked(prepareCreate("test").addMapping(
"type1",
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "text")
.endObject().startObject("num1").field("type", "date").endObject().endObject().endObject().endObject()));
- DateTime docDate = dt.minusDays(1);
- String docDateString = docDate.getYear() + "-" + String.format(Locale.ROOT, "%02d", docDate.getMonthOfYear()) + "-"
+ ZonedDateTime docDate = dt.minusDays(1);
+ String docDateString = docDate.getYear() + "-" + String.format(Locale.ROOT, "%02d", docDate.getMonthValue()) + "-"
+ String.format(Locale.ROOT, "%02d", docDate.getDayOfMonth());
client().index(
indexRequest("test").type("type1").id("1")
.source(jsonBuilder().startObject().field("test", "value").field("num1", docDateString).endObject())).actionGet();
docDate = dt.minusDays(2);
- docDateString = docDate.getYear() + "-" + String.format(Locale.ROOT, "%02d", docDate.getMonthOfYear()) + "-"
+ docDateString = docDate.getYear() + "-" + String.format(Locale.ROOT, "%02d", docDate.getMonthValue()) + "-"
+ String.format(Locale.ROOT, "%02d", docDate.getDayOfMonth());
client().index(
indexRequest("test").type("type1").id("2")
.source(jsonBuilder().startObject().field("test", "value").field("num1", docDateString).endObject())).actionGet();
docDate = dt.minusDays(3);
- docDateString = docDate.getYear() + "-" + String.format(Locale.ROOT, "%02d", docDate.getMonthOfYear()) + "-"
+ docDateString = docDate.getYear() + "-" + String.format(Locale.ROOT, "%02d", docDate.getMonthValue()) + "-"
+ String.format(Locale.ROOT, "%02d", docDate.getDayOfMonth());
client().index(
indexRequest("test").type("type1").id("3")
diff --git a/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java
index eab3a6e9b48..be71867edd2 100644
--- a/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java
+++ b/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java
@@ -52,6 +52,9 @@ import org.joda.time.DateTimeZone;
import org.joda.time.format.ISODateTimeFormat;
import java.io.IOException;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
import java.util.Collection;
import java.util.Collections;
import java.util.Random;
@@ -480,8 +483,9 @@ public class SearchQueryIT extends ESIntegTestCase {
"type", "past", "type=date", "future", "type=date"
));
- String aMonthAgo = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).minusMonths(1));
- String aMonthFromNow = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).plusMonths(1));
+ ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
+ String aMonthAgo = DateTimeFormatter.ISO_LOCAL_DATE.format(now.minusMonths(1));
+ String aMonthFromNow = DateTimeFormatter.ISO_LOCAL_DATE.format(now.plusMonths(1));
client().prepareIndex("test", "type", "1").setSource("past", aMonthAgo, "future", aMonthFromNow).get();
refresh();
diff --git a/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java b/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java
index 8b3aff90e8d..cdbc2c702d8 100644
--- a/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java
+++ b/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java
@@ -35,12 +35,13 @@ import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
import org.hamcrest.Matcher;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-import org.joda.time.format.ISODateTimeFormat;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
+import java.time.ZoneOffset;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
+import java.time.temporal.ChronoUnit;
import java.util.Arrays;
import java.util.List;
@@ -124,8 +125,9 @@ public class SimpleValidateQueryIT extends ESIntegTestCase {
.put(indexSettings())
.put("index.number_of_shards", 1)));
- String aMonthAgo = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).minusMonths(1));
- String aMonthFromNow = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).plusMonths(1));
+ ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
+ String aMonthAgo = DateTimeFormatter.ISO_LOCAL_DATE.format(now.minus(1, ChronoUnit.MONTHS));
+ String aMonthFromNow = DateTimeFormatter.ISO_LOCAL_DATE.format(now.plus(1, ChronoUnit.MONTHS));
client().prepareIndex("test", "type", "1").setSource("past", aMonthAgo, "future", aMonthFromNow).get();
@@ -137,10 +139,10 @@ public class SimpleValidateQueryIT extends ESIntegTestCase {
assertNoFailures(response);
assertThat(response.getQueryExplanation().size(), equalTo(1));
assertThat(response.getQueryExplanation().get(0).getError(), nullValue());
- DateTime twoMonthsAgo = new DateTime(DateTimeZone.UTC).minusMonths(2).withTimeAtStartOfDay();
- DateTime now = new DateTime(DateTimeZone.UTC).plusDays(1).withTimeAtStartOfDay().minusMillis(1);
- assertThat(response.getQueryExplanation().get(0).getExplanation(),
- equalTo("past:[" + twoMonthsAgo.getMillis() + " TO " + now.getMillis() + "]"));
+
+ long twoMonthsAgo = now.minus(2, ChronoUnit.MONTHS).truncatedTo(ChronoUnit.DAYS).toEpochSecond() * 1000;
+ long rangeEnd = (now.plus(1, ChronoUnit.DAYS).truncatedTo(ChronoUnit.DAYS).toEpochSecond() * 1000) - 1;
+ assertThat(response.getQueryExplanation().get(0).getExplanation(), equalTo("past:[" + twoMonthsAgo + " TO " + rangeEnd + "]"));
assertThat(response.isValid(), equalTo(true));
}
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
index 81a9598496b..937adddf3a4 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
@@ -21,12 +21,6 @@ package org.elasticsearch.test.rest;
import org.apache.http.Header;
import org.apache.http.HttpHost;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.methods.HttpHead;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.client.methods.HttpPut;
-import org.apache.http.entity.ContentType;
-import org.apache.http.entity.StringEntity;
import org.apache.http.message.BasicHeader;
import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy;
import org.apache.http.ssl.SSLContexts;
@@ -68,16 +62,12 @@ import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException;
import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
-import static java.util.Collections.emptyMap;
-import static java.util.Collections.singletonMap;
import static java.util.Collections.sort;
import static java.util.Collections.unmodifiableList;
import static org.hamcrest.Matchers.anyOf;
@@ -307,25 +297,25 @@ public abstract class ESRestTestCase extends ESTestCase {
* the snapshots intact in the repository.
*/
private void wipeSnapshots() throws IOException {
- for (Map.Entry repo : entityAsMap(adminClient.performRequest("GET", "_snapshot/_all")).entrySet()) {
+ for (Map.Entry repo : entityAsMap(adminClient.performRequest(new Request("GET", "/_snapshot/_all"))).entrySet()) {
String repoName = repo.getKey();
Map, ?> repoSpec = (Map, ?>) repo.getValue();
String repoType = (String) repoSpec.get("type");
if (false == preserveSnapshotsUponCompletion() && repoType.equals("fs")) {
// All other repo types we really don't have a chance of being able to iterate properly, sadly.
- String url = "_snapshot/" + repoName + "/_all";
- Map params = singletonMap("ignore_unavailable", "true");
- List> snapshots = (List>) entityAsMap(adminClient.performRequest("GET", url, params)).get("snapshots");
+ Request listRequest = new Request("GET", "/_snapshot/" + repoName + "/_all");
+ listRequest.addParameter("ignore_unavailable", "true");
+ List> snapshots = (List>) entityAsMap(adminClient.performRequest(listRequest)).get("snapshots");
for (Object snapshot : snapshots) {
Map, ?> snapshotInfo = (Map, ?>) snapshot;
String name = (String) snapshotInfo.get("snapshot");
logger.debug("wiping snapshot [{}/{}]", repoName, name);
- adminClient().performRequest("DELETE", "_snapshot/" + repoName + "/" + name);
+ adminClient().performRequest(new Request("DELETE", "/_snapshot/" + repoName + "/" + name));
}
}
if (preserveReposUponCompletion() == false) {
logger.debug("wiping snapshot repository [{}]", repoName);
- adminClient().performRequest("DELETE", "_snapshot/" + repoName);
+ adminClient().performRequest(new Request("DELETE", "/_snapshot/" + repoName));
}
}
}
@@ -334,7 +324,7 @@ public abstract class ESRestTestCase extends ESTestCase {
* Remove any cluster settings.
*/
private void wipeClusterSettings() throws IOException {
- Map, ?> getResponse = entityAsMap(adminClient().performRequest("GET", "/_cluster/settings"));
+ Map, ?> getResponse = entityAsMap(adminClient().performRequest(new Request("GET", "/_cluster/settings")));
boolean mustClear = false;
XContentBuilder clearCommand = JsonXContent.contentBuilder();
@@ -355,8 +345,9 @@ public abstract class ESRestTestCase extends ESTestCase {
clearCommand.endObject();
if (mustClear) {
- adminClient().performRequest("PUT", "/_cluster/settings", emptyMap(), new StringEntity(
- Strings.toString(clearCommand), ContentType.APPLICATION_JSON));
+ Request request = new Request("PUT", "/_cluster/settings");
+ request.setJsonEntity(Strings.toString(clearCommand));
+ adminClient().performRequest(request);
}
}
@@ -365,7 +356,7 @@ public abstract class ESRestTestCase extends ESTestCase {
* other tests.
*/
private void logIfThereAreRunningTasks() throws InterruptedException, IOException {
- Set runningTasks = runningTasks(adminClient().performRequest("GET", "_tasks"));
+ Set runningTasks = runningTasks(adminClient().performRequest(new Request("GET", "/_tasks")));
// Ignore the task list API - it doesn't count against us
runningTasks.remove(ListTasksAction.NAME);
runningTasks.remove(ListTasksAction.NAME + "[n]");
@@ -389,7 +380,7 @@ public abstract class ESRestTestCase extends ESTestCase {
private void waitForClusterStateUpdatesToFinish() throws Exception {
assertBusy(() -> {
try {
- Response response = adminClient().performRequest("GET", "_cluster/pending_tasks");
+ Response response = adminClient().performRequest(new Request("GET", "/_cluster/pending_tasks"));
List> tasks = (List>) entityAsMap(response).get("tasks");
if (false == tasks.isEmpty()) {
StringBuilder message = new StringBuilder("there are still running tasks:");
@@ -514,12 +505,12 @@ public abstract class ESRestTestCase extends ESTestCase {
* @param index index to test for
**/
protected static void ensureGreen(String index) throws IOException {
- Map params = new HashMap<>();
- params.put("wait_for_status", "green");
- params.put("wait_for_no_relocating_shards", "true");
- params.put("timeout", "70s");
- params.put("level", "shards");
- assertOK(client().performRequest("GET", "_cluster/health/" + index, params));
+ Request request = new Request("GET", "/_cluster/health/" + index);
+ request.addParameter("wait_for_status", "green");
+ request.addParameter("wait_for_no_relocating_shards", "true");
+ request.addParameter("timeout", "70s");
+ request.addParameter("level", "shards");
+ client().performRequest(request);
}
/**
@@ -527,11 +518,11 @@ public abstract class ESRestTestCase extends ESTestCase {
* in the cluster and doesn't require to know how many nodes/replica there are.
*/
protected static void ensureNoInitializingShards() throws IOException {
- Map params = new HashMap<>();
- params.put("wait_for_no_initializing_shards", "true");
- params.put("timeout", "70s");
- params.put("level", "shards");
- assertOK(client().performRequest("GET", "_cluster/health/", params));
+ Request request = new Request("GET", "/_cluster/health");
+ request.addParameter("wait_for_no_initializing_shards", "true");
+ request.addParameter("timeout", "70s");
+ request.addParameter("level", "shards");
+ client().performRequest(request);
}
protected static void createIndex(String name, Settings settings) throws IOException {
@@ -539,9 +530,10 @@ public abstract class ESRestTestCase extends ESTestCase {
}
protected static void createIndex(String name, Settings settings, String mapping) throws IOException {
- assertOK(client().performRequest(HttpPut.METHOD_NAME, name, Collections.emptyMap(),
- new StringEntity("{ \"settings\": " + Strings.toString(settings)
- + ", \"mappings\" : {" + mapping + "} }", ContentType.APPLICATION_JSON)));
+ Request request = new Request("PUT", "/" + name);
+ request.setJsonEntity("{\n \"settings\": " + Strings.toString(settings)
+ + ", \"mappings\" : {" + mapping + "} }");
+ client().performRequest(request);
}
protected static void updateIndexSettings(String index, Settings.Builder settings) throws IOException {
@@ -549,42 +541,42 @@ public abstract class ESRestTestCase extends ESTestCase {
}
private static void updateIndexSettings(String index, Settings settings) throws IOException {
- assertOK(client().performRequest("PUT", index + "/_settings", Collections.emptyMap(),
- new StringEntity(Strings.toString(settings), ContentType.APPLICATION_JSON)));
+ Request request = new Request("PUT", "/" + index + "/_settings");
+ request.setJsonEntity(Strings.toString(settings));
+ client().performRequest(request);
}
protected static Map getIndexSettings(String index) throws IOException {
- Map params = new HashMap<>();
- params.put("flat_settings", "true");
- Response response = client().performRequest(HttpGet.METHOD_NAME, index + "/_settings", params);
- assertOK(response);
+ Request request = new Request("GET", "/" + index + "/_settings");
+ request.addParameter("flat_settings", "true");
+ Response response = client().performRequest(request);
try (InputStream is = response.getEntity().getContent()) {
return XContentHelper.convertToMap(XContentType.JSON.xContent(), is, true);
}
}
protected static boolean indexExists(String index) throws IOException {
- Response response = client().performRequest(HttpHead.METHOD_NAME, index);
+ Response response = client().performRequest(new Request("HEAD", "/" + index));
return RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode();
}
protected static void closeIndex(String index) throws IOException {
- Response response = client().performRequest(HttpPost.METHOD_NAME, index + "/_close");
+ Response response = client().performRequest(new Request("POST", "/" + index + "/_close"));
assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
}
protected static void openIndex(String index) throws IOException {
- Response response = client().performRequest(HttpPost.METHOD_NAME, index + "/_open");
+ Response response = client().performRequest(new Request("POST", "/" + index + "/_open"));
assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
}
protected static boolean aliasExists(String alias) throws IOException {
- Response response = client().performRequest(HttpHead.METHOD_NAME, "/_alias/" + alias);
+ Response response = client().performRequest(new Request("HEAD", "/_alias/" + alias));
return RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode();
}
protected static boolean aliasExists(String index, String alias) throws IOException {
- Response response = client().performRequest(HttpHead.METHOD_NAME, "/" + index + "/_alias/" + alias);
+ Response response = client().performRequest(new Request("HEAD", "/" + index + "/_alias/" + alias));
return RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode();
}
@@ -602,7 +594,7 @@ public abstract class ESRestTestCase extends ESTestCase {
}
protected static Map getAsMap(final String endpoint) throws IOException {
- Response response = client().performRequest(HttpGet.METHOD_NAME, endpoint);
+ Response response = client().performRequest(new Request("GET", endpoint));
XContentType entityContentType = XContentType.fromMediaTypeOrFormat(response.getEntity().getContentType().getValue());
Map responseEntity = XContentHelper.convertToMap(entityContentType.xContent(),
response.getEntity().getContent(), false);
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java
index 69f4e0666ea..b97b4e8f6da 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java
@@ -47,7 +47,6 @@ import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
-import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
@@ -282,7 +281,9 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase {
private static Tuple readVersionsFromCatNodes(RestClient restClient) throws IOException {
// we simply go to the _cat/nodes API and parse all versions in the cluster
- Response response = restClient.performRequest("GET", "/_cat/nodes", Collections.singletonMap("h", "version,master"));
+ Request request = new Request("GET", "/_cat/nodes");
+ request.addParameter("h", "version,master");
+ Response response = restClient.performRequest(request);
ClientYamlTestResponse restTestResponse = new ClientYamlTestResponse(response);
String nodesCatResponse = restTestResponse.getBodyAsString();
String[] split = nodesCatResponse.split("\n");
@@ -310,7 +311,7 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase {
Version version = null;
for (int i = 0; i < numHosts; i++) {
//we don't really use the urls here, we rely on the client doing round-robin to touch all the nodes in the cluster
- Response response = restClient.performRequest("GET", "/");
+ Response response = restClient.performRequest(new Request("GET", "/"));
ClientYamlTestResponse restTestResponse = new ClientYamlTestResponse(response);
Object latestVersion = restTestResponse.evaluate("version.number");
if (latestVersion == null) {
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DataDescription.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DataDescription.java
index 9ff578be50b..6e9652bdfa2 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DataDescription.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DataDescription.java
@@ -353,7 +353,8 @@ public class DataDescription implements ToXContentObject, Writeable {
try {
DateTimeFormatterTimestampConverter.ofPattern(format, ZoneOffset.UTC);
} catch (IllegalArgumentException e) {
- throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_INVALID_TIMEFORMAT, format));
+ throw ExceptionsHelper.badRequestException(
+ Messages.getMessage(Messages.JOB_CONFIG_INVALID_TIMEFORMAT, format), e.getCause());
}
}
timeFormat = format;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/time/DateTimeFormatterTimestampConverter.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/time/DateTimeFormatterTimestampConverter.java
index 556c2f37b48..0efb5feb38b 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/time/DateTimeFormatterTimestampConverter.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/time/DateTimeFormatterTimestampConverter.java
@@ -54,9 +54,9 @@ public class DateTimeFormatterTimestampConverter implements TimestampConverter {
.parseDefaulting(ChronoField.YEAR_OF_ERA, LocalDate.now(defaultTimezone).getYear())
.toFormatter();
- String now = formatter.format(ZonedDateTime.ofInstant(Instant.ofEpochSecond(0), ZoneOffset.UTC));
+ String formattedTime = formatter.format(ZonedDateTime.ofInstant(Instant.ofEpochSecond(0), ZoneOffset.UTC));
try {
- TemporalAccessor parsed = formatter.parse(now);
+ TemporalAccessor parsed = formatter.parse(formattedTime);
boolean hasTimeZone = parsed.isSupported(ChronoField.INSTANT_SECONDS);
if (hasTimeZone) {
Instant.from(parsed);
@@ -67,7 +67,7 @@ public class DateTimeFormatterTimestampConverter implements TimestampConverter {
return new DateTimeFormatterTimestampConverter(formatter, hasTimeZone, defaultTimezone);
}
catch (DateTimeException e) {
- throw new IllegalArgumentException("Timestamp cannot be derived from pattern: " + pattern);
+ throw new IllegalArgumentException("Timestamp cannot be derived from pattern: " + pattern, e);
}
}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/DataDescriptionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/DataDescriptionTests.java
index 3ca4bac47cb..bb7c329cf45 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/DataDescriptionTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/DataDescriptionTests.java
@@ -17,6 +17,8 @@ import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.xpack.core.ml.job.config.DataDescription.DataFormat;
import org.elasticsearch.xpack.core.ml.job.messages.Messages;
+import java.time.DateTimeException;
+
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
@@ -51,8 +53,12 @@ public class DataDescriptionTests extends AbstractSerializingTestCase description.setTimeFormat("y-M-dd"));
assertEquals(Messages.getMessage(Messages.JOB_CONFIG_INVALID_TIMEFORMAT, "y-M-dd"), e.getMessage());
expectThrows(ElasticsearchException.class, () -> description.setTimeFormat("YYY-mm-UU hh:mm:ssY"));
+
+ Throwable cause = e.getCause();
+ assertNotNull(cause);
+ assertThat(cause, instanceOf(DateTimeException.class));
}
public void testTransform_GivenDelimitedAndEpoch() {
diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 b/x-pack/plugin/sql/src/main/antlr/SqlBase.g4
index ea0b7da161c..2c3288babd6 100644
--- a/x-pack/plugin/sql/src/main/antlr/SqlBase.g4
+++ b/x-pack/plugin/sql/src/main/antlr/SqlBase.g4
@@ -74,9 +74,14 @@ queryNoWith
: queryTerm
/** we could add sort by - sort per partition */
(ORDER BY orderBy (',' orderBy)*)?
- (LIMIT limit=(INTEGER_VALUE | ALL))?
+ limitClause?
;
+limitClause
+ : LIMIT limit=(INTEGER_VALUE | ALL)
+ | LIMIT_ESC limit=(INTEGER_VALUE | ALL) ESC_END
+ ;
+
queryTerm
: querySpecification #queryPrimaryDefault
| '(' queryNoWith ')' #subquery
@@ -185,7 +190,12 @@ predicate
;
pattern
- : value=string (ESCAPE escape=string)?
+ : value=string patternEscape?
+ ;
+
+patternEscape
+ : ESCAPE escape=string
+ | ESCAPE_ESC escape=string '}'
;
valueExpression
@@ -197,18 +207,44 @@ valueExpression
;
primaryExpression
- : CAST '(' expression AS dataType ')' #cast
- | EXTRACT '(' field=identifier FROM valueExpression ')' #extract
+ : castExpression #cast
+ | extractExpression #extract
| constant #constantDefault
| ASTERISK #star
| (qualifiedName DOT)? ASTERISK #star
- | identifier '(' (setQuantifier? expression (',' expression)*)? ')' #functionCall
+ | functionExpression #function
| '(' query ')' #subqueryExpression
| identifier #columnReference
| qualifiedName #dereference
| '(' expression ')' #parenthesizedExpression
;
+castExpression
+ : castTemplate
+ | FUNCTION_ESC castTemplate ESC_END
+ ;
+
+castTemplate
+ : CAST '(' expression AS dataType ')'
+ ;
+
+extractExpression
+ : extractTemplate
+ | FUNCTION_ESC extractTemplate ESC_END
+ ;
+
+extractTemplate
+ : EXTRACT '(' field=identifier FROM valueExpression ')'
+ ;
+
+functionExpression
+ : functionTemplate
+ | FUNCTION_ESC functionTemplate '}'
+ ;
+
+functionTemplate
+ : identifier '(' (setQuantifier? expression (',' expression)*)? ')'
+ ;
constant
: NULL #nullLiteral
@@ -216,6 +252,10 @@ constant
| booleanValue #booleanLiteral
| STRING+ #stringLiteral
| PARAM #paramLiteral
+ | DATE_ESC string ESC_END #dateEscapedLiteral
+ | TIME_ESC string ESC_END #timeEscapedLiteral
+ | TIMESTAMP_ESC string ESC_END #timestampEscapedLiteral
+ | GUID_ESC string ESC_END #guidEscapedLiteral
;
comparisonOperator
@@ -351,6 +391,18 @@ VERIFY: 'VERIFY';
WHERE: 'WHERE';
WITH: 'WITH';
+// Escaped Sequence
+ESCAPE_ESC: '{ESCAPE';
+FUNCTION_ESC: '{FN';
+LIMIT_ESC:'{LIMIT';
+DATE_ESC: '{D';
+TIME_ESC: '{T';
+TIMESTAMP_ESC: '{TS';
+// mapped to string literal
+GUID_ESC: '{GUID';
+
+ESC_END: '}';
+
EQ : '=';
NEQ : '<>' | '!=' | '<=>';
LT : '<';
diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens b/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens
index 87cf9a4809d..527cc676e1d 100644
--- a/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens
+++ b/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens
@@ -69,33 +69,41 @@ USING=68
VERIFY=69
WHERE=70
WITH=71
-EQ=72
-NEQ=73
-LT=74
-LTE=75
-GT=76
-GTE=77
-PLUS=78
-MINUS=79
-ASTERISK=80
-SLASH=81
-PERCENT=82
-CONCAT=83
-DOT=84
-PARAM=85
-STRING=86
-INTEGER_VALUE=87
-DECIMAL_VALUE=88
-IDENTIFIER=89
-DIGIT_IDENTIFIER=90
-TABLE_IDENTIFIER=91
-QUOTED_IDENTIFIER=92
-BACKQUOTED_IDENTIFIER=93
-SIMPLE_COMMENT=94
-BRACKETED_COMMENT=95
-WS=96
-UNRECOGNIZED=97
-DELIMITER=98
+ESCAPE_ESC=72
+FUNCTION_ESC=73
+LIMIT_ESC=74
+DATE_ESC=75
+TIME_ESC=76
+TIMESTAMP_ESC=77
+GUID_ESC=78
+ESC_END=79
+EQ=80
+NEQ=81
+LT=82
+LTE=83
+GT=84
+GTE=85
+PLUS=86
+MINUS=87
+ASTERISK=88
+SLASH=89
+PERCENT=90
+CONCAT=91
+DOT=92
+PARAM=93
+STRING=94
+INTEGER_VALUE=95
+DECIMAL_VALUE=96
+IDENTIFIER=97
+DIGIT_IDENTIFIER=98
+TABLE_IDENTIFIER=99
+QUOTED_IDENTIFIER=100
+BACKQUOTED_IDENTIFIER=101
+SIMPLE_COMMENT=102
+BRACKETED_COMMENT=103
+WS=104
+UNRECOGNIZED=105
+DELIMITER=106
'('=1
')'=2
','=3
@@ -167,16 +175,24 @@ DELIMITER=98
'VERIFY'=69
'WHERE'=70
'WITH'=71
-'='=72
-'<'=74
-'<='=75
-'>'=76
-'>='=77
-'+'=78
-'-'=79
-'*'=80
-'/'=81
-'%'=82
-'||'=83
-'.'=84
-'?'=85
+'{ESCAPE'=72
+'{FN'=73
+'{LIMIT'=74
+'{D'=75
+'{T'=76
+'{TS'=77
+'{GUID'=78
+'}'=79
+'='=80
+'<'=82
+'<='=83
+'>'=84
+'>='=85
+'+'=86
+'-'=87
+'*'=88
+'/'=89
+'%'=90
+'||'=91
+'.'=92
+'?'=93
diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens b/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens
index a687a9215ec..155d4860e0e 100644
--- a/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens
+++ b/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens
@@ -69,32 +69,40 @@ USING=68
VERIFY=69
WHERE=70
WITH=71
-EQ=72
-NEQ=73
-LT=74
-LTE=75
-GT=76
-GTE=77
-PLUS=78
-MINUS=79
-ASTERISK=80
-SLASH=81
-PERCENT=82
-CONCAT=83
-DOT=84
-PARAM=85
-STRING=86
-INTEGER_VALUE=87
-DECIMAL_VALUE=88
-IDENTIFIER=89
-DIGIT_IDENTIFIER=90
-TABLE_IDENTIFIER=91
-QUOTED_IDENTIFIER=92
-BACKQUOTED_IDENTIFIER=93
-SIMPLE_COMMENT=94
-BRACKETED_COMMENT=95
-WS=96
-UNRECOGNIZED=97
+ESCAPE_ESC=72
+FUNCTION_ESC=73
+LIMIT_ESC=74
+DATE_ESC=75
+TIME_ESC=76
+TIMESTAMP_ESC=77
+GUID_ESC=78
+ESC_END=79
+EQ=80
+NEQ=81
+LT=82
+LTE=83
+GT=84
+GTE=85
+PLUS=86
+MINUS=87
+ASTERISK=88
+SLASH=89
+PERCENT=90
+CONCAT=91
+DOT=92
+PARAM=93
+STRING=94
+INTEGER_VALUE=95
+DECIMAL_VALUE=96
+IDENTIFIER=97
+DIGIT_IDENTIFIER=98
+TABLE_IDENTIFIER=99
+QUOTED_IDENTIFIER=100
+BACKQUOTED_IDENTIFIER=101
+SIMPLE_COMMENT=102
+BRACKETED_COMMENT=103
+WS=104
+UNRECOGNIZED=105
'('=1
')'=2
','=3
@@ -166,16 +174,24 @@ UNRECOGNIZED=97
'VERIFY'=69
'WHERE'=70
'WITH'=71
-'='=72
-'<'=74
-'<='=75
-'>'=76
-'>='=77
-'+'=78
-'-'=79
-'*'=80
-'/'=81
-'%'=82
-'||'=83
-'.'=84
-'?'=85
+'{ESCAPE'=72
+'{FN'=73
+'{LIMIT'=74
+'{D'=75
+'{T'=76
+'{TS'=77
+'{GUID'=78
+'}'=79
+'='=80
+'<'=82
+'<='=83
+'>'=84
+'>='=85
+'+'=86
+'-'=87
+'*'=88
+'/'=89
+'%'=90
+'||'=91
+'.'=92
+'?'=93
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java
index 6f8be61b463..4915a25a55b 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java
@@ -213,10 +213,11 @@ abstract class Verifier {
* Check validity of Aggregate/GroupBy.
* This rule is needed for multiple reasons:
* 1. a user might specify an invalid aggregate (SELECT foo GROUP BY bar)
- * 2. the order/having might contain a non-grouped attribute. This is typically
+ * 2. the ORDER BY/HAVING might contain a non-grouped attribute. This is typically
* caught by the Analyzer however if wrapped in a function (ABS()) it gets resolved
* (because the expression gets resolved little by little without being pushed down,
* without the Analyzer modifying anything.
+ * 2a. additionally, a HAVING clause may only be based on aggregate functions
* 3. composite agg (used for GROUP BY) allows ordering only on the group keys
*/
private static boolean checkGroupBy(LogicalPlan p, Set localFailures,
@@ -244,7 +245,7 @@ abstract class Verifier {
}
// make sure to compare attributes directly
- if (Expressions.anyMatch(a.groupings(),
+ if (Expressions.anyMatch(a.groupings(),
g -> e.semanticEquals(e instanceof Attribute ? Expressions.attribute(g) : g))) {
return;
}
@@ -278,13 +279,14 @@ abstract class Verifier {
Map> missing = new LinkedHashMap<>();
Expression condition = f.condition();
- condition.collectFirstChildren(c -> checkGroupMatch(c, condition, a.groupings(), missing, functions));
+ // variation of checkGroupMatch customized for HAVING, which requires just aggregations
+ condition.collectFirstChildren(c -> checkGroupByHavingHasOnlyAggs(c, condition, missing, functions));
if (!missing.isEmpty()) {
String plural = missing.size() > 1 ? "s" : StringUtils.EMPTY;
- localFailures.add(fail(condition, "Cannot filter by non-grouped column" + plural + " %s, expected %s",
- Expressions.names(missing.keySet()),
- Expressions.names(a.groupings())));
+ localFailures.add(
+ fail(condition, "Cannot filter HAVING on non-aggregate" + plural + " %s; consider using WHERE instead",
+ Expressions.names(missing.keySet())));
groupingFailures.add(a);
return false;
}
@@ -294,6 +296,57 @@ abstract class Verifier {
}
+ private static boolean checkGroupByHavingHasOnlyAggs(Expression e, Node> source,
+ Map> missing, Map functions) {
+
+ // resolve FunctionAttribute to backing functions
+ if (e instanceof FunctionAttribute) {
+ FunctionAttribute fa = (FunctionAttribute) e;
+ Function function = functions.get(fa.functionId());
+ // TODO: this should be handled by a different rule
+ if (function == null) {
+ return false;
+ }
+ e = function;
+ }
+
+ // scalar functions can be a binary tree
+ // first test the function against the grouping
+ // and if that fails, start unpacking hoping to find matches
+ if (e instanceof ScalarFunction) {
+ ScalarFunction sf = (ScalarFunction) e;
+
+ // unwrap function to find the base
+ for (Expression arg : sf.arguments()) {
+ arg.collectFirstChildren(c -> checkGroupByHavingHasOnlyAggs(c, source, missing, functions));
+ }
+ return true;
+
+ } else if (e instanceof Score) {
+ // Score can't be used for having
+ missing.put(e, source);
+ return true;
+ }
+
+ // skip literals / foldable
+ if (e.foldable()) {
+ return true;
+ }
+ // skip aggs (allowed to refer to non-group columns)
+ if (Functions.isAggregate(e)) {
+ return true;
+ }
+
+ // only leaf expressions remain at this point; a bare attribute is a failure, since everything in HAVING must be based on an aggregate
+ if (e instanceof Attribute) {
+ missing.put(e, source);
+ return true;
+ }
+
+ return false;
+ }
+
+
// check whether plain columns specified in an agg are mentioned in the group-by
private static boolean checkGroupByAgg(LogicalPlan p, Set localFailures,
Set groupingFailures, Map functions) {
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java
index 35eb76af67c..66ec98ea53c 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java
@@ -20,6 +20,7 @@ import org.elasticsearch.xpack.sql.expression.Order;
import org.elasticsearch.xpack.sql.expression.ScalarSubquery;
import org.elasticsearch.xpack.sql.expression.UnresolvedAttribute;
import org.elasticsearch.xpack.sql.expression.UnresolvedStar;
+import org.elasticsearch.xpack.sql.expression.function.Function;
import org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction;
import org.elasticsearch.xpack.sql.expression.function.scalar.Cast;
import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Add;
@@ -48,14 +49,19 @@ import org.elasticsearch.xpack.sql.expression.regex.RLike;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ArithmeticBinaryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ArithmeticUnaryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.BooleanLiteralContext;
-import org.elasticsearch.xpack.sql.parser.SqlBaseParser.CastContext;
+import org.elasticsearch.xpack.sql.parser.SqlBaseParser.CastExpressionContext;
+import org.elasticsearch.xpack.sql.parser.SqlBaseParser.CastTemplateContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ColumnReferenceContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ComparisonContext;
+import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DateEscapedLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DecimalLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DereferenceContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ExistsContext;
-import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ExtractContext;
-import org.elasticsearch.xpack.sql.parser.SqlBaseParser.FunctionCallContext;
+import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ExtractExpressionContext;
+import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ExtractTemplateContext;
+import org.elasticsearch.xpack.sql.parser.SqlBaseParser.FunctionExpressionContext;
+import org.elasticsearch.xpack.sql.parser.SqlBaseParser.FunctionTemplateContext;
+import org.elasticsearch.xpack.sql.parser.SqlBaseParser.GuidEscapedLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.IntegerLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.LogicalBinaryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.LogicalNotContext;
@@ -66,6 +72,7 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.OrderByContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ParamLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ParenthesizedExpressionContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PatternContext;
+import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PatternEscapeContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PredicateContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PredicatedContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PrimitiveDataTypeContext;
@@ -76,10 +83,16 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StringContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StringLiteralContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StringQueryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.SubqueryExpressionContext;
+import org.elasticsearch.xpack.sql.parser.SqlBaseParser.TimeEscapedLiteralContext;
+import org.elasticsearch.xpack.sql.parser.SqlBaseParser.TimestampEscapedLiteralContext;
import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue;
import org.elasticsearch.xpack.sql.tree.Location;
import org.elasticsearch.xpack.sql.type.DataType;
import org.elasticsearch.xpack.sql.type.DataTypes;
+import org.joda.time.DateTime;
+import org.joda.time.format.DateTimeFormatter;
+import org.joda.time.format.DateTimeFormatterBuilder;
+import org.joda.time.format.ISODateTimeFormat;
import java.math.BigDecimal;
import java.util.List;
@@ -222,17 +235,18 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
}
char escape = 0;
- String escapeString = string(ctx.escape);
+ PatternEscapeContext escapeCtx = ctx.patternEscape();
+ String escapeString = escapeCtx == null ? null : string(escapeCtx.escape);
if (Strings.hasText(escapeString)) {
// shouldn't happen but adding validation in case the string parsing gets wonky
if (escapeString.length() > 1) {
- throw new ParsingException(source(ctx.escape), "A character not a string required for escaping; found [{}]", escapeString);
+ throw new ParsingException(source(escapeCtx), "A character not a string required for escaping; found [{}]", escapeString);
} else if (escapeString.length() == 1) {
escape = escapeString.charAt(0);
// these chars already have a meaning
if (escape == '*' || escape == '%' || escape == '_') {
- throw new ParsingException(source(ctx.escape), "Char [{}] cannot be used for escaping", escape);
+ throw new ParsingException(source(escapeCtx.escape), "Char [{}] cannot be used for escaping", escape);
}
// lastly validate that escape chars (if present) are followed by special chars
for (int i = 0; i < pattern.length(); i++) {
@@ -324,11 +338,6 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
ctx.DESC() != null ? Order.OrderDirection.DESC : Order.OrderDirection.ASC);
}
- @Override
- public Object visitCast(CastContext ctx) {
- return new Cast(source(ctx), expression(ctx.expression()), typedParsing(ctx.dataType(), DataType.class));
- }
-
@Override
public DataType visitPrimitiveDataType(PrimitiveDataTypeContext ctx) {
String type = visitIdentifier(ctx.identifier()).toLowerCase(Locale.ROOT);
@@ -367,20 +376,32 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
}
}
+ //
+ // Functions template
+ //
@Override
- public Object visitFunctionCall(FunctionCallContext ctx) {
- String name = visitIdentifier(ctx.identifier());
- boolean isDistinct = ctx.setQuantifier() != null && ctx.setQuantifier().DISTINCT() != null;
- UnresolvedFunction.ResolutionType resolutionType =
- isDistinct ? UnresolvedFunction.ResolutionType.DISTINCT : UnresolvedFunction.ResolutionType.STANDARD;
- return new UnresolvedFunction(source(ctx), name, resolutionType, expressions(ctx.expression()));
+ public Cast visitCastExpression(CastExpressionContext ctx) {
+ CastTemplateContext ctc = ctx.castTemplate();
+ return new Cast(source(ctc), expression(ctc.expression()), typedParsing(ctc.dataType(), DataType.class));
}
@Override
- public Object visitExtract(ExtractContext ctx) {
- String fieldString = visitIdentifier(ctx.field);
- return new UnresolvedFunction(source(ctx), fieldString,
- UnresolvedFunction.ResolutionType.EXTRACT, singletonList(expression(ctx.valueExpression())));
+ public Function visitExtractExpression(ExtractExpressionContext ctx) {
+ ExtractTemplateContext template = ctx.extractTemplate();
+ String fieldString = visitIdentifier(template.field);
+ return new UnresolvedFunction(source(template), fieldString,
+ UnresolvedFunction.ResolutionType.EXTRACT, singletonList(expression(template.valueExpression())));
+ }
+
+ @Override
+ public Function visitFunctionExpression(FunctionExpressionContext ctx) {
+ FunctionTemplateContext template = ctx.functionTemplate();
+
+ String name = visitIdentifier(template.identifier());
+ boolean isDistinct = template.setQuantifier() != null && template.setQuantifier().DISTINCT() != null;
+ UnresolvedFunction.ResolutionType resolutionType =
+ isDistinct ? UnresolvedFunction.ResolutionType.DISTINCT : UnresolvedFunction.ResolutionType.STANDARD;
+ return new UnresolvedFunction(source(ctx), name, resolutionType, expressions(template.expression()));
}
@Override
@@ -445,12 +466,12 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
}
@Override
- public Object visitDecimalLiteral(DecimalLiteralContext ctx) {
+ public Literal visitDecimalLiteral(DecimalLiteralContext ctx) {
return new Literal(source(ctx), new BigDecimal(ctx.getText()).doubleValue(), DataType.DOUBLE);
}
@Override
- public Object visitIntegerLiteral(IntegerLiteralContext ctx) {
+ public Literal visitIntegerLiteral(IntegerLiteralContext ctx) {
BigDecimal bigD = new BigDecimal(ctx.getText());
long value = bigD.longValueExact();
@@ -463,7 +484,7 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
}
@Override
- public Object visitParamLiteral(ParamLiteralContext ctx) {
+ public Literal visitParamLiteral(ParamLiteralContext ctx) {
SqlTypedParamValue param = param(ctx.PARAM());
Location loc = source(ctx);
if (param.value == null) {
@@ -522,4 +543,100 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
return params.get(token);
}
-}
+
+ @Override
+ public Literal visitDateEscapedLiteral(DateEscapedLiteralContext ctx) {
+ String string = string(ctx.string());
+ Location loc = source(ctx);
+ // parse yyyy-MM-dd
+ DateTime dt = null;
+ try {
+ dt = ISODateTimeFormat.date().parseDateTime(string);
+ } catch(IllegalArgumentException ex) {
+ throw new ParsingException(loc, "Invalid date received; {}", ex.getMessage());
+ }
+ return new Literal(loc, dt, DataType.DATE);
+ }
+
+ @Override
+ public Literal visitTimeEscapedLiteral(TimeEscapedLiteralContext ctx) {
+ String string = string(ctx.string());
+ Location loc = source(ctx);
+
+ // parse HH:mm:ss
+ DateTime dt = null;
+ try {
+ dt = ISODateTimeFormat.hourMinuteSecond().parseDateTime(string);
+ } catch (IllegalArgumentException ex) {
+ throw new ParsingException(loc, "Invalid time received; {}", ex.getMessage());
+ }
+
+ throw new SqlIllegalArgumentException("Time (only) literals are not supported; a date component is required as well");
+ }
+
+ @Override
+ public Literal visitTimestampEscapedLiteral(TimestampEscapedLiteralContext ctx) {
+ String string = string(ctx.string());
+
+ Location loc = source(ctx);
+ // parse yyyy-MM-dd HH:mm:ss(.f...)
+ DateTime dt = null;
+ try {
+ DateTimeFormatter formatter = new DateTimeFormatterBuilder()
+ .append(ISODateTimeFormat.date())
+ .appendLiteral(" ")
+ .append(ISODateTimeFormat.hourMinuteSecondFraction())
+ .toFormatter();
+ dt = formatter.parseDateTime(string);
+ } catch (IllegalArgumentException ex) {
+ throw new ParsingException(loc, "Invalid timestamp received; {}", ex.getMessage());
+ }
+ return new Literal(loc, dt, DataType.DATE);
+ }
+
+ @Override
+ public Literal visitGuidEscapedLiteral(GuidEscapedLiteralContext ctx) {
+ String string = string(ctx.string());
+
+ Location loc = source(ctx.string());
+ // basic validation
+ String lowerCase = string.toLowerCase(Locale.ROOT);
+ // needs to be format nnnnnnnn-nnnn-nnnn-nnnn-nnnnnnnnnnnn
+ // since the length is fixed, the validation happens on absolute values
+ // not pretty but it's fast and doesn't create any extra objects
+
+ String errorPrefix = "Invalid GUID, ";
+
+ if (lowerCase.length() != 36) {
+ throw new ParsingException(loc, "{}too {}", errorPrefix, lowerCase.length() > 36 ? "long" : "short");
+ }
+
+ int[] separatorPos = { 8, 13, 18, 23 };
+ for (int pos : separatorPos) {
+ if (lowerCase.charAt(pos) != '-') {
+ throw new ParsingException(loc, "{}expected group separator at offset [{}], found [{}]",
+ errorPrefix, pos, string.charAt(pos));
+ }
+ }
+
+ String HEXA = "0123456789abcdef";
+
+ for (int i = 0; i < lowerCase.length(); i++) {
+ // skip separators
+ boolean inspect = true;
+ for (int pos : separatorPos) {
+ if (i == pos) {
+ inspect = false;
+ break;
+ } else if (pos > i) {
+ break;
+ }
+ }
+ if (inspect && HEXA.indexOf(lowerCase.charAt(i)) < 0) {
+ throw new ParsingException(loc, "{}expected hexadecimal at offset[{}], found [{}]", errorPrefix, i, string.charAt(i));
+ }
+ }
+
+ return new Literal(source(ctx), string, DataType.KEYWORD);
+ }
+}
\ No newline at end of file
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java
index 3435994a0fc..58d858c4241 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java
@@ -19,6 +19,7 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.GroupByContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinCriteriaContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinRelationContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinTypeContext;
+import org.elasticsearch.xpack.sql.parser.SqlBaseParser.LimitClauseContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.NamedQueryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.QueryContext;
import org.elasticsearch.xpack.sql.parser.SqlBaseParser.QueryNoWithContext;
@@ -89,9 +90,13 @@ abstract class LogicalPlanBuilder extends ExpressionBuilder {
plan = new OrderBy(source(ctx.ORDER()), plan, visitList(ctx.orderBy(), Order.class));
}
- if (ctx.limit != null && ctx.INTEGER_VALUE() != null) {
- plan = new Limit(source(ctx.limit), new Literal(source(ctx),
- Integer.parseInt(ctx.limit.getText()), DataType.INTEGER), plan);
+ LimitClauseContext limitClause = ctx.limitClause();
+ if (limitClause != null) {
+ Token limit = limitClause.limit;
+ if (limit != null && limitClause.INTEGER_VALUE() != null) {
+ plan = new Limit(source(limitClause), new Literal(source(limitClause),
+ Integer.parseInt(limit.getText()), DataType.INTEGER), plan);
+ }
}
return plan;
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java
index 4e80e8db9bb..b353bcf6521 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java
@@ -1,8 +1,3 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
// ANTLR GENERATED CODE: DO NOT EDIT
package org.elasticsearch.xpack.sql.parser;
@@ -208,6 +203,18 @@ class SqlBaseBaseListener implements SqlBaseListener {
* The default implementation does nothing.
*/
@Override public void exitQueryNoWith(SqlBaseParser.QueryNoWithContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void enterLimitClause(SqlBaseParser.LimitClauseContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void exitLimitClause(SqlBaseParser.LimitClauseContext ctx) { }
/**
* {@inheritDoc}
*
@@ -556,6 +563,18 @@ class SqlBaseBaseListener implements SqlBaseListener {
* The default implementation does nothing.
*/
@Override public void exitPattern(SqlBaseParser.PatternContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void enterPatternEscape(SqlBaseParser.PatternEscapeContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void exitPatternEscape(SqlBaseParser.PatternEscapeContext ctx) { }
/**
* {@inheritDoc}
*
@@ -657,13 +676,13 @@ class SqlBaseBaseListener implements SqlBaseListener {
*
* The default implementation does nothing.
*/
- @Override public void enterFunctionCall(SqlBaseParser.FunctionCallContext ctx) { }
+ @Override public void enterFunction(SqlBaseParser.FunctionContext ctx) { }
/**
* {@inheritDoc}
*
* The default implementation does nothing.
*/
- @Override public void exitFunctionCall(SqlBaseParser.FunctionCallContext ctx) { }
+ @Override public void exitFunction(SqlBaseParser.FunctionContext ctx) { }
/**
* {@inheritDoc}
*
@@ -712,6 +731,78 @@ class SqlBaseBaseListener implements SqlBaseListener {
* The default implementation does nothing.
*/
@Override public void exitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void enterCastExpression(SqlBaseParser.CastExpressionContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void exitCastExpression(SqlBaseParser.CastExpressionContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void enterCastTemplate(SqlBaseParser.CastTemplateContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void exitCastTemplate(SqlBaseParser.CastTemplateContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void enterExtractExpression(SqlBaseParser.ExtractExpressionContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void exitExtractExpression(SqlBaseParser.ExtractExpressionContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void enterExtractTemplate(SqlBaseParser.ExtractTemplateContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void exitExtractTemplate(SqlBaseParser.ExtractTemplateContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void enterFunctionExpression(SqlBaseParser.FunctionExpressionContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void exitFunctionExpression(SqlBaseParser.FunctionExpressionContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void enterFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void exitFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx) { }
/**
* {@inheritDoc}
*
@@ -772,6 +863,54 @@ class SqlBaseBaseListener implements SqlBaseListener {
* The default implementation does nothing.
*/
@Override public void exitParamLiteral(SqlBaseParser.ParamLiteralContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void enterDateEscapedLiteral(SqlBaseParser.DateEscapedLiteralContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void exitDateEscapedLiteral(SqlBaseParser.DateEscapedLiteralContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void enterTimeEscapedLiteral(SqlBaseParser.TimeEscapedLiteralContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void exitTimeEscapedLiteral(SqlBaseParser.TimeEscapedLiteralContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void enterTimestampEscapedLiteral(SqlBaseParser.TimestampEscapedLiteralContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void exitTimestampEscapedLiteral(SqlBaseParser.TimestampEscapedLiteralContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void enterGuidEscapedLiteral(SqlBaseParser.GuidEscapedLiteralContext ctx) { }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation does nothing.
+ */
+ @Override public void exitGuidEscapedLiteral(SqlBaseParser.GuidEscapedLiteralContext ctx) { }
/**
* {@inheritDoc}
*
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java
index 1adb0a423c7..d40ae6daa6e 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java
@@ -1,8 +1,3 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
// ANTLR GENERATED CODE: DO NOT EDIT
package org.elasticsearch.xpack.sql.parser;
import org.antlr.v4.runtime.tree.AbstractParseTreeVisitor;
@@ -128,6 +123,13 @@ class SqlBaseBaseVisitor extends AbstractParseTreeVisitor implements SqlBa
* {@link #visitChildren} on {@code ctx}.
*/
@Override public T visitQueryNoWith(SqlBaseParser.QueryNoWithContext ctx) { return visitChildren(ctx); }
+ /**
+ * {@inheritDoc}
+ *
+ * The default implementation returns the result of calling
+ * {@link #visitChildren} on {@code ctx}.
+ */
+ @Override public T visitLimitClause(SqlBaseParser.LimitClauseContext ctx) { return visitChildren(ctx); }
/**
* {@inheritDoc}
*
@@ -331,6 +333,13 @@ class SqlBaseBaseVisitor extends AbstractParseTreeVisitor implements SqlBa
* {@link #visitChildren} on {@code ctx}.