From d8dc0425147c134dc72de72fdf7fd69088c76f53 Mon Sep 17 00:00:00 2001
From: Martijn van Groningen
Date: Mon, 17 Sep 2018 19:47:40 +0200
Subject: [PATCH 01/32] [CCR] Handle leader index with no mapping correctly (#33770)

When a leader index is created, it may not have a mapping yet. Currently,
if you follow such an index, the shard follow tasks fail with a
NoSuchElementException because they expect a single mapping. This commit
fixes that by allowing a leader index to not yet have a mapping.
---
 .../ccr/action/ShardFollowTasksExecutor.java |  6 +++++
 .../xpack/ccr/ShardChangesIT.java            | 24 +++++++++++++++++++
 2 files changed, 30 insertions(+)

diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java
index 714e1fa289f..d473091f80c 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java
@@ -111,6 +111,12 @@ public class ShardFollowTasksExecutor extends PersistentTasksExecutor<ShardFollowTask> {
         IndexMetaData indexMetaData = clusterStateResponse.getState().metaData().getIndexSafe(leaderIndex);
+        if (indexMetaData.getMappings().isEmpty()) {
+            assert indexMetaData.getMappingVersion() == 1;
+            handler.accept(indexMetaData.getMappingVersion());
+            return;
+        }
+
         assert indexMetaData.getMappings().size() == 1 : "expected exactly one mapping, but got [" + indexMetaData.getMappings().size() + "]";
         MappingMetaData mappingMetaData = indexMetaData.getMappings().iterator().next().value;

diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/ShardChangesIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/ShardChangesIT.java
index 73737623a40..3d1789389d7 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/ShardChangesIT.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/ShardChangesIT.java
@@ -21,6 +21,7 @@ import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.analysis.common.CommonAnalysisPlugin;
 import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.MappingMetaData;
 import org.elasticsearch.common.CheckedRunnable;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -259,6 +260,29 @@ public class ShardChangesIT extends ESIntegTestCase {
         unfollowIndex("index2");
     }
 
+    public void testNoMappingDefined() throws Exception {
+        assertAcked(client().admin().indices().prepareCreate("index1")
+            .setSettings(Settings.builder()
+                .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
+                .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
+                .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
+                .build()));
+        ensureGreen("index1");
+
+        final FollowIndexAction.Request followRequest = createFollowRequest("index1", "index2");
+        final CreateAndFollowIndexAction.Request createAndFollowRequest = new CreateAndFollowIndexAction.Request(followRequest);
+        client().execute(CreateAndFollowIndexAction.INSTANCE, createAndFollowRequest).get();
+
+        client().prepareIndex("index1", "doc", "1").setSource("{\"f\":1}", XContentType.JSON).get();
+        assertBusy(() -> assertThat(client().prepareSearch("index2").get().getHits().totalHits, equalTo(1L)));
+        unfollowIndex("index2");
+
+        MappingMetaData
mappingMetaData = client().admin().indices().prepareGetMappings("index2").get().getMappings() + .get("index2").get("doc"); + assertThat(XContentMapValues.extractValue("properties.f.type", mappingMetaData.sourceAsMap()), equalTo("long")); + assertThat(XContentMapValues.extractValue("properties.k", mappingMetaData.sourceAsMap()), nullValue()); + } + public void testFollowIndex_backlog() throws Exception { String leaderIndexSettings = getIndexSettings(between(1, 5), between(0, 1), singletonMap(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), "true")); From a5bad4d92c564f7f5c843d12a4dcb11b8c757735 Mon Sep 17 00:00:00 2001 From: Or Bin Date: Mon, 17 Sep 2018 22:35:55 +0300 Subject: [PATCH 02/32] Docs: Fixed a grammatical mistake: 'a HTTP ...' -> 'an HTTP ...' (#33744) Fixed a grammatical mistake: 'a HTTP ...' -> 'an HTTP ...' Closes #33728 --- docs/java-rest/low-level/configuration.asciidoc | 2 +- docs/plugins/plugin-script.asciidoc | 2 +- docs/reference/modules/http.asciidoc | 2 +- docs/reference/settings/notification-settings.asciidoc | 2 +- docs/reference/settings/security-settings.asciidoc | 4 ++-- docs/reference/setup/install/windows.asciidoc | 2 +- docs/ruby/client.asciidoc | 4 ++-- .../org/elasticsearch/http/netty4/Netty4CorsTests.java | 6 +++--- .../netty4/Netty4SizeHeaderFrameDecoderTests.java | 4 ++-- .../elasticsearch/http/nio/HttpReadWriteHandlerTests.java | 6 +++--- .../org/elasticsearch/http/DetailedErrorsDisabledIT.java | 2 +- .../src/main/java/org/elasticsearch/http/HttpChannel.java | 2 +- .../main/java/org/elasticsearch/monitor/package-info.java | 2 +- .../java/org/elasticsearch/transport/TcpTransport.java | 8 ++++---- .../org/elasticsearch/http/DefaultRestChannelTests.java | 6 +++--- .../org/elasticsearch/transport/TcpTransportTests.java | 2 +- .../elasticsearch/test/fixture/AbstractHttpFixture.java | 4 ++-- .../java/org/elasticsearch/test/rest/ESRestTestCase.java | 2 +- x-pack/docs/en/rest-api/watcher/execute-watch.asciidoc | 4 ++-- x-pack/docs/en/watcher/customizing-watches.asciidoc | 2 +- x-pack/docs/en/watcher/encrypting-data.asciidoc | 4 ++-- .../security/transport/SecurityHttpExceptionHandler.java | 4 ++-- .../xpack/watcher/common/http/HttpClient.java | 4 ++-- .../xpack/watcher/common/http/HttpClientTests.java | 2 +- x-pack/qa/third-party/jira/build.gradle | 2 +- 25 files changed, 42 insertions(+), 42 deletions(-) diff --git a/docs/java-rest/low-level/configuration.asciidoc b/docs/java-rest/low-level/configuration.asciidoc index 0b58c82724b..aa4e843778a 100644 --- a/docs/java-rest/low-level/configuration.asciidoc +++ b/docs/java-rest/low-level/configuration.asciidoc @@ -54,7 +54,7 @@ include-tagged::{doc-tests}/RestClientDocumentation.java[rest-client-config-basi -------------------------------------------------- Preemptive Authentication can be disabled, which means that every request will be sent without -authorization headers to see if it is accepted and, upon receiving a HTTP 401 response, it will +authorization headers to see if it is accepted and, upon receiving an HTTP 401 response, it will resend the exact same request with the basic authentication header. 
If you wish to do this, then you can do so by disabling it via the `HttpAsyncClientBuilder`: diff --git a/docs/plugins/plugin-script.asciidoc b/docs/plugins/plugin-script.asciidoc index 67546588dcc..0612d3992af 100644 --- a/docs/plugins/plugin-script.asciidoc +++ b/docs/plugins/plugin-script.asciidoc @@ -88,7 +88,7 @@ must not be contained in the `plugins` directory for the node that you are installing the plugin to or installation will fail. HTTP:: -To install a plugin from a HTTP URL: +To install a plugin from an HTTP URL: + [source,shell] ----------------------------------- diff --git a/docs/reference/modules/http.asciidoc b/docs/reference/modules/http.asciidoc index dab8e813689..d622c3b6e66 100644 --- a/docs/reference/modules/http.asciidoc +++ b/docs/reference/modules/http.asciidoc @@ -96,7 +96,7 @@ and stack traces in response output. Note: When set to `false` and the `error_tr parameter is specified, an error will be returned; when `error_trace` is not specified, a simple message will be returned. Defaults to `true` -|`http.pipelining.max_events` |The maximum number of events to be queued up in memory before a HTTP connection is closed, defaults to `10000`. +|`http.pipelining.max_events` |The maximum number of events to be queued up in memory before an HTTP connection is closed, defaults to `10000`. |`http.max_warning_header_count` |The maximum number of warning headers in client HTTP responses, defaults to unbounded. diff --git a/docs/reference/settings/notification-settings.asciidoc b/docs/reference/settings/notification-settings.asciidoc index 7a3d832ed34..ec9b8e31af2 100644 --- a/docs/reference/settings/notification-settings.asciidoc +++ b/docs/reference/settings/notification-settings.asciidoc @@ -60,7 +60,7 @@ The maximum period of inactivity between two data packets, before the request is aborted. `xpack.http.max_response_size`:: -Specifies the maximum size a HTTP response is allowed to have, defaults to +Specifies the maximum size an HTTP response is allowed to have, defaults to `10mb`, the maximum configurable value is `50mb`. [[ssl-notification-settings]] diff --git a/docs/reference/settings/security-settings.asciidoc b/docs/reference/settings/security-settings.asciidoc index 1fc441a0622..2176d0d3ee6 100644 --- a/docs/reference/settings/security-settings.asciidoc +++ b/docs/reference/settings/security-settings.asciidoc @@ -78,7 +78,7 @@ The roles to associate with the anonymous user. Required. When `true`, an HTTP 403 response is returned if the anonymous user does not have the appropriate permissions for the requested action. The user is not prompted to provide credentials to access the requested -resource. When set to `false`, a HTTP 401 is returned and the user +resource. When set to `false`, an HTTP 401 response is returned and the user can provide credentials with the appropriate permissions to gain access. Defaults to `true`. @@ -1381,4 +1381,4 @@ List of IP addresses to allow for this profile. `transport.profiles.$PROFILE.xpack.security.filter.deny`:: List of IP addresses to deny for this profile. 
-include::security-hash-settings.asciidoc[] \ No newline at end of file +include::security-hash-settings.asciidoc[] diff --git a/docs/reference/setup/install/windows.asciidoc b/docs/reference/setup/install/windows.asciidoc index dffdc48fe7b..3f64698f334 100644 --- a/docs/reference/setup/install/windows.asciidoc +++ b/docs/reference/setup/install/windows.asciidoc @@ -69,7 +69,7 @@ name, node name and roles to be set, in addition to memory and network settings: [[msi-installer-configuration]] image::images/msi_installer/msi_installer_configuration.png[] -A list of common plugins that can be downloaded and installed as part of the installation, with the option to configure a HTTPS proxy through which to download these plugins. +A list of common plugins that can be downloaded and installed as part of the installation, with the option to configure an HTTPS proxy through which to download these plugins. TIP: Ensure the installation machine has access to the internet and that any corporate firewalls in place are configured to allow downloads from `artifacts.elastic.co`: diff --git a/docs/ruby/client.asciidoc b/docs/ruby/client.asciidoc index 0c824938901..0301e47d8bc 100644 --- a/docs/ruby/client.asciidoc +++ b/docs/ruby/client.asciidoc @@ -84,7 +84,7 @@ client.search index: 'my-index', body: { query: { match: { title: 'test' } } } The `elasticsearch` gem combines two separate Rubygems: * https://github.com/elastic/elasticsearch-ruby/tree/master/elasticsearch-transport[`elasticsearch-transport`] -provides a HTTP Ruby client for connecting to the Elasticsearch cluster, +provides an HTTP Ruby client for connecting to the Elasticsearch cluster, * https://github.com/elastic/elasticsearch-ruby/tree/master/elasticsearch-api[`elasticsearch-api`] provides a Ruby API for the Elasticsearch RESTful API. @@ -94,7 +94,7 @@ Please see their respective documentation for configuration options and technica Notably, the documentation and comprehensive examples for all the API methods is contained in the source, and available online at http://rubydoc.info/gems/elasticsearch-api/Elasticsearch/API/Actions[Rubydoc]. -Keep in mind, that for optimal performance, you should use a HTTP library which supports +Keep in mind, that for optimal performance, you should use an HTTP library which supports persistent ("keep-alive") HTTP connections. 
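The preemptive-authentication paragraph patched in `docs/java-rest/low-level/configuration.asciidoc` above is easier to digest next to a concrete snippet. The following is a minimal, hypothetical Java sketch of the behaviour that paragraph describes; it assumes the low-level `RestClient` builder API and Apache HttpAsyncClient's `disableAuthCaching()`, and the credentials are placeholders:

```java
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.elasticsearch.client.RestClient;

public class PreemptiveAuthExample {
    public static void main(String[] args) {
        BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider();
        // "user"/"password" are placeholder credentials for illustration only
        credentialsProvider.setCredentials(AuthScope.ANY,
                new UsernamePasswordCredentials("user", "password"));

        RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200, "http"))
                .setHttpClientConfigCallback(httpClientBuilder -> {
                    // With auth caching disabled, the first attempt is sent without
                    // an Authorization header; the credentials are only resent after
                    // the server answers with an HTTP 401 response.
                    httpClientBuilder.disableAuthCaching();
                    return httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider);
                })
                .build();
    }
}
```

With auth caching left enabled (the default), the client would instead send the `Authorization` header preemptively on every request.
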
diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4CorsTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4CorsTests.java index 15a0850f64d..115e6735eb2 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4CorsTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4CorsTests.java @@ -46,7 +46,7 @@ import static org.hamcrest.Matchers.nullValue; public class Netty4CorsTests extends ESTestCase { public void testCorsEnabledWithoutAllowOrigins() { - // Set up a HTTP transport with only the CORS enabled setting + // Set up an HTTP transport with only the CORS enabled setting Settings settings = Settings.builder() .put(HttpTransportSettings.SETTING_CORS_ENABLED.getKey(), true) .build(); @@ -57,7 +57,7 @@ public class Netty4CorsTests extends ESTestCase { public void testCorsEnabledWithAllowOrigins() { final String originValue = "remote-host"; - // create a http transport with CORS enabled and allow origin configured + // create an HTTP transport with CORS enabled and allow origin configured Settings settings = Settings.builder() .put(SETTING_CORS_ENABLED.getKey(), true) .put(SETTING_CORS_ALLOW_ORIGIN.getKey(), originValue) @@ -72,7 +72,7 @@ public class Netty4CorsTests extends ESTestCase { public void testCorsAllowOriginWithSameHost() { String originValue = "remote-host"; String host = "remote-host"; - // create a http transport with CORS enabled + // create an HTTP transport with CORS enabled Settings settings = Settings.builder() .put(SETTING_CORS_ENABLED.getKey(), true) .build(); diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java index 4c783cf0787..564cf61a395 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoderTests.java @@ -44,7 +44,7 @@ import java.util.Collections; import static org.hamcrest.Matchers.is; /** - * This test checks, if a HTTP look-alike request (starting with a HTTP method and a space) + * This test checks, if an HTTP look-alike request (starting with an HTTP method and a space) * actually returns text response instead of just dropping the connection */ public class Netty4SizeHeaderFrameDecoderTests extends ESTestCase { @@ -91,7 +91,7 @@ public class Netty4SizeHeaderFrameDecoderTests extends ESTestCase { socket.getOutputStream().flush(); try (BufferedReader reader = new BufferedReader(new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8))) { - assertThat(reader.readLine(), is("This is not a HTTP port")); + assertThat(reader.readLine(), is("This is not an HTTP port")); } } } diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/HttpReadWriteHandlerTests.java b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/HttpReadWriteHandlerTests.java index 62bf845a770..d7e61f21173 100644 --- a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/HttpReadWriteHandlerTests.java +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/HttpReadWriteHandlerTests.java @@ -218,7 +218,7 @@ public class HttpReadWriteHandlerTests extends ESTestCase { } public void testCorsEnabledWithoutAllowOrigins() throws IOException 
{ - // Set up a HTTP transport with only the CORS enabled setting + // Set up an HTTP transport with only the CORS enabled setting Settings settings = Settings.builder() .put(HttpTransportSettings.SETTING_CORS_ENABLED.getKey(), true) .build(); @@ -233,7 +233,7 @@ public class HttpReadWriteHandlerTests extends ESTestCase { public void testCorsEnabledWithAllowOrigins() throws IOException { final String originValue = "remote-host"; - // create a http transport with CORS enabled and allow origin configured + // create an HTTP transport with CORS enabled and allow origin configured Settings settings = Settings.builder() .put(SETTING_CORS_ENABLED.getKey(), true) .put(SETTING_CORS_ALLOW_ORIGIN.getKey(), originValue) @@ -252,7 +252,7 @@ public class HttpReadWriteHandlerTests extends ESTestCase { public void testCorsAllowOriginWithSameHost() throws IOException { String originValue = "remote-host"; String host = "remote-host"; - // create a http transport with CORS enabled + // create an HTTP transport with CORS enabled Settings settings = Settings.builder() .put(SETTING_CORS_ENABLED.getKey(), true) .build(); diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DetailedErrorsDisabledIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DetailedErrorsDisabledIT.java index 6b2f49c5833..17a1f3b5d25 100644 --- a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DetailedErrorsDisabledIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DetailedErrorsDisabledIT.java @@ -33,7 +33,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; /** - * Tests that when disabling detailed errors, a request with the error_trace parameter returns a HTTP 400 + * Tests that when disabling detailed errors, a request with the error_trace parameter returns an HTTP 400 response. */ @ClusterScope(scope = Scope.TEST, supportsDedicatedMasters = false, numDataNodes = 1) public class DetailedErrorsDisabledIT extends HttpSmokeTestCase { diff --git a/server/src/main/java/org/elasticsearch/http/HttpChannel.java b/server/src/main/java/org/elasticsearch/http/HttpChannel.java index ea8d3c276b1..f8bd69d4b88 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpChannel.java +++ b/server/src/main/java/org/elasticsearch/http/HttpChannel.java @@ -27,7 +27,7 @@ import java.net.InetSocketAddress; public interface HttpChannel extends CloseableChannel { /** - * Sends a http response to the channel. The listener will be executed once the send process has been + * Sends an http response to the channel. The listener will be executed once the send process has been * completed. * * @param response to send to channel diff --git a/server/src/main/java/org/elasticsearch/monitor/package-info.java b/server/src/main/java/org/elasticsearch/monitor/package-info.java index bcfb4ce1f49..4bb9217e9e2 100644 --- a/server/src/main/java/org/elasticsearch/monitor/package-info.java +++ b/server/src/main/java/org/elasticsearch/monitor/package-info.java @@ -18,7 +18,7 @@ */ /** - * Monitors the Elasticsearch process and the system on which it is running so that metrics can be exposed via a HTTP or transport APIs to + * Monitors the Elasticsearch process and the system on which it is running so that metrics can be exposed via an HTTP or transport APIs to * be logged and graphed. 
*/ package org.elasticsearch.monitor; diff --git a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java index 2552007463b..d1abc261ea7 100644 --- a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java +++ b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java @@ -1087,7 +1087,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements * @param bytesReference the bytes available to consume * @return the number of bytes consumed * @throws StreamCorruptedException if the message header format is not recognized - * @throws TcpTransport.HttpOnTransportException if the message header appears to be a HTTP message + * @throws TcpTransport.HttpOnTransportException if the message header appears to be an HTTP message * @throws IllegalArgumentException if the message length is greater that the maximum allowed frame size. * This is dependent on the available memory. */ @@ -1109,7 +1109,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements * @param networkBytes the will be read * @return the message decoded * @throws StreamCorruptedException if the message header format is not recognized - * @throws TcpTransport.HttpOnTransportException if the message header appears to be a HTTP message + * @throws TcpTransport.HttpOnTransportException if the message header appears to be an HTTP message * @throws IllegalArgumentException if the message length is greater that the maximum allowed frame size. * This is dependent on the available memory. */ @@ -1136,7 +1136,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements * @param networkBytes the will be read * @return the length of the message * @throws StreamCorruptedException if the message header format is not recognized - * @throws TcpTransport.HttpOnTransportException if the message header appears to be a HTTP message + * @throws TcpTransport.HttpOnTransportException if the message header appears to be an HTTP message * @throws IllegalArgumentException if the message length is greater that the maximum allowed frame size. * This is dependent on the available memory. 
*/ @@ -1151,7 +1151,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements private static int readHeaderBuffer(BytesReference headerBuffer) throws IOException { if (headerBuffer.get(0) != 'E' || headerBuffer.get(1) != 'S') { if (appearsToBeHTTP(headerBuffer)) { - throw new TcpTransport.HttpOnTransportException("This is not a HTTP port"); + throw new TcpTransport.HttpOnTransportException("This is not an HTTP port"); } throw new StreamCorruptedException("invalid internal transport message format, got (" diff --git a/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java b/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java index fd683761098..1a19ccf9b4e 100644 --- a/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java +++ b/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java @@ -92,7 +92,7 @@ public class DefaultRestChannelTests extends ESTestCase { // TODO: Enable these Cors tests when the Cors logic lives in :server // public void testCorsEnabledWithoutAllowOrigins() { -// // Set up a HTTP transport with only the CORS enabled setting +// // Set up an HTTP transport with only the CORS enabled setting // Settings settings = Settings.builder() // .put(HttpTransportSettings.SETTING_CORS_ENABLED.getKey(), true) // .build(); @@ -103,7 +103,7 @@ public class DefaultRestChannelTests extends ESTestCase { // // public void testCorsEnabledWithAllowOrigins() { // final String originValue = "remote-host"; -// // create a http transport with CORS enabled and allow origin configured +// // create an HTTP transport with CORS enabled and allow origin configured // Settings settings = Settings.builder() // .put(SETTING_CORS_ENABLED.getKey(), true) // .put(SETTING_CORS_ALLOW_ORIGIN.getKey(), originValue) @@ -118,7 +118,7 @@ public class DefaultRestChannelTests extends ESTestCase { // public void testCorsAllowOriginWithSameHost() { // String originValue = "remote-host"; // String host = "remote-host"; -// // create a http transport with CORS enabled +// // create an HTTP transport with CORS enabled // Settings settings = Settings.builder() // .put(SETTING_CORS_ENABLED.getKey(), true) // .build(); diff --git a/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java b/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java index 0bf12ba82c8..bc7ef0fd5d2 100644 --- a/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java @@ -429,7 +429,7 @@ public class TcpTransportTests extends ESTestCase { fail("Expected exception"); } catch (Exception ex) { assertThat(ex, instanceOf(TcpTransport.HttpOnTransportException.class)); - assertEquals("This is not a HTTP port", ex.getMessage()); + assertEquals("This is not an HTTP port", ex.getMessage()); } } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/fixture/AbstractHttpFixture.java b/test/framework/src/main/java/org/elasticsearch/test/fixture/AbstractHttpFixture.java index 7fb4e7c55ff..92905926b50 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/fixture/AbstractHttpFixture.java +++ b/test/framework/src/main/java/org/elasticsearch/test/fixture/AbstractHttpFixture.java @@ -157,7 +157,7 @@ public abstract class AbstractHttpFixture { } /** - * Represents a HTTP Response. + * Represents an HTTP Response. 
*/ protected static class Response { @@ -203,7 +203,7 @@ public abstract class AbstractHttpFixture { } /** - * Represents a HTTP Request. + * Represents an HTTP Request. */ protected static class Request { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 9d47c4e24a9..1b29a9112c2 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -161,7 +161,7 @@ public abstract class ESRestTestCase extends ESTestCase { } /** - * Construct a HttpHost from the given host and port + * Construct an HttpHost from the given host and port */ protected HttpHost buildHttpHost(String host, int port) { return new HttpHost(host, port, getProtocol()); diff --git a/x-pack/docs/en/rest-api/watcher/execute-watch.asciidoc b/x-pack/docs/en/rest-api/watcher/execute-watch.asciidoc index ec2c60c543b..9b8505e0896 100644 --- a/x-pack/docs/en/rest-api/watcher/execute-watch.asciidoc +++ b/x-pack/docs/en/rest-api/watcher/execute-watch.asciidoc @@ -57,7 +57,7 @@ This API supports the following fields: | `ignore_condition` | no | false | When set to `true`, the watch execution uses the {xpack-ref}/condition-always.html[Always Condition]. - This can also be specified as a HTTP parameter. + This can also be specified as an HTTP parameter. | `alternative_input` | no | null | When present, the watch uses this object as a payload instead of executing its own input. @@ -70,7 +70,7 @@ This API supports the following fields: execution result is persisted to the `.watcher-history` index for the current time. In addition, the status of the watch is updated, possibly throttling subsequent executions. - This can also be specified as a HTTP parameter. + This can also be specified as an HTTP parameter. | `watch` | no | null | When present, this {xpack-ref}/how-watcher-works.html#watch-definition[watch] is used diff --git a/x-pack/docs/en/watcher/customizing-watches.asciidoc b/x-pack/docs/en/watcher/customizing-watches.asciidoc index fc45bc636bf..34b9c38229f 100644 --- a/x-pack/docs/en/watcher/customizing-watches.asciidoc +++ b/x-pack/docs/en/watcher/customizing-watches.asciidoc @@ -83,7 +83,7 @@ See <> for more details. [[loading-http-data]] ==== Loading a Payload from a remote HTTP Service with HTTP Input -Use the `http` input to issue a HTTP request and load the returned response as +Use the `http` input to issue an HTTP request and load the returned response as the watch initial payload. This input expects the response body content type to either be JSON or YAML. diff --git a/x-pack/docs/en/watcher/encrypting-data.asciidoc b/x-pack/docs/en/watcher/encrypting-data.asciidoc index 9319c9f7938..66138b54efb 100644 --- a/x-pack/docs/en/watcher/encrypting-data.asciidoc +++ b/x-pack/docs/en/watcher/encrypting-data.asciidoc @@ -6,8 +6,8 @@ information or details about your SMTP email service. You can encrypt this data by generating a key and adding some secure settings on each node in your cluster. -Every `password` field that is used in your watch within a HTTP basic -authentication block - for example within a webhook, a HTTP input or when using +Every `password` field that is used in your watch within an HTTP basic +authentication block - for example within a webhook, an HTTP input or when using the reporting email attachment - will not be stored as plain text anymore. 
Also be aware, that there is no way to configure your own fields in a watch to be encrypted.

diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityHttpExceptionHandler.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityHttpExceptionHandler.java
index c1999c5ddfb..20d2115116f 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityHttpExceptionHandler.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityHttpExceptionHandler.java
@@ -36,10 +36,10 @@ public final class SecurityHttpExceptionHandler implements BiConsumer

Date: Mon, 17 Sep 2018 22:33:37 +0200
Subject: [PATCH 03/32] [CCR] Do not unnecessarily wrap fetch exception in an
 Elasticsearch exception and (#33777)

* [CCR] Do not unnecessarily wrap fetch exception in an Elasticsearch
  exception and properly map the fetch_exceptions.exception field as object.

The extra `caused_by` level is not necessary here:

```
"fetch_exceptions": [
  {
    "from_seq_no": 1,
    "retries": 106,
    "exception": {
      "type": "exception",
      "reason": "[index1] IndexNotFoundException[no such index]",
      "caused_by": {
        "type": "index_not_found_exception",
        "reason": "no such index",
        "index_uuid": "_na_",
        "index": "index1"
      }
    }
  }
],
```
---
 .../xpack/ccr/action/ShardFollowNodeTask.java          |  3 ++-
 .../xpack/ccr/action/ShardFollowNodeTaskTests.java     | 10 ++++------
 .../collector/ccr/CcrStatsMonitoringDocTests.java      |  9 ++++++++-
 .../plugin/core/src/main/resources/monitoring-es.json  | 11 ++++++++++-
 4 files changed, 24 insertions(+), 9 deletions(-)

diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java
index 6bf880661fc..7c1cc3eb137 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTask.java
@@ -9,6 +9,7 @@ package org.elasticsearch.xpack.ccr.action;
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.support.TransportActions;
 import org.elasticsearch.common.Randomness;
 import org.elasticsearch.common.collect.Tuple;
@@ -246,7 +247,7 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask {
             synchronized (ShardFollowNodeTask.this) {
                 totalFetchTimeMillis += TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTime);
                 numberOfFailedFetches++;
-                fetchExceptions.put(from, Tuple.tuple(retryCounter, new ElasticsearchException(e)));
+                fetchExceptions.put(from, Tuple.tuple(retryCounter, ExceptionsHelper.convertToElastic(e)));
             }
             handleFailure(e, retryCounter, () -> sendShardChangesRequest(from, maxOperationCount, maxRequiredSeqNo, retryCounter));
         });

diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java
index ea4a1c12b45..7e813ae4cf6 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskTests.java
@@ -198,12 +198,10 @@ public class
ShardFollowNodeTaskTests extends ESTestCase { final Map.Entry> entry = status.fetchExceptions().entrySet().iterator().next(); assertThat(entry.getValue().v1(), equalTo(Math.toIntExact(retryCounter.get()))); assertThat(entry.getKey(), equalTo(0L)); - assertThat(entry.getValue().v2(), instanceOf(ElasticsearchException.class)); - assertNotNull(entry.getValue().v2().getCause()); - assertThat(entry.getValue().v2().getCause(), instanceOf(ShardNotFoundException.class)); - final ShardNotFoundException cause = (ShardNotFoundException) entry.getValue().v2().getCause(); - assertThat(cause.getShardId().getIndexName(), equalTo("leader_index")); - assertThat(cause.getShardId().getId(), equalTo(0)); + assertThat(entry.getValue().v2(), instanceOf(ShardNotFoundException.class)); + final ShardNotFoundException shardNotFoundException = (ShardNotFoundException) entry.getValue().v2(); + assertThat(shardNotFoundException.getShardId().getIndexName(), equalTo("leader_index")); + assertThat(shardNotFoundException.getShardId().getId(), equalTo(0)); } retryCounter.incrementAndGet(); }; diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsMonitoringDocTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsMonitoringDocTests.java index 9124e1d5245..808a1e20159 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsMonitoringDocTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/monitoring/collector/ccr/CcrStatsMonitoringDocTests.java @@ -238,10 +238,17 @@ public class CcrStatsMonitoringDocTests extends BaseMonitoringDocTestCase) fieldMapping.get("properties")).size(), equalTo(3)); assertThat(XContentMapValues.extractValue("properties.from_seq_no.type", fieldMapping), equalTo("long")); assertThat(XContentMapValues.extractValue("properties.retries.type", fieldMapping), equalTo("integer")); - assertThat(XContentMapValues.extractValue("properties.exception.type", fieldMapping), equalTo("text")); + assertThat(XContentMapValues.extractValue("properties.exception.type", fieldMapping), equalTo("object")); + + Map exceptionFieldMapping = + (Map) XContentMapValues.extractValue("properties.exception.properties", fieldMapping); + assertThat(exceptionFieldMapping.size(), equalTo(2)); + assertThat(XContentMapValues.extractValue("type.type", exceptionFieldMapping), equalTo("keyword")); + assertThat(XContentMapValues.extractValue("reason.type", exceptionFieldMapping), equalTo("text")); } else { fail("unexpected field value type [" + fieldValue.getClass() + "] for field [" + fieldName + "]"); } diff --git a/x-pack/plugin/core/src/main/resources/monitoring-es.json b/x-pack/plugin/core/src/main/resources/monitoring-es.json index 444f15912e6..8464f495371 100644 --- a/x-pack/plugin/core/src/main/resources/monitoring-es.json +++ b/x-pack/plugin/core/src/main/resources/monitoring-es.json @@ -983,6 +983,7 @@ "type": "long" }, "fetch_exceptions": { + "type": "nested", "properties": { "from_seq_no": { "type": "long" @@ -991,7 +992,15 @@ "type": "integer" }, "exception": { - "type": "text" + "type": "object", + "properties": { + "type" : { + "type": "keyword" + }, + "reason": { + "type": "text" + } + } } } }, From e686909768b163a0999e98efe7adb3046eeef3df Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 17 Sep 2018 15:10:16 -0700 Subject: [PATCH 04/32] Build: Change test task sanity check to be per project (#33544) This commit changes the sanity check which ensures the test task 
was properly replaced with randomized testing to use a per-project check
instead of a global one. The previous global check assumed all test tasks
within the root project and below should be randomized testing, but that is
not the case for a multi-project build in which only one project is an
Elasticsearch plugin. While the new check is not able to emit all of the
failed replacements in one error message, the efficacy of the check remains.
---
 .../junit4/RandomizedTestingPlugin.groovy | 19 ++++---------------
 1 file changed, 4 insertions(+), 15 deletions(-)

diff --git a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy
index d4c8f89bf50..01458f4543d 100644
--- a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy
+++ b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy
@@ -1,7 +1,6 @@
 package com.carrotsearch.gradle.junit4
 
 import com.carrotsearch.ant.tasks.junit4.JUnit4
-import org.gradle.api.GradleException
 import org.gradle.api.Plugin
 import org.gradle.api.Project
 import org.gradle.api.Task
@@ -11,12 +10,8 @@ import org.gradle.api.tasks.TaskContainer
 import org.gradle.api.tasks.TaskProvider
 import org.gradle.api.tasks.testing.Test
 
-import java.util.concurrent.atomic.AtomicBoolean
-
 class RandomizedTestingPlugin implements Plugin<Project> {
 
-    static private AtomicBoolean sanityCheckConfigured = new AtomicBoolean(false)
-
     void apply(Project project) {
         setupSeed(project)
         replaceTestTask(project.tasks)
@@ -27,16 +22,10 @@ class RandomizedTestingPlugin implements Plugin<Project> {
     private static void configureSanityCheck(Project project) {
         // Check the task graph to confirm tasks were indeed replaced
        // https://github.com/elastic/elasticsearch/issues/31324
-        if (sanityCheckConfigured.getAndSet(true) == false) {
-            project.rootProject.getGradle().getTaskGraph().whenReady {
-                List<Task> nonConforming = project.getGradle().getTaskGraph().allTasks
-                    .findAll { it.name == "test" }
-                    .findAll { (it instanceof RandomizedTestingTask) == false}
-                    .collect { "${it.path} -> ${it.class}" }
-                if (nonConforming.isEmpty() == false) {
-                    throw new GradleException("Found the ${nonConforming.size()} `test` tasks:" +
-                        "\n  ${nonConforming.join("\n  ")}")
-                }
+        project.rootProject.getGradle().getTaskGraph().whenReady {
+            Task test = project.getTasks().findByName("test")
+            if (test != null && (test instanceof RandomizedTestingTask) == false) {
+                throw new IllegalStateException("Test task was not replaced in project ${project.path}. Found ${test.getClass()}")
             }
         }
     }

From 42e106fb226b767788859a32be86878b2ae10653 Mon Sep 17 00:00:00 2001
From: Michael Basnight
Date: Mon, 17 Sep 2018 17:38:30 -0500
Subject: [PATCH 05/32] HLRC: split indices request converters (#33433)

In an effort to encapsulate the different clients, the request converters
are being shuffled around. This splits the IndicesClient request
converters.
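The shape of the split is easier to see in isolation. Below is a minimal, hypothetical sketch of the pattern this commit applies: a per-client, static-only converter class that `IndicesClient` references by method handle (for example `IndicesRequestConverters::deleteIndex`). The `endpoint` and `Params` helpers are assumed to keep the shapes they have in the existing `RequestConverters`; the real class differs in detail:

```java
package org.elasticsearch.client;

import org.apache.http.client.methods.HttpDelete;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;

// Hypothetical simplification of the class this patch introduces; the names of
// the helpers (endpoint, Params) are borrowed from the existing RequestConverters.
final class IndicesRequestConverters {

    private IndicesRequestConverters() {
        // static-only utility, never instantiated
    }

    // Converts a high-level DeleteIndexRequest into a low-level Request so that
    // IndicesClient can pass IndicesRequestConverters::deleteIndex to
    // performRequestAndParseEntity(...).
    static Request deleteIndex(DeleteIndexRequest deleteIndexRequest) {
        String endpoint = RequestConverters.endpoint(deleteIndexRequest.indices());
        Request request = new Request(HttpDelete.METHOD_NAME, endpoint);

        RequestConverters.Params parameters = new RequestConverters.Params(request);
        parameters.withTimeout(deleteIndexRequest.timeout());
        parameters.withMasterTimeout(deleteIndexRequest.masterNodeTimeout());
        parameters.withIndicesOptions(deleteIndexRequest.indicesOptions());
        return request;
    }
}
```

Because each converter is a static method, the call sites in `IndicesClient` only swap their method references, which is essentially all the diff below does.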
--- .../elasticsearch/client/IndicesClient.java | 104 +- .../client/IndicesRequestConverters.java | 403 ++++++++ .../client/RequestConverters.java | 356 ------- .../client/IndicesRequestConvertersTests.java | 893 ++++++++++++++++++ .../client/RequestConvertersTests.java | 819 +--------------- 5 files changed, 1352 insertions(+), 1223 deletions(-) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java index c4567e22e0b..3811ba78344 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java @@ -89,7 +89,7 @@ public final class IndicesClient { * @throws IOException in case there is a problem sending the request or parsing back the response */ public AcknowledgedResponse delete(DeleteIndexRequest deleteIndexRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(deleteIndexRequest, RequestConverters::deleteIndex, options, + return restHighLevelClient.performRequestAndParseEntity(deleteIndexRequest, IndicesRequestConverters::deleteIndex, options, AcknowledgedResponse::fromXContent, emptySet()); } @@ -102,7 +102,7 @@ public final class IndicesClient { * @param listener the listener to be notified upon request completion */ public void deleteAsync(DeleteIndexRequest deleteIndexRequest, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(deleteIndexRequest, RequestConverters::deleteIndex, options, + restHighLevelClient.performRequestAsyncAndParseEntity(deleteIndexRequest, IndicesRequestConverters::deleteIndex, options, AcknowledgedResponse::fromXContent, listener, emptySet()); } @@ -116,7 +116,7 @@ public final class IndicesClient { * @throws IOException in case there is a problem sending the request or parsing back the response */ public CreateIndexResponse create(CreateIndexRequest createIndexRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(createIndexRequest, RequestConverters::createIndex, options, + return restHighLevelClient.performRequestAndParseEntity(createIndexRequest, IndicesRequestConverters::createIndex, options, CreateIndexResponse::fromXContent, emptySet()); } @@ -129,7 +129,7 @@ public final class IndicesClient { * @param listener the listener to be notified upon request completion */ public void createAsync(CreateIndexRequest createIndexRequest, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(createIndexRequest, RequestConverters::createIndex, options, + restHighLevelClient.performRequestAsyncAndParseEntity(createIndexRequest, IndicesRequestConverters::createIndex, options, CreateIndexResponse::fromXContent, listener, emptySet()); } @@ -143,7 +143,7 @@ public final class IndicesClient { * @throws IOException in case there is a problem sending the request or parsing back the response */ public AcknowledgedResponse putMapping(PutMappingRequest putMappingRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(putMappingRequest, 
RequestConverters::putMapping, options, + return restHighLevelClient.performRequestAndParseEntity(putMappingRequest, IndicesRequestConverters::putMapping, options, AcknowledgedResponse::fromXContent, emptySet()); } @@ -157,7 +157,7 @@ public final class IndicesClient { */ public void putMappingAsync(PutMappingRequest putMappingRequest, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(putMappingRequest, RequestConverters::putMapping, options, + restHighLevelClient.performRequestAsyncAndParseEntity(putMappingRequest, IndicesRequestConverters::putMapping, options, AcknowledgedResponse::fromXContent, listener, emptySet()); } @@ -171,7 +171,7 @@ public final class IndicesClient { * @throws IOException in case there is a problem sending the request or parsing back the response */ public GetMappingsResponse getMapping(GetMappingsRequest getMappingsRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(getMappingsRequest, RequestConverters::getMappings, options, + return restHighLevelClient.performRequestAndParseEntity(getMappingsRequest, IndicesRequestConverters::getMappings, options, GetMappingsResponse::fromXContent, emptySet()); } @@ -185,7 +185,7 @@ public final class IndicesClient { */ public void getMappingAsync(GetMappingsRequest getMappingsRequest, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(getMappingsRequest, RequestConverters::getMappings, options, + restHighLevelClient.performRequestAsyncAndParseEntity(getMappingsRequest, IndicesRequestConverters::getMappings, options, GetMappingsResponse::fromXContent, listener, emptySet()); } @@ -200,7 +200,7 @@ public final class IndicesClient { */ public GetFieldMappingsResponse getFieldMapping(GetFieldMappingsRequest getFieldMappingsRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(getFieldMappingsRequest, RequestConverters::getFieldMapping, options, + return restHighLevelClient.performRequestAndParseEntity(getFieldMappingsRequest, IndicesRequestConverters::getFieldMapping, options, GetFieldMappingsResponse::fromXContent, emptySet()); } @@ -214,7 +214,7 @@ public final class IndicesClient { */ public void getFieldMappingAsync(GetFieldMappingsRequest getFieldMappingsRequest, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(getFieldMappingsRequest, RequestConverters::getFieldMapping, options, + restHighLevelClient.performRequestAsyncAndParseEntity(getFieldMappingsRequest, IndicesRequestConverters::getFieldMapping, options, GetFieldMappingsResponse::fromXContent, listener, emptySet()); } @@ -228,7 +228,7 @@ public final class IndicesClient { * @throws IOException in case there is a problem sending the request or parsing back the response */ public AcknowledgedResponse updateAliases(IndicesAliasesRequest indicesAliasesRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(indicesAliasesRequest, RequestConverters::updateAliases, options, + return restHighLevelClient.performRequestAndParseEntity(indicesAliasesRequest, IndicesRequestConverters::updateAliases, options, AcknowledgedResponse::fromXContent, emptySet()); } @@ -242,7 +242,7 @@ public final class IndicesClient { */ public void updateAliasesAsync(IndicesAliasesRequest indicesAliasesRequest, RequestOptions options, ActionListener listener) { - 
restHighLevelClient.performRequestAsyncAndParseEntity(indicesAliasesRequest, RequestConverters::updateAliases, options, + restHighLevelClient.performRequestAsyncAndParseEntity(indicesAliasesRequest, IndicesRequestConverters::updateAliases, options, AcknowledgedResponse::fromXContent, listener, emptySet()); } @@ -256,7 +256,7 @@ public final class IndicesClient { * @throws IOException in case there is a problem sending the request or parsing back the response */ public OpenIndexResponse open(OpenIndexRequest openIndexRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(openIndexRequest, RequestConverters::openIndex, options, + return restHighLevelClient.performRequestAndParseEntity(openIndexRequest, IndicesRequestConverters::openIndex, options, OpenIndexResponse::fromXContent, emptySet()); } @@ -269,7 +269,7 @@ public final class IndicesClient { * @param listener the listener to be notified upon request completion */ public void openAsync(OpenIndexRequest openIndexRequest, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(openIndexRequest, RequestConverters::openIndex, options, + restHighLevelClient.performRequestAsyncAndParseEntity(openIndexRequest, IndicesRequestConverters::openIndex, options, OpenIndexResponse::fromXContent, listener, emptySet()); } @@ -283,7 +283,7 @@ public final class IndicesClient { * @throws IOException in case there is a problem sending the request or parsing back the response */ public AcknowledgedResponse close(CloseIndexRequest closeIndexRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(closeIndexRequest, RequestConverters::closeIndex, options, + return restHighLevelClient.performRequestAndParseEntity(closeIndexRequest, IndicesRequestConverters::closeIndex, options, AcknowledgedResponse::fromXContent, emptySet()); } @@ -296,7 +296,7 @@ public final class IndicesClient { * @param listener the listener to be notified upon request completion */ public void closeAsync(CloseIndexRequest closeIndexRequest, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(closeIndexRequest, RequestConverters::closeIndex, options, + restHighLevelClient.performRequestAsyncAndParseEntity(closeIndexRequest, IndicesRequestConverters::closeIndex, options, AcknowledgedResponse::fromXContent, listener, emptySet()); } @@ -311,7 +311,7 @@ public final class IndicesClient { * @throws IOException in case there is a problem sending the request */ public boolean existsAlias(GetAliasesRequest getAliasesRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequest(getAliasesRequest, RequestConverters::existsAlias, options, + return restHighLevelClient.performRequest(getAliasesRequest, IndicesRequestConverters::existsAlias, options, RestHighLevelClient::convertExistsResponse, emptySet()); } @@ -324,7 +324,7 @@ public final class IndicesClient { * @param listener the listener to be notified upon request completion */ public void existsAliasAsync(GetAliasesRequest getAliasesRequest, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsync(getAliasesRequest, RequestConverters::existsAlias, options, + restHighLevelClient.performRequestAsync(getAliasesRequest, IndicesRequestConverters::existsAlias, options, RestHighLevelClient::convertExistsResponse, listener, emptySet()); } @@ -337,7 +337,7 @@ public 
final class IndicesClient { * @throws IOException in case there is a problem sending the request or parsing back the response */ public RefreshResponse refresh(RefreshRequest refreshRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(refreshRequest, RequestConverters::refresh, options, + return restHighLevelClient.performRequestAndParseEntity(refreshRequest, IndicesRequestConverters::refresh, options, RefreshResponse::fromXContent, emptySet()); } @@ -349,7 +349,7 @@ public final class IndicesClient { * @param listener the listener to be notified upon request completion */ public void refreshAsync(RefreshRequest refreshRequest, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(refreshRequest, RequestConverters::refresh, options, + restHighLevelClient.performRequestAsyncAndParseEntity(refreshRequest, IndicesRequestConverters::refresh, options, RefreshResponse::fromXContent, listener, emptySet()); } @@ -362,7 +362,7 @@ public final class IndicesClient { * @throws IOException in case there is a problem sending the request or parsing back the response */ public FlushResponse flush(FlushRequest flushRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(flushRequest, RequestConverters::flush, options, + return restHighLevelClient.performRequestAndParseEntity(flushRequest, IndicesRequestConverters::flush, options, FlushResponse::fromXContent, emptySet()); } @@ -374,7 +374,7 @@ public final class IndicesClient { * @param listener the listener to be notified upon request completion */ public void flushAsync(FlushRequest flushRequest, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(flushRequest, RequestConverters::flush, options, + restHighLevelClient.performRequestAsyncAndParseEntity(flushRequest, IndicesRequestConverters::flush, options, FlushResponse::fromXContent, listener, emptySet()); } @@ -388,7 +388,7 @@ public final class IndicesClient { * @throws IOException in case there is a problem sending the request or parsing back the response */ public SyncedFlushResponse flushSynced(SyncedFlushRequest syncedFlushRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(syncedFlushRequest, RequestConverters::flushSynced, options, + return restHighLevelClient.performRequestAndParseEntity(syncedFlushRequest, IndicesRequestConverters::flushSynced, options, SyncedFlushResponse::fromXContent, emptySet()); } @@ -402,7 +402,7 @@ public final class IndicesClient { */ public void flushSyncedAsync(SyncedFlushRequest syncedFlushRequest, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(syncedFlushRequest, RequestConverters::flushSynced, options, + restHighLevelClient.performRequestAsyncAndParseEntity(syncedFlushRequest, IndicesRequestConverters::flushSynced, options, SyncedFlushResponse::fromXContent, listener, emptySet()); } @@ -416,7 +416,7 @@ public final class IndicesClient { * @throws IOException in case there is a problem sending the request or parsing back the response */ public GetSettingsResponse getSettings(GetSettingsRequest getSettingsRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(getSettingsRequest, RequestConverters::getSettings, options, + return 
restHighLevelClient.performRequestAndParseEntity(getSettingsRequest, IndicesRequestConverters::getSettings, options, GetSettingsResponse::fromXContent, emptySet()); } @@ -430,7 +430,7 @@ public final class IndicesClient { */ public void getSettingsAsync(GetSettingsRequest getSettingsRequest, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(getSettingsRequest, RequestConverters::getSettings, options, + restHighLevelClient.performRequestAsyncAndParseEntity(getSettingsRequest, IndicesRequestConverters::getSettings, options, GetSettingsResponse::fromXContent, listener, emptySet()); } @@ -444,7 +444,7 @@ public final class IndicesClient { * @throws IOException in case there is a problem sending the request or parsing back the response */ public GetIndexResponse get(GetIndexRequest getIndexRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(getIndexRequest, RequestConverters::getIndex, options, + return restHighLevelClient.performRequestAndParseEntity(getIndexRequest, IndicesRequestConverters::getIndex, options, GetIndexResponse::fromXContent, emptySet()); } @@ -458,7 +458,7 @@ public final class IndicesClient { */ public void getAsync(GetIndexRequest getIndexRequest, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(getIndexRequest, RequestConverters::getIndex, options, + restHighLevelClient.performRequestAsyncAndParseEntity(getIndexRequest, IndicesRequestConverters::getIndex, options, GetIndexResponse::fromXContent, listener, emptySet()); } @@ -487,7 +487,7 @@ public final class IndicesClient { * @throws IOException in case there is a problem sending the request or parsing back the response */ public ForceMergeResponse forcemerge(ForceMergeRequest forceMergeRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(forceMergeRequest, RequestConverters::forceMerge, options, + return restHighLevelClient.performRequestAndParseEntity(forceMergeRequest, IndicesRequestConverters::forceMerge, options, ForceMergeResponse::fromXContent, emptySet()); } @@ -514,7 +514,7 @@ public final class IndicesClient { * @param listener the listener to be notified upon request completion */ public void forcemergeAsync(ForceMergeRequest forceMergeRequest, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(forceMergeRequest, RequestConverters::forceMerge, options, + restHighLevelClient.performRequestAsyncAndParseEntity(forceMergeRequest, IndicesRequestConverters::forceMerge, options, ForceMergeResponse::fromXContent, listener, emptySet()); } @@ -529,7 +529,7 @@ public final class IndicesClient { */ public ClearIndicesCacheResponse clearCache(ClearIndicesCacheRequest clearIndicesCacheRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(clearIndicesCacheRequest, RequestConverters::clearCache, options, + return restHighLevelClient.performRequestAndParseEntity(clearIndicesCacheRequest, IndicesRequestConverters::clearCache, options, ClearIndicesCacheResponse::fromXContent, emptySet()); } @@ -543,7 +543,7 @@ public final class IndicesClient { */ public void clearCacheAsync(ClearIndicesCacheRequest clearIndicesCacheRequest, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(clearIndicesCacheRequest, RequestConverters::clearCache, 
options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(clearIndicesCacheRequest, IndicesRequestConverters::clearCache, options,
             ClearIndicesCacheResponse::fromXContent, listener, emptySet());
     }
 
@@ -559,7 +559,7 @@ public final class IndicesClient {
     public boolean exists(GetIndexRequest request, RequestOptions options) throws IOException {
         return restHighLevelClient.performRequest(
             request,
-            RequestConverters::indicesExist,
+            IndicesRequestConverters::indicesExist,
             options,
             RestHighLevelClient::convertExistsResponse,
             Collections.emptySet()
@@ -577,7 +577,7 @@ public final class IndicesClient {
     public void existsAsync(GetIndexRequest request, RequestOptions options, ActionListener<Boolean> listener) {
         restHighLevelClient.performRequestAsync(
             request,
-            RequestConverters::indicesExist,
+            IndicesRequestConverters::indicesExist,
             options,
             RestHighLevelClient::convertExistsResponse,
             listener,
@@ -595,7 +595,7 @@ public final class IndicesClient {
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public ResizeResponse shrink(ResizeRequest resizeRequest, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(resizeRequest, RequestConverters::shrink, options,
+        return restHighLevelClient.performRequestAndParseEntity(resizeRequest, IndicesRequestConverters::shrink, options,
             ResizeResponse::fromXContent, emptySet());
     }
 
@@ -608,7 +608,7 @@ public final class IndicesClient {
      * @param listener the listener to be notified upon request completion
      */
     public void shrinkAsync(ResizeRequest resizeRequest, RequestOptions options, ActionListener<ResizeResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(resizeRequest, RequestConverters::shrink, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(resizeRequest, IndicesRequestConverters::shrink, options,
             ResizeResponse::fromXContent, listener, emptySet());
     }
 
@@ -622,7 +622,7 @@ public final class IndicesClient {
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public ResizeResponse split(ResizeRequest resizeRequest, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(resizeRequest, RequestConverters::split, options,
+        return restHighLevelClient.performRequestAndParseEntity(resizeRequest, IndicesRequestConverters::split, options,
             ResizeResponse::fromXContent, emptySet());
     }
 
@@ -635,7 +635,7 @@ public final class IndicesClient {
      * @param listener the listener to be notified upon request completion
      */
     public void splitAsync(ResizeRequest resizeRequest, RequestOptions options, ActionListener<ResizeResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(resizeRequest, RequestConverters::split, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(resizeRequest, IndicesRequestConverters::split, options,
             ResizeResponse::fromXContent, listener, emptySet());
     }
 
@@ -649,7 +649,7 @@ public final class IndicesClient {
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public RolloverResponse rollover(RolloverRequest rolloverRequest, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(rolloverRequest, RequestConverters::rollover, options,
+        return restHighLevelClient.performRequestAndParseEntity(rolloverRequest, IndicesRequestConverters::rollover, options,
             RolloverResponse::fromXContent, emptySet());
     }
 
@@ -662,7 +662,7 @@ public final class IndicesClient {
      * @param listener the listener to be notified upon request completion
      */
     public void rolloverAsync(RolloverRequest rolloverRequest, RequestOptions options, ActionListener<RolloverResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(rolloverRequest, RequestConverters::rollover, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(rolloverRequest, IndicesRequestConverters::rollover, options,
             RolloverResponse::fromXContent, listener, emptySet());
     }
 
@@ -676,7 +676,7 @@ public final class IndicesClient {
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public GetAliasesResponse getAlias(GetAliasesRequest getAliasesRequest, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(getAliasesRequest, RequestConverters::getAlias, options,
+        return restHighLevelClient.performRequestAndParseEntity(getAliasesRequest, IndicesRequestConverters::getAlias, options,
             GetAliasesResponse::fromXContent, singleton(RestStatus.NOT_FOUND.getStatus()));
     }
 
@@ -689,7 +689,7 @@ public final class IndicesClient {
      * @param listener the listener to be notified upon request completion
      */
     public void getAliasAsync(GetAliasesRequest getAliasesRequest, RequestOptions options, ActionListener<GetAliasesResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(getAliasesRequest, RequestConverters::getAlias, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(getAliasesRequest, IndicesRequestConverters::getAlias, options,
             GetAliasesResponse::fromXContent, listener, singleton(RestStatus.NOT_FOUND.getStatus()));
     }
 
@@ -703,7 +703,7 @@ public final class IndicesClient {
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public AcknowledgedResponse putSettings(UpdateSettingsRequest updateSettingsRequest, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(updateSettingsRequest, RequestConverters::indexPutSettings, options,
+        return restHighLevelClient.performRequestAndParseEntity(updateSettingsRequest, IndicesRequestConverters::indexPutSettings, options,
             AcknowledgedResponse::fromXContent, emptySet());
     }
 
@@ -717,7 +717,7 @@ public final class IndicesClient {
      */
     public void putSettingsAsync(UpdateSettingsRequest updateSettingsRequest, RequestOptions options,
                                  ActionListener<AcknowledgedResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(updateSettingsRequest, RequestConverters::indexPutSettings, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(updateSettingsRequest, IndicesRequestConverters::indexPutSettings, options,
             AcknowledgedResponse::fromXContent, listener, emptySet());
     }
 
@@ -732,7 +732,7 @@ public final class IndicesClient {
      */
     public AcknowledgedResponse putTemplate(PutIndexTemplateRequest putIndexTemplateRequest,
                                             RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(putIndexTemplateRequest, RequestConverters::putTemplate, options,
+        return restHighLevelClient.performRequestAndParseEntity(putIndexTemplateRequest, IndicesRequestConverters::putTemplate, options,
             AcknowledgedResponse::fromXContent, emptySet());
     }
 
@@ -746,7 +746,7 @@ public final class IndicesClient {
      */
     public void putTemplateAsync(PutIndexTemplateRequest putIndexTemplateRequest, RequestOptions options,
                                  ActionListener<AcknowledgedResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(putIndexTemplateRequest, RequestConverters::putTemplate, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(putIndexTemplateRequest, IndicesRequestConverters::putTemplate, options,
             AcknowledgedResponse::fromXContent, listener, emptySet());
     }
 
@@ -761,7 +761,7 @@ public final class IndicesClient {
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public ValidateQueryResponse validateQuery(ValidateQueryRequest validateQueryRequest, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(validateQueryRequest, RequestConverters::validateQuery, options,
+        return restHighLevelClient.performRequestAndParseEntity(validateQueryRequest, IndicesRequestConverters::validateQuery, options,
             ValidateQueryResponse::fromXContent, emptySet());
     }
 
@@ -776,7 +776,7 @@ public final class IndicesClient {
      */
     public void validateQueryAsync(ValidateQueryRequest validateQueryRequest, RequestOptions options,
                                    ActionListener<ValidateQueryResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(validateQueryRequest, RequestConverters::validateQuery, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(validateQueryRequest, IndicesRequestConverters::validateQuery, options,
             ValidateQueryResponse::fromXContent, listener, emptySet());
     }
 
@@ -791,7 +791,7 @@ public final class IndicesClient {
      */
     public GetIndexTemplatesResponse getTemplate(GetIndexTemplatesRequest getIndexTemplatesRequest,
                                                  RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(getIndexTemplatesRequest, RequestConverters::getTemplates,
+        return restHighLevelClient.performRequestAndParseEntity(getIndexTemplatesRequest, IndicesRequestConverters::getTemplates,
             options, GetIndexTemplatesResponse::fromXContent, emptySet());
     }
 
@@ -805,7 +805,7 @@ public final class IndicesClient {
      */
     public void getTemplateAsync(GetIndexTemplatesRequest getIndexTemplatesRequest, RequestOptions options,
                                  ActionListener<GetIndexTemplatesResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(getIndexTemplatesRequest, RequestConverters::getTemplates,
+        restHighLevelClient.performRequestAsyncAndParseEntity(getIndexTemplatesRequest, IndicesRequestConverters::getTemplates,
             options, GetIndexTemplatesResponse::fromXContent, listener, emptySet());
     }
 
@@ -818,7 +818,7 @@ public final class IndicesClient {
      * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
      */
     public AnalyzeResponse analyze(AnalyzeRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::analyze, options,
+        return restHighLevelClient.performRequestAndParseEntity(request, IndicesRequestConverters::analyze, options,
             AnalyzeResponse::fromXContent, emptySet());
     }
 
@@ -833,7 +833,7 @@ public final class IndicesClient {
      */
     public void analyzeAsync(AnalyzeRequest request, RequestOptions options,
                              ActionListener<AnalyzeResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::analyze, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(request, IndicesRequestConverters::analyze, options,
             AnalyzeResponse::fromXContent, listener, emptySet());
     }
 }
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java
new file mode 100644
index 00000000000..740b87107c1
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java
@@ -0,0 +1,403 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */ + +package org.elasticsearch.client; + +import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpHead; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; +import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; +import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; +import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.admin.indices.flush.FlushRequest; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; +import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; +import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; +import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; +import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; +import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; +import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; +import org.elasticsearch.action.admin.indices.shrink.ResizeRequest; +import org.elasticsearch.action.admin.indices.shrink.ResizeType; +import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequest; +import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; +import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; +import org.elasticsearch.common.Strings; + +import java.io.IOException; +import java.util.Locale; + +public class IndicesRequestConverters { + static Request deleteIndex(DeleteIndexRequest deleteIndexRequest) { + String endpoint = RequestConverters.endpoint(deleteIndexRequest.indices()); + Request request = new Request(HttpDelete.METHOD_NAME, endpoint); + + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withTimeout(deleteIndexRequest.timeout()); + parameters.withMasterTimeout(deleteIndexRequest.masterNodeTimeout()); + parameters.withIndicesOptions(deleteIndexRequest.indicesOptions()); + return request; + } + + static Request openIndex(OpenIndexRequest openIndexRequest) { + String endpoint = RequestConverters.endpoint(openIndexRequest.indices(), "_open"); + Request request = new Request(HttpPost.METHOD_NAME, endpoint); + + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withTimeout(openIndexRequest.timeout()); + parameters.withMasterTimeout(openIndexRequest.masterNodeTimeout()); + parameters.withWaitForActiveShards(openIndexRequest.waitForActiveShards()); + parameters.withIndicesOptions(openIndexRequest.indicesOptions()); + return request; + } + + static Request closeIndex(CloseIndexRequest closeIndexRequest) { + String endpoint = RequestConverters.endpoint(closeIndexRequest.indices(), "_close"); + Request request = new 
Request(HttpPost.METHOD_NAME, endpoint); + + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withTimeout(closeIndexRequest.timeout()); + parameters.withMasterTimeout(closeIndexRequest.masterNodeTimeout()); + parameters.withIndicesOptions(closeIndexRequest.indicesOptions()); + return request; + } + + static Request createIndex(CreateIndexRequest createIndexRequest) throws IOException { + String endpoint = RequestConverters.endpoint(createIndexRequest.indices()); + Request request = new Request(HttpPut.METHOD_NAME, endpoint); + + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withTimeout(createIndexRequest.timeout()); + parameters.withMasterTimeout(createIndexRequest.masterNodeTimeout()); + parameters.withWaitForActiveShards(createIndexRequest.waitForActiveShards()); + + request.setEntity(RequestConverters.createEntity(createIndexRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); + return request; + } + + static Request updateAliases(IndicesAliasesRequest indicesAliasesRequest) throws IOException { + Request request = new Request(HttpPost.METHOD_NAME, "/_aliases"); + + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withTimeout(indicesAliasesRequest.timeout()); + parameters.withMasterTimeout(indicesAliasesRequest.masterNodeTimeout()); + + request.setEntity(RequestConverters.createEntity(indicesAliasesRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); + return request; + } + + static Request putMapping(PutMappingRequest putMappingRequest) throws IOException { + // The concreteIndex is an internal concept, not applicable to requests made over the REST API. + if (putMappingRequest.getConcreteIndex() != null) { + throw new IllegalArgumentException("concreteIndex cannot be set on PutMapping requests made over the REST API"); + } + + Request request = new Request(HttpPut.METHOD_NAME, RequestConverters.endpoint(putMappingRequest.indices(), "_mapping", + putMappingRequest.type())); + + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withTimeout(putMappingRequest.timeout()); + parameters.withMasterTimeout(putMappingRequest.masterNodeTimeout()); + + request.setEntity(RequestConverters.createEntity(putMappingRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); + return request; + } + + static Request getMappings(GetMappingsRequest getMappingsRequest) throws IOException { + String[] indices = getMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getMappingsRequest.indices(); + String[] types = getMappingsRequest.types() == null ? Strings.EMPTY_ARRAY : getMappingsRequest.types(); + + Request request = new Request(HttpGet.METHOD_NAME, RequestConverters.endpoint(indices, "_mapping", types)); + + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withMasterTimeout(getMappingsRequest.masterNodeTimeout()); + parameters.withIndicesOptions(getMappingsRequest.indicesOptions()); + parameters.withLocal(getMappingsRequest.local()); + return request; + } + + static Request getFieldMapping(GetFieldMappingsRequest getFieldMappingsRequest) throws IOException { + String[] indices = getFieldMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.indices(); + String[] types = getFieldMappingsRequest.types() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.types(); + String[] fields = getFieldMappingsRequest.fields() == null ? 
Strings.EMPTY_ARRAY : getFieldMappingsRequest.fields(); + + String endpoint = new RequestConverters.EndpointBuilder().addCommaSeparatedPathParts(indices) + .addPathPartAsIs("_mapping").addCommaSeparatedPathParts(types) + .addPathPartAsIs("field").addCommaSeparatedPathParts(fields) + .build(); + + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withIndicesOptions(getFieldMappingsRequest.indicesOptions()); + parameters.withIncludeDefaults(getFieldMappingsRequest.includeDefaults()); + parameters.withLocal(getFieldMappingsRequest.local()); + return request; + } + + static Request refresh(RefreshRequest refreshRequest) { + String[] indices = refreshRequest.indices() == null ? Strings.EMPTY_ARRAY : refreshRequest.indices(); + Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_refresh")); + + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withIndicesOptions(refreshRequest.indicesOptions()); + return request; + } + + static Request flush(FlushRequest flushRequest) { + String[] indices = flushRequest.indices() == null ? Strings.EMPTY_ARRAY : flushRequest.indices(); + Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_flush")); + + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withIndicesOptions(flushRequest.indicesOptions()); + parameters.putParam("wait_if_ongoing", Boolean.toString(flushRequest.waitIfOngoing())); + parameters.putParam("force", Boolean.toString(flushRequest.force())); + return request; + } + + static Request flushSynced(SyncedFlushRequest syncedFlushRequest) { + String[] indices = syncedFlushRequest.indices() == null ? Strings.EMPTY_ARRAY : syncedFlushRequest.indices(); + Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_flush/synced")); + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withIndicesOptions(syncedFlushRequest.indicesOptions()); + return request; + } + + static Request forceMerge(ForceMergeRequest forceMergeRequest) { + String[] indices = forceMergeRequest.indices() == null ? Strings.EMPTY_ARRAY : forceMergeRequest.indices(); + Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_forcemerge")); + + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withIndicesOptions(forceMergeRequest.indicesOptions()); + parameters.putParam("max_num_segments", Integer.toString(forceMergeRequest.maxNumSegments())); + parameters.putParam("only_expunge_deletes", Boolean.toString(forceMergeRequest.onlyExpungeDeletes())); + parameters.putParam("flush", Boolean.toString(forceMergeRequest.flush())); + return request; + } + + static Request clearCache(ClearIndicesCacheRequest clearIndicesCacheRequest) { + String[] indices = clearIndicesCacheRequest.indices() == null ? 
Strings.EMPTY_ARRAY :clearIndicesCacheRequest.indices(); + Request request = new Request(HttpPost.METHOD_NAME, RequestConverters.endpoint(indices, "_cache/clear")); + + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withIndicesOptions(clearIndicesCacheRequest.indicesOptions()); + parameters.putParam("query", Boolean.toString(clearIndicesCacheRequest.queryCache())); + parameters.putParam("fielddata", Boolean.toString(clearIndicesCacheRequest.fieldDataCache())); + parameters.putParam("request", Boolean.toString(clearIndicesCacheRequest.requestCache())); + parameters.putParam("fields", String.join(",", clearIndicesCacheRequest.fields())); + return request; + } + + static Request existsAlias(GetAliasesRequest getAliasesRequest) { + if ((getAliasesRequest.indices() == null || getAliasesRequest.indices().length == 0) && + (getAliasesRequest.aliases() == null || getAliasesRequest.aliases().length == 0)) { + throw new IllegalArgumentException("existsAlias requires at least an alias or an index"); + } + String[] indices = getAliasesRequest.indices() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.indices(); + String[] aliases = getAliasesRequest.aliases() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.aliases(); + + Request request = new Request(HttpHead.METHOD_NAME, RequestConverters.endpoint(indices, "_alias", aliases)); + + RequestConverters.Params params = new RequestConverters.Params(request); + params.withIndicesOptions(getAliasesRequest.indicesOptions()); + params.withLocal(getAliasesRequest.local()); + return request; + } + + static Request split(ResizeRequest resizeRequest) throws IOException { + if (resizeRequest.getResizeType() != ResizeType.SPLIT) { + throw new IllegalArgumentException("Wrong resize type [" + resizeRequest.getResizeType() + "] for indices split request"); + } + return resize(resizeRequest); + } + + static Request shrink(ResizeRequest resizeRequest) throws IOException { + if (resizeRequest.getResizeType() != ResizeType.SHRINK) { + throw new IllegalArgumentException("Wrong resize type [" + resizeRequest.getResizeType() + "] for indices shrink request"); + } + return resize(resizeRequest); + } + + private static Request resize(ResizeRequest resizeRequest) throws IOException { + String endpoint = new RequestConverters.EndpointBuilder().addPathPart(resizeRequest.getSourceIndex()) + .addPathPartAsIs("_" + resizeRequest.getResizeType().name().toLowerCase(Locale.ROOT)) + .addPathPart(resizeRequest.getTargetIndexRequest().index()).build(); + Request request = new Request(HttpPut.METHOD_NAME, endpoint); + + RequestConverters.Params params = new RequestConverters.Params(request); + params.withTimeout(resizeRequest.timeout()); + params.withMasterTimeout(resizeRequest.masterNodeTimeout()); + params.withWaitForActiveShards(resizeRequest.getTargetIndexRequest().waitForActiveShards()); + + request.setEntity(RequestConverters.createEntity(resizeRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); + return request; + } + + static Request rollover(RolloverRequest rolloverRequest) throws IOException { + String endpoint = new RequestConverters.EndpointBuilder().addPathPart(rolloverRequest.getAlias()).addPathPartAsIs("_rollover") + .addPathPart(rolloverRequest.getNewIndexName()).build(); + Request request = new Request(HttpPost.METHOD_NAME, endpoint); + + RequestConverters.Params params = new RequestConverters.Params(request); + params.withTimeout(rolloverRequest.timeout()); + params.withMasterTimeout(rolloverRequest.masterNodeTimeout()); + 
params.withWaitForActiveShards(rolloverRequest.getCreateIndexRequest().waitForActiveShards()); + if (rolloverRequest.isDryRun()) { + params.putParam("dry_run", Boolean.TRUE.toString()); + } + + request.setEntity(RequestConverters.createEntity(rolloverRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); + return request; + } + + static Request getSettings(GetSettingsRequest getSettingsRequest) { + String[] indices = getSettingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getSettingsRequest.indices(); + String[] names = getSettingsRequest.names() == null ? Strings.EMPTY_ARRAY : getSettingsRequest.names(); + + String endpoint = RequestConverters.endpoint(indices, "_settings", names); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + + RequestConverters.Params params = new RequestConverters.Params(request); + params.withIndicesOptions(getSettingsRequest.indicesOptions()); + params.withLocal(getSettingsRequest.local()); + params.withIncludeDefaults(getSettingsRequest.includeDefaults()); + params.withMasterTimeout(getSettingsRequest.masterNodeTimeout()); + + return request; + } + + static Request getIndex(GetIndexRequest getIndexRequest) { + String[] indices = getIndexRequest.indices() == null ? Strings.EMPTY_ARRAY : getIndexRequest.indices(); + + String endpoint = RequestConverters.endpoint(indices); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + + RequestConverters.Params params = new RequestConverters.Params(request); + params.withIndicesOptions(getIndexRequest.indicesOptions()); + params.withLocal(getIndexRequest.local()); + params.withIncludeDefaults(getIndexRequest.includeDefaults()); + params.withHuman(getIndexRequest.humanReadable()); + params.withMasterTimeout(getIndexRequest.masterNodeTimeout()); + + return request; + } + + static Request indicesExist(GetIndexRequest getIndexRequest) { + // this can be called with no indices as argument by transport client, not via REST though + if (getIndexRequest.indices() == null || getIndexRequest.indices().length == 0) { + throw new IllegalArgumentException("indices are mandatory"); + } + String endpoint = RequestConverters.endpoint(getIndexRequest.indices(), ""); + Request request = new Request(HttpHead.METHOD_NAME, endpoint); + + RequestConverters.Params params = new RequestConverters.Params(request); + params.withLocal(getIndexRequest.local()); + params.withHuman(getIndexRequest.humanReadable()); + params.withIndicesOptions(getIndexRequest.indicesOptions()); + params.withIncludeDefaults(getIndexRequest.includeDefaults()); + return request; + } + + static Request indexPutSettings(UpdateSettingsRequest updateSettingsRequest) throws IOException { + String[] indices = updateSettingsRequest.indices() == null ? 
Strings.EMPTY_ARRAY : updateSettingsRequest.indices(); + Request request = new Request(HttpPut.METHOD_NAME, RequestConverters.endpoint(indices, "_settings")); + + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withTimeout(updateSettingsRequest.timeout()); + parameters.withMasterTimeout(updateSettingsRequest.masterNodeTimeout()); + parameters.withIndicesOptions(updateSettingsRequest.indicesOptions()); + parameters.withPreserveExisting(updateSettingsRequest.isPreserveExisting()); + + request.setEntity(RequestConverters.createEntity(updateSettingsRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); + return request; + } + + static Request putTemplate(PutIndexTemplateRequest putIndexTemplateRequest) throws IOException { + String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template") + .addPathPart(putIndexTemplateRequest.name()).build(); + Request request = new Request(HttpPut.METHOD_NAME, endpoint); + RequestConverters.Params params = new RequestConverters.Params(request); + params.withMasterTimeout(putIndexTemplateRequest.masterNodeTimeout()); + if (putIndexTemplateRequest.create()) { + params.putParam("create", Boolean.TRUE.toString()); + } + if (Strings.hasText(putIndexTemplateRequest.cause())) { + params.putParam("cause", putIndexTemplateRequest.cause()); + } + request.setEntity(RequestConverters.createEntity(putIndexTemplateRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); + return request; + } + + static Request validateQuery(ValidateQueryRequest validateQueryRequest) throws IOException { + String[] indices = validateQueryRequest.indices() == null ? Strings.EMPTY_ARRAY : validateQueryRequest.indices(); + String[] types = validateQueryRequest.types() == null || indices.length <= 0 ? Strings.EMPTY_ARRAY : validateQueryRequest.types(); + String endpoint = RequestConverters.endpoint(indices, types, "_validate/query"); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + RequestConverters.Params params = new RequestConverters.Params(request); + params.withIndicesOptions(validateQueryRequest.indicesOptions()); + params.putParam("explain", Boolean.toString(validateQueryRequest.explain())); + params.putParam("all_shards", Boolean.toString(validateQueryRequest.allShards())); + params.putParam("rewrite", Boolean.toString(validateQueryRequest.rewrite())); + request.setEntity(RequestConverters.createEntity(validateQueryRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); + return request; + } + + static Request getAlias(GetAliasesRequest getAliasesRequest) { + String[] indices = getAliasesRequest.indices() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.indices(); + String[] aliases = getAliasesRequest.aliases() == null ? 
Strings.EMPTY_ARRAY : getAliasesRequest.aliases(); + String endpoint = RequestConverters.endpoint(indices, "_alias", aliases); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + RequestConverters.Params params = new RequestConverters.Params(request); + params.withIndicesOptions(getAliasesRequest.indicesOptions()); + params.withLocal(getAliasesRequest.local()); + return request; + } + + static Request getTemplates(GetIndexTemplatesRequest getIndexTemplatesRequest) throws IOException { + String[] names = getIndexTemplatesRequest.names(); + String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template").addCommaSeparatedPathParts(names).build(); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + RequestConverters.Params params = new RequestConverters.Params(request); + params.withLocal(getIndexTemplatesRequest.local()); + params.withMasterTimeout(getIndexTemplatesRequest.masterNodeTimeout()); + return request; + } + + static Request analyze(AnalyzeRequest request) throws IOException { + RequestConverters.EndpointBuilder builder = new RequestConverters.EndpointBuilder(); + String index = request.index(); + if (index != null) { + builder.addPathPart(index); + } + builder.addPathPartAsIs("_analyze"); + Request req = new Request(HttpGet.METHOD_NAME, builder.build()); + req.setEntity(RequestConverters.createEntity(request, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); + return req; + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 840bc4f0c4d..8372f4b0fec 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -33,30 +33,7 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest; -import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; -import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; -import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; -import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; -import org.elasticsearch.action.admin.indices.flush.FlushRequest; -import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; -import org.elasticsearch.action.admin.indices.get.GetIndexRequest; -import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; -import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; -import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; -import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; -import 
org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; -import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; -import org.elasticsearch.action.admin.indices.shrink.ResizeRequest; -import org.elasticsearch.action.admin.indices.shrink.ResizeType; -import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequest; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; -import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.explain.ExplainRequest; @@ -130,165 +107,6 @@ final class RequestConverters { return request; } - static Request deleteIndex(DeleteIndexRequest deleteIndexRequest) { - String endpoint = endpoint(deleteIndexRequest.indices()); - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - - Params parameters = new Params(request); - parameters.withTimeout(deleteIndexRequest.timeout()); - parameters.withMasterTimeout(deleteIndexRequest.masterNodeTimeout()); - parameters.withIndicesOptions(deleteIndexRequest.indicesOptions()); - return request; - } - - static Request openIndex(OpenIndexRequest openIndexRequest) { - String endpoint = endpoint(openIndexRequest.indices(), "_open"); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - - Params parameters = new Params(request); - parameters.withTimeout(openIndexRequest.timeout()); - parameters.withMasterTimeout(openIndexRequest.masterNodeTimeout()); - parameters.withWaitForActiveShards(openIndexRequest.waitForActiveShards()); - parameters.withIndicesOptions(openIndexRequest.indicesOptions()); - return request; - } - - static Request closeIndex(CloseIndexRequest closeIndexRequest) { - String endpoint = endpoint(closeIndexRequest.indices(), "_close"); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - - Params parameters = new Params(request); - parameters.withTimeout(closeIndexRequest.timeout()); - parameters.withMasterTimeout(closeIndexRequest.masterNodeTimeout()); - parameters.withIndicesOptions(closeIndexRequest.indicesOptions()); - return request; - } - - static Request createIndex(CreateIndexRequest createIndexRequest) throws IOException { - String endpoint = endpoint(createIndexRequest.indices()); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - - Params parameters = new Params(request); - parameters.withTimeout(createIndexRequest.timeout()); - parameters.withMasterTimeout(createIndexRequest.masterNodeTimeout()); - parameters.withWaitForActiveShards(createIndexRequest.waitForActiveShards()); - - request.setEntity(createEntity(createIndexRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request updateAliases(IndicesAliasesRequest indicesAliasesRequest) throws IOException { - Request request = new Request(HttpPost.METHOD_NAME, "/_aliases"); - - Params parameters = new Params(request); - parameters.withTimeout(indicesAliasesRequest.timeout()); - parameters.withMasterTimeout(indicesAliasesRequest.masterNodeTimeout()); - - request.setEntity(createEntity(indicesAliasesRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request putMapping(PutMappingRequest putMappingRequest) throws IOException { - // The concreteIndex is an internal concept, not applicable to requests made over the REST API. 
- if (putMappingRequest.getConcreteIndex() != null) { - throw new IllegalArgumentException("concreteIndex cannot be set on PutMapping requests made over the REST API"); - } - - Request request = new Request(HttpPut.METHOD_NAME, endpoint(putMappingRequest.indices(), "_mapping", putMappingRequest.type())); - - Params parameters = new Params(request); - parameters.withTimeout(putMappingRequest.timeout()); - parameters.withMasterTimeout(putMappingRequest.masterNodeTimeout()); - - request.setEntity(createEntity(putMappingRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getMappings(GetMappingsRequest getMappingsRequest) throws IOException { - String[] indices = getMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getMappingsRequest.indices(); - String[] types = getMappingsRequest.types() == null ? Strings.EMPTY_ARRAY : getMappingsRequest.types(); - - Request request = new Request(HttpGet.METHOD_NAME, endpoint(indices, "_mapping", types)); - - Params parameters = new Params(request); - parameters.withMasterTimeout(getMappingsRequest.masterNodeTimeout()); - parameters.withIndicesOptions(getMappingsRequest.indicesOptions()); - parameters.withLocal(getMappingsRequest.local()); - return request; - } - - static Request getFieldMapping(GetFieldMappingsRequest getFieldMappingsRequest) throws IOException { - String[] indices = getFieldMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.indices(); - String[] types = getFieldMappingsRequest.types() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.types(); - String[] fields = getFieldMappingsRequest.fields() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.fields(); - - String endpoint = new EndpointBuilder().addCommaSeparatedPathParts(indices) - .addPathPartAsIs("_mapping").addCommaSeparatedPathParts(types) - .addPathPartAsIs("field").addCommaSeparatedPathParts(fields) - .build(); - - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - - Params parameters = new Params(request); - parameters.withIndicesOptions(getFieldMappingsRequest.indicesOptions()); - parameters.withIncludeDefaults(getFieldMappingsRequest.includeDefaults()); - parameters.withLocal(getFieldMappingsRequest.local()); - return request; - } - - static Request refresh(RefreshRequest refreshRequest) { - String[] indices = refreshRequest.indices() == null ? Strings.EMPTY_ARRAY : refreshRequest.indices(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint(indices, "_refresh")); - - Params parameters = new Params(request); - parameters.withIndicesOptions(refreshRequest.indicesOptions()); - return request; - } - - static Request flush(FlushRequest flushRequest) { - String[] indices = flushRequest.indices() == null ? Strings.EMPTY_ARRAY : flushRequest.indices(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint(indices, "_flush")); - - Params parameters = new Params(request); - parameters.withIndicesOptions(flushRequest.indicesOptions()); - parameters.putParam("wait_if_ongoing", Boolean.toString(flushRequest.waitIfOngoing())); - parameters.putParam("force", Boolean.toString(flushRequest.force())); - return request; - } - - static Request flushSynced(SyncedFlushRequest syncedFlushRequest) { - String[] indices = syncedFlushRequest.indices() == null ? 
Strings.EMPTY_ARRAY : syncedFlushRequest.indices(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint(indices, "_flush/synced")); - Params parameters = new Params(request); - parameters.withIndicesOptions(syncedFlushRequest.indicesOptions()); - return request; - } - - static Request forceMerge(ForceMergeRequest forceMergeRequest) { - String[] indices = forceMergeRequest.indices() == null ? Strings.EMPTY_ARRAY : forceMergeRequest.indices(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint(indices, "_forcemerge")); - - Params parameters = new Params(request); - parameters.withIndicesOptions(forceMergeRequest.indicesOptions()); - parameters.putParam("max_num_segments", Integer.toString(forceMergeRequest.maxNumSegments())); - parameters.putParam("only_expunge_deletes", Boolean.toString(forceMergeRequest.onlyExpungeDeletes())); - parameters.putParam("flush", Boolean.toString(forceMergeRequest.flush())); - return request; - } - - static Request clearCache(ClearIndicesCacheRequest clearIndicesCacheRequest) { - String[] indices = clearIndicesCacheRequest.indices() == null ? Strings.EMPTY_ARRAY :clearIndicesCacheRequest.indices(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint(indices, "_cache/clear")); - - Params parameters = new Params(request); - parameters.withIndicesOptions(clearIndicesCacheRequest.indicesOptions()); - parameters.putParam("query", Boolean.toString(clearIndicesCacheRequest.queryCache())); - parameters.putParam("fielddata", Boolean.toString(clearIndicesCacheRequest.fieldDataCache())); - parameters.putParam("request", Boolean.toString(clearIndicesCacheRequest.requestCache())); - parameters.putParam("fields", String.join(",", clearIndicesCacheRequest.fields())); - return request; - } - static Request info() { return new Request(HttpGet.METHOD_NAME, "/"); } @@ -609,22 +427,6 @@ final class RequestConverters { return request; } - static Request existsAlias(GetAliasesRequest getAliasesRequest) { - if ((getAliasesRequest.indices() == null || getAliasesRequest.indices().length == 0) && - (getAliasesRequest.aliases() == null || getAliasesRequest.aliases().length == 0)) { - throw new IllegalArgumentException("existsAlias requires at least an alias or an index"); - } - String[] indices = getAliasesRequest.indices() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.indices(); - String[] aliases = getAliasesRequest.aliases() == null ? 
Strings.EMPTY_ARRAY : getAliasesRequest.aliases(); - - Request request = new Request(HttpHead.METHOD_NAME, endpoint(indices, "_alias", aliases)); - - Params params = new Params(request); - params.withIndicesOptions(getAliasesRequest.indicesOptions()); - params.withLocal(getAliasesRequest.local()); - return request; - } - static Request explain(ExplainRequest explainRequest) throws IOException { Request request = new Request(HttpGet.METHOD_NAME, endpoint(explainRequest.index(), explainRequest.type(), explainRequest.id(), "_explain")); @@ -657,35 +459,6 @@ final class RequestConverters { return request; } - static Request split(ResizeRequest resizeRequest) throws IOException { - if (resizeRequest.getResizeType() != ResizeType.SPLIT) { - throw new IllegalArgumentException("Wrong resize type [" + resizeRequest.getResizeType() + "] for indices split request"); - } - return resize(resizeRequest); - } - - static Request shrink(ResizeRequest resizeRequest) throws IOException { - if (resizeRequest.getResizeType() != ResizeType.SHRINK) { - throw new IllegalArgumentException("Wrong resize type [" + resizeRequest.getResizeType() + "] for indices shrink request"); - } - return resize(resizeRequest); - } - - private static Request resize(ResizeRequest resizeRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPart(resizeRequest.getSourceIndex()) - .addPathPartAsIs("_" + resizeRequest.getResizeType().name().toLowerCase(Locale.ROOT)) - .addPathPart(resizeRequest.getTargetIndexRequest().index()).build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - - Params params = new Params(request); - params.withTimeout(resizeRequest.timeout()); - params.withMasterTimeout(resizeRequest.masterNodeTimeout()); - params.withWaitForActiveShards(resizeRequest.getTargetIndexRequest().waitForActiveShards()); - - request.setEntity(createEntity(resizeRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - static Request reindex(ReindexRequest reindexRequest) throws IOException { String endpoint = new EndpointBuilder().addPathPart("_reindex").build(); Request request = new Request(HttpPost.METHOD_NAME, endpoint); @@ -754,135 +527,6 @@ final class RequestConverters { return request; } - static Request rollover(RolloverRequest rolloverRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPart(rolloverRequest.getAlias()).addPathPartAsIs("_rollover") - .addPathPart(rolloverRequest.getNewIndexName()).build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - - Params params = new Params(request); - params.withTimeout(rolloverRequest.timeout()); - params.withMasterTimeout(rolloverRequest.masterNodeTimeout()); - params.withWaitForActiveShards(rolloverRequest.getCreateIndexRequest().waitForActiveShards()); - if (rolloverRequest.isDryRun()) { - params.putParam("dry_run", Boolean.TRUE.toString()); - } - - request.setEntity(createEntity(rolloverRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getSettings(GetSettingsRequest getSettingsRequest) { - String[] indices = getSettingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getSettingsRequest.indices(); - String[] names = getSettingsRequest.names() == null ? 
Strings.EMPTY_ARRAY : getSettingsRequest.names(); - - String endpoint = endpoint(indices, "_settings", names); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - - Params params = new Params(request); - params.withIndicesOptions(getSettingsRequest.indicesOptions()); - params.withLocal(getSettingsRequest.local()); - params.withIncludeDefaults(getSettingsRequest.includeDefaults()); - params.withMasterTimeout(getSettingsRequest.masterNodeTimeout()); - - return request; - } - - static Request getIndex(GetIndexRequest getIndexRequest) { - String[] indices = getIndexRequest.indices() == null ? Strings.EMPTY_ARRAY : getIndexRequest.indices(); - - String endpoint = endpoint(indices); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - - Params params = new Params(request); - params.withIndicesOptions(getIndexRequest.indicesOptions()); - params.withLocal(getIndexRequest.local()); - params.withIncludeDefaults(getIndexRequest.includeDefaults()); - params.withHuman(getIndexRequest.humanReadable()); - params.withMasterTimeout(getIndexRequest.masterNodeTimeout()); - - return request; - } - - static Request indicesExist(GetIndexRequest getIndexRequest) { - // this can be called with no indices as argument by transport client, not via REST though - if (getIndexRequest.indices() == null || getIndexRequest.indices().length == 0) { - throw new IllegalArgumentException("indices are mandatory"); - } - String endpoint = endpoint(getIndexRequest.indices(), ""); - Request request = new Request(HttpHead.METHOD_NAME, endpoint); - - Params params = new Params(request); - params.withLocal(getIndexRequest.local()); - params.withHuman(getIndexRequest.humanReadable()); - params.withIndicesOptions(getIndexRequest.indicesOptions()); - params.withIncludeDefaults(getIndexRequest.includeDefaults()); - return request; - } - - static Request indexPutSettings(UpdateSettingsRequest updateSettingsRequest) throws IOException { - String[] indices = updateSettingsRequest.indices() == null ? Strings.EMPTY_ARRAY : updateSettingsRequest.indices(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint(indices, "_settings")); - - Params parameters = new Params(request); - parameters.withTimeout(updateSettingsRequest.timeout()); - parameters.withMasterTimeout(updateSettingsRequest.masterNodeTimeout()); - parameters.withIndicesOptions(updateSettingsRequest.indicesOptions()); - parameters.withPreserveExisting(updateSettingsRequest.isPreserveExisting()); - - request.setEntity(createEntity(updateSettingsRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request putTemplate(PutIndexTemplateRequest putIndexTemplateRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_template").addPathPart(putIndexTemplateRequest.name()).build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - Params params = new Params(request); - params.withMasterTimeout(putIndexTemplateRequest.masterNodeTimeout()); - if (putIndexTemplateRequest.create()) { - params.putParam("create", Boolean.TRUE.toString()); - } - if (Strings.hasText(putIndexTemplateRequest.cause())) { - params.putParam("cause", putIndexTemplateRequest.cause()); - } - request.setEntity(createEntity(putIndexTemplateRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request validateQuery(ValidateQueryRequest validateQueryRequest) throws IOException { - String[] indices = validateQueryRequest.indices() == null ? 
Strings.EMPTY_ARRAY : validateQueryRequest.indices(); - String[] types = validateQueryRequest.types() == null || indices.length <= 0 ? Strings.EMPTY_ARRAY : validateQueryRequest.types(); - String endpoint = endpoint(indices, types, "_validate/query"); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - Params params = new Params(request); - params.withIndicesOptions(validateQueryRequest.indicesOptions()); - params.putParam("explain", Boolean.toString(validateQueryRequest.explain())); - params.putParam("all_shards", Boolean.toString(validateQueryRequest.allShards())); - params.putParam("rewrite", Boolean.toString(validateQueryRequest.rewrite())); - request.setEntity(createEntity(validateQueryRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getAlias(GetAliasesRequest getAliasesRequest) { - String[] indices = getAliasesRequest.indices() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.indices(); - String[] aliases = getAliasesRequest.aliases() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.aliases(); - String endpoint = endpoint(indices, "_alias", aliases); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - Params params = new Params(request); - params.withIndicesOptions(getAliasesRequest.indicesOptions()); - params.withLocal(getAliasesRequest.local()); - return request; - } - - static Request getTemplates(GetIndexTemplatesRequest getIndexTemplatesRequest) throws IOException { - String[] names = getIndexTemplatesRequest.names(); - String endpoint = new EndpointBuilder().addPathPartAsIs("_template").addCommaSeparatedPathParts(names).build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - Params params = new Params(request); - params.withLocal(getIndexTemplatesRequest.local()); - params.withMasterTimeout(getIndexTemplatesRequest.masterNodeTimeout()); - return request; - } - static Request putScript(PutStoredScriptRequest putStoredScriptRequest) throws IOException { String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(putStoredScriptRequest.id()).build(); Request request = new Request(HttpPost.METHOD_NAME, endpoint); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java new file mode 100644 index 00000000000..e9704105499 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java @@ -0,0 +1,893 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client; + +import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpHead; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; +import org.apache.lucene.util.LuceneTestCase; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.admin.indices.alias.Alias; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; +import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; +import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; +import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; +import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; +import org.elasticsearch.action.admin.indices.flush.FlushRequest; +import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; +import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; +import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; +import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; +import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; +import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; +import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; +import org.elasticsearch.action.admin.indices.shrink.ResizeRequest; +import org.elasticsearch.action.admin.indices.shrink.ResizeType; +import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequest; +import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; +import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.common.CheckedFunction; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.index.RandomCreateIndexGenerator; +import org.elasticsearch.test.ESTestCase; +import org.junit.Assert; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.StringJoiner; +import java.util.stream.Collectors; + +import static org.elasticsearch.index.RandomCreateIndexGenerator.randomAliases; +import static org.elasticsearch.index.RandomCreateIndexGenerator.randomCreateIndexRequest; +import static org.elasticsearch.index.RandomCreateIndexGenerator.randomIndexSettings; +import static org.elasticsearch.index.alias.RandomAliasActionsGenerator.randomAliasAction; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public class IndicesRequestConvertersTests extends ESTestCase { + + public void testAnalyzeRequest() throws Exception { + AnalyzeRequest 
indexAnalyzeRequest = new AnalyzeRequest() + .text("Here is some text") + .index("test_index") + .analyzer("test_analyzer"); + + Request request = IndicesRequestConverters.analyze(indexAnalyzeRequest); + assertThat(request.getEndpoint(), equalTo("/test_index/_analyze")); + RequestConvertersTests.assertToXContentBody(indexAnalyzeRequest, request.getEntity()); + + AnalyzeRequest analyzeRequest = new AnalyzeRequest() + .text("more text") + .analyzer("test_analyzer"); + assertThat(IndicesRequestConverters.analyze(analyzeRequest).getEndpoint(), equalTo("/_analyze")); + } + + public void testIndicesExist() { + String[] indices = RequestConvertersTests.randomIndicesNames(1, 10); + + GetIndexRequest getIndexRequest = new GetIndexRequest().indices(indices); + + Map expectedParams = new HashMap<>(); + RequestConvertersTests.setRandomIndicesOptions(getIndexRequest::indicesOptions, getIndexRequest::indicesOptions, expectedParams); + RequestConvertersTests.setRandomLocal(getIndexRequest, expectedParams); + RequestConvertersTests.setRandomHumanReadable(getIndexRequest, expectedParams); + RequestConvertersTests.setRandomIncludeDefaults(getIndexRequest, expectedParams); + + final Request request = IndicesRequestConverters.indicesExist(getIndexRequest); + + Assert.assertEquals(HttpHead.METHOD_NAME, request.getMethod()); + Assert.assertEquals("/" + String.join(",", indices), request.getEndpoint()); + Assert.assertThat(expectedParams, equalTo(request.getParameters())); + Assert.assertNull(request.getEntity()); + } + + public void testIndicesExistEmptyIndices() { + LuceneTestCase.expectThrows(IllegalArgumentException.class, () + -> IndicesRequestConverters.indicesExist(new GetIndexRequest())); + LuceneTestCase.expectThrows(IllegalArgumentException.class, () + -> IndicesRequestConverters.indicesExist(new GetIndexRequest().indices((String[]) null))); + } + + public void testCreateIndex() throws IOException { + CreateIndexRequest createIndexRequest = randomCreateIndexRequest(); + + Map expectedParams = new HashMap<>(); + RequestConvertersTests.setRandomTimeout(createIndexRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); + RequestConvertersTests.setRandomMasterTimeout(createIndexRequest, expectedParams); + RequestConvertersTests.setRandomWaitForActiveShards(createIndexRequest::waitForActiveShards, expectedParams); + + Request request = IndicesRequestConverters.createIndex(createIndexRequest); + Assert.assertEquals("/" + createIndexRequest.index(), request.getEndpoint()); + Assert.assertEquals(expectedParams, request.getParameters()); + Assert.assertEquals(HttpPut.METHOD_NAME, request.getMethod()); + RequestConvertersTests.assertToXContentBody(createIndexRequest, request.getEntity()); + } + + public void testCreateIndexNullIndex() { + ActionRequestValidationException validationException = new CreateIndexRequest(null).validate(); + Assert.assertNotNull(validationException); + } + + public void testUpdateAliases() throws IOException { + IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest(); + IndicesAliasesRequest.AliasActions aliasAction = randomAliasAction(); + indicesAliasesRequest.addAliasAction(aliasAction); + + Map expectedParams = new HashMap<>(); + RequestConvertersTests.setRandomTimeout(indicesAliasesRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); + RequestConvertersTests.setRandomMasterTimeout(indicesAliasesRequest, expectedParams); + + Request request = IndicesRequestConverters.updateAliases(indicesAliasesRequest); + 
Assert.assertEquals("/_aliases", request.getEndpoint()); + Assert.assertEquals(expectedParams, request.getParameters()); + RequestConvertersTests.assertToXContentBody(indicesAliasesRequest, request.getEntity()); + } + + public void testPutMapping() throws IOException { + PutMappingRequest putMappingRequest = new PutMappingRequest(); + + String[] indices = RequestConvertersTests.randomIndicesNames(0, 5); + putMappingRequest.indices(indices); + + String type = ESTestCase.randomAlphaOfLengthBetween(3, 10); + putMappingRequest.type(type); + + Map expectedParams = new HashMap<>(); + + RequestConvertersTests.setRandomTimeout(putMappingRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); + RequestConvertersTests.setRandomMasterTimeout(putMappingRequest, expectedParams); + + Request request = IndicesRequestConverters.putMapping(putMappingRequest); + StringJoiner endpoint = new StringJoiner("/", "/", ""); + String index = String.join(",", indices); + if (Strings.hasLength(index)) { + endpoint.add(index); + } + endpoint.add("_mapping"); + endpoint.add(type); + Assert.assertEquals(endpoint.toString(), request.getEndpoint()); + + Assert.assertEquals(expectedParams, request.getParameters()); + Assert.assertEquals(HttpPut.METHOD_NAME, request.getMethod()); + RequestConvertersTests.assertToXContentBody(putMappingRequest, request.getEntity()); + } + + public void testGetMapping() throws IOException { + GetMappingsRequest getMappingRequest = new GetMappingsRequest(); + + String[] indices = Strings.EMPTY_ARRAY; + if (ESTestCase.randomBoolean()) { + indices = RequestConvertersTests.randomIndicesNames(0, 5); + getMappingRequest.indices(indices); + } else if (ESTestCase.randomBoolean()) { + getMappingRequest.indices((String[]) null); + } + + String type = null; + if (ESTestCase.randomBoolean()) { + type = ESTestCase.randomAlphaOfLengthBetween(3, 10); + getMappingRequest.types(type); + } else if (ESTestCase.randomBoolean()) { + getMappingRequest.types((String[]) null); + } + + Map expectedParams = new HashMap<>(); + + RequestConvertersTests.setRandomIndicesOptions(getMappingRequest::indicesOptions, + getMappingRequest::indicesOptions, expectedParams); + RequestConvertersTests.setRandomMasterTimeout(getMappingRequest, expectedParams); + RequestConvertersTests.setRandomLocal(getMappingRequest, expectedParams); + + Request request = IndicesRequestConverters.getMappings(getMappingRequest); + StringJoiner endpoint = new StringJoiner("/", "/", ""); + String index = String.join(",", indices); + if (Strings.hasLength(index)) { + endpoint.add(index); + } + endpoint.add("_mapping"); + if (type != null) { + endpoint.add(type); + } + Assert.assertThat(endpoint.toString(), equalTo(request.getEndpoint())); + + Assert.assertThat(expectedParams, equalTo(request.getParameters())); + Assert.assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); + } + + public void testGetFieldMapping() throws IOException { + GetFieldMappingsRequest getFieldMappingsRequest = new GetFieldMappingsRequest(); + + String[] indices = Strings.EMPTY_ARRAY; + if (ESTestCase.randomBoolean()) { + indices = RequestConvertersTests.randomIndicesNames(0, 5); + getFieldMappingsRequest.indices(indices); + } else if (ESTestCase.randomBoolean()) { + getFieldMappingsRequest.indices((String[]) null); + } + + String type = null; + if (ESTestCase.randomBoolean()) { + type = ESTestCase.randomAlphaOfLengthBetween(3, 10); + getFieldMappingsRequest.types(type); + } else if (ESTestCase.randomBoolean()) { + 
+            getFieldMappingsRequest.types((String[]) null);
+        }
+
+        String[] fields = null;
+        if (ESTestCase.randomBoolean()) {
+            fields = new String[ESTestCase.randomIntBetween(1, 5)];
+            for (int i = 0; i < fields.length; i++) {
+                fields[i] = ESTestCase.randomAlphaOfLengthBetween(3, 10);
+            }
+            getFieldMappingsRequest.fields(fields);
+        } else if (ESTestCase.randomBoolean()) {
+            getFieldMappingsRequest.fields((String[]) null);
+        }
+
+        Map<String, String> expectedParams = new HashMap<>();
+
+        RequestConvertersTests.setRandomIndicesOptions(getFieldMappingsRequest::indicesOptions, getFieldMappingsRequest::indicesOptions,
+            expectedParams);
+        RequestConvertersTests.setRandomLocal(getFieldMappingsRequest::local, expectedParams);
+
+        Request request = IndicesRequestConverters.getFieldMapping(getFieldMappingsRequest);
+        StringJoiner endpoint = new StringJoiner("/", "/", "");
+        String index = String.join(",", indices);
+        if (Strings.hasLength(index)) {
+            endpoint.add(index);
+        }
+        endpoint.add("_mapping");
+        if (type != null) {
+            endpoint.add(type);
+        }
+        endpoint.add("field");
+        if (fields != null) {
+            endpoint.add(String.join(",", fields));
+        }
+        Assert.assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
+
+        Assert.assertThat(expectedParams, equalTo(request.getParameters()));
+        Assert.assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod()));
+    }
+
+    public void testDeleteIndex() {
+        String[] indices = RequestConvertersTests.randomIndicesNames(0, 5);
+        DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(indices);
+
+        Map<String, String> expectedParams = new HashMap<>();
+        RequestConvertersTests.setRandomTimeout(deleteIndexRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);
+        RequestConvertersTests.setRandomMasterTimeout(deleteIndexRequest, expectedParams);
+
+        RequestConvertersTests.setRandomIndicesOptions(deleteIndexRequest::indicesOptions, deleteIndexRequest::indicesOptions,
+            expectedParams);
+
+        Request request = IndicesRequestConverters.deleteIndex(deleteIndexRequest);
+        Assert.assertEquals("/" + String.join(",", indices), request.getEndpoint());
+        Assert.assertEquals(expectedParams, request.getParameters());
+        Assert.assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
+        Assert.assertNull(request.getEntity());
+    }
+
+    public void testGetSettings() throws IOException {
+        String[] indicesUnderTest = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5);
+
+        GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices(indicesUnderTest);
+
+        Map<String, String> expectedParams = new HashMap<>();
+        RequestConvertersTests.setRandomMasterTimeout(getSettingsRequest, expectedParams);
+        RequestConvertersTests.setRandomIndicesOptions(getSettingsRequest::indicesOptions, getSettingsRequest::indicesOptions,
+            expectedParams);
+
+        RequestConvertersTests.setRandomLocal(getSettingsRequest, expectedParams);
+
+        if (ESTestCase.randomBoolean()) {
+            // the request object will not have include_defaults present unless it is set to
+            // true
+            getSettingsRequest.includeDefaults(ESTestCase.randomBoolean());
+            if (getSettingsRequest.includeDefaults()) {
+                expectedParams.put("include_defaults", Boolean.toString(true));
+            }
+        }
+
+        StringJoiner endpoint = new StringJoiner("/", "/", "");
+        if (indicesUnderTest != null && indicesUnderTest.length > 0) {
+            endpoint.add(String.join(",", indicesUnderTest));
+        }
+        endpoint.add("_settings");
+
+        if (ESTestCase.randomBoolean()) {
+            String[] names = ESTestCase.randomBoolean() ? null : new String[ESTestCase.randomIntBetween(0, 3)];
+            if (names != null) {
+                for (int x = 0; x < names.length; x++) {
+                    names[x] = ESTestCase.randomAlphaOfLengthBetween(3, 10);
+                }
+            }
+            getSettingsRequest.names(names);
+            if (names != null && names.length > 0) {
+                endpoint.add(String.join(",", names));
+            }
+        }
+
+        Request request = IndicesRequestConverters.getSettings(getSettingsRequest);
+
+        Assert.assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
+        Assert.assertThat(request.getParameters(), equalTo(expectedParams));
+        Assert.assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME));
+        Assert.assertThat(request.getEntity(), nullValue());
+    }
+
+    public void testGetIndex() throws IOException {
+        String[] indicesUnderTest = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5);
+
+        GetIndexRequest getIndexRequest = new GetIndexRequest().indices(indicesUnderTest);
+
+        Map<String, String> expectedParams = new HashMap<>();
+        RequestConvertersTests.setRandomMasterTimeout(getIndexRequest, expectedParams);
+        RequestConvertersTests.setRandomIndicesOptions(getIndexRequest::indicesOptions, getIndexRequest::indicesOptions, expectedParams);
+        RequestConvertersTests.setRandomLocal(getIndexRequest, expectedParams);
+        RequestConvertersTests.setRandomHumanReadable(getIndexRequest, expectedParams);
+
+        if (ESTestCase.randomBoolean()) {
+            // the request object will not have include_defaults present unless it is set to
+            // true
+            getIndexRequest.includeDefaults(ESTestCase.randomBoolean());
+            if (getIndexRequest.includeDefaults()) {
+                expectedParams.put("include_defaults", Boolean.toString(true));
+            }
+        }
+
+        StringJoiner endpoint = new StringJoiner("/", "/", "");
+        if (indicesUnderTest != null && indicesUnderTest.length > 0) {
+            endpoint.add(String.join(",", indicesUnderTest));
+        }
+
+        Request request = IndicesRequestConverters.getIndex(getIndexRequest);
+
+        Assert.assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
+        Assert.assertThat(request.getParameters(), equalTo(expectedParams));
+        Assert.assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME));
+        Assert.assertThat(request.getEntity(), nullValue());
+    }
+
+    public void testDeleteIndexEmptyIndices() {
+        String[] indices = ESTestCase.randomBoolean() ? null : Strings.EMPTY_ARRAY;
+        ActionRequestValidationException validationException = new DeleteIndexRequest(indices).validate();
+        Assert.assertNotNull(validationException);
+    }
+
+    public void testOpenIndex() {
+        String[] indices = RequestConvertersTests.randomIndicesNames(1, 5);
+        OpenIndexRequest openIndexRequest = new OpenIndexRequest(indices);
+        openIndexRequest.indices(indices);
+
+        Map<String, String> expectedParams = new HashMap<>();
+        RequestConvertersTests.setRandomTimeout(openIndexRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);
+        RequestConvertersTests.setRandomMasterTimeout(openIndexRequest, expectedParams);
+        RequestConvertersTests.setRandomIndicesOptions(openIndexRequest::indicesOptions, openIndexRequest::indicesOptions, expectedParams);
+        RequestConvertersTests.setRandomWaitForActiveShards(openIndexRequest::waitForActiveShards, expectedParams);
+
+        Request request = IndicesRequestConverters.openIndex(openIndexRequest);
+        StringJoiner endpoint = new StringJoiner("/", "/", "").add(String.join(",", indices)).add("_open");
+        Assert.assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
+        Assert.assertThat(expectedParams, equalTo(request.getParameters()));
+        Assert.assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME));
+        Assert.assertThat(request.getEntity(), nullValue());
+    }
+
+    public void testOpenIndexEmptyIndices() {
+        String[] indices = ESTestCase.randomBoolean() ? null : Strings.EMPTY_ARRAY;
+        ActionRequestValidationException validationException = new OpenIndexRequest(indices).validate();
+        Assert.assertNotNull(validationException);
+    }
+
+    public void testCloseIndex() {
+        String[] indices = RequestConvertersTests.randomIndicesNames(1, 5);
+        CloseIndexRequest closeIndexRequest = new CloseIndexRequest(indices);
+
+        Map<String, String> expectedParams = new HashMap<>();
+        RequestConvertersTests.setRandomTimeout(closeIndexRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);
+        RequestConvertersTests.setRandomMasterTimeout(closeIndexRequest, expectedParams);
+        RequestConvertersTests.setRandomIndicesOptions(closeIndexRequest::indicesOptions, closeIndexRequest::indicesOptions,
+            expectedParams);
+
+        Request request = IndicesRequestConverters.closeIndex(closeIndexRequest);
+        StringJoiner endpoint = new StringJoiner("/", "/", "").add(String.join(",", indices)).add("_close");
+        Assert.assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
+        Assert.assertThat(expectedParams, equalTo(request.getParameters()));
+        Assert.assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME));
+        Assert.assertThat(request.getEntity(), nullValue());
+    }
+
+    public void testCloseIndexEmptyIndices() {
+        String[] indices = ESTestCase.randomBoolean() ? null : Strings.EMPTY_ARRAY;
+        ActionRequestValidationException validationException = new CloseIndexRequest(indices).validate();
+        Assert.assertNotNull(validationException);
+    }
+
+    public void testRefresh() {
+        String[] indices = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5);
+        RefreshRequest refreshRequest;
+        if (ESTestCase.randomBoolean()) {
+            refreshRequest = new RefreshRequest(indices);
+        } else {
+            refreshRequest = new RefreshRequest();
+            refreshRequest.indices(indices);
+        }
+        Map<String, String> expectedParams = new HashMap<>();
+        RequestConvertersTests.setRandomIndicesOptions(refreshRequest::indicesOptions, refreshRequest::indicesOptions, expectedParams);
+        Request request = IndicesRequestConverters.refresh(refreshRequest);
+        StringJoiner endpoint = new StringJoiner("/", "/", "");
+        if (indices != null && indices.length > 0) {
+            endpoint.add(String.join(",", indices));
+        }
+        endpoint.add("_refresh");
+        Assert.assertThat(request.getEndpoint(), equalTo(endpoint.toString()));
+        Assert.assertThat(request.getParameters(), equalTo(expectedParams));
+        Assert.assertThat(request.getEntity(), nullValue());
+        Assert.assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME));
+    }
+
+    public void testFlush() {
+        String[] indices = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5);
+        FlushRequest flushRequest;
+        if (ESTestCase.randomBoolean()) {
+            flushRequest = new FlushRequest(indices);
+        } else {
+            flushRequest = new FlushRequest();
+            flushRequest.indices(indices);
+        }
+        Map<String, String> expectedParams = new HashMap<>();
+        RequestConvertersTests.setRandomIndicesOptions(flushRequest::indicesOptions, flushRequest::indicesOptions, expectedParams);
+        if (ESTestCase.randomBoolean()) {
+            flushRequest.force(ESTestCase.randomBoolean());
+        }
+        expectedParams.put("force", Boolean.toString(flushRequest.force()));
+        if (ESTestCase.randomBoolean()) {
+            flushRequest.waitIfOngoing(ESTestCase.randomBoolean());
+        }
+        expectedParams.put("wait_if_ongoing", Boolean.toString(flushRequest.waitIfOngoing()));
+
+        Request request = IndicesRequestConverters.flush(flushRequest);
+        StringJoiner endpoint = new StringJoiner("/", "/", "");
+        if (indices != null && indices.length > 0) {
+            endpoint.add(String.join(",", indices));
+        }
+        endpoint.add("_flush");
+        Assert.assertThat(request.getEndpoint(), equalTo(endpoint.toString()));
+        Assert.assertThat(request.getParameters(), equalTo(expectedParams));
+        Assert.assertThat(request.getEntity(), nullValue());
+        Assert.assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME));
+    }
+
+    public void testSyncedFlush() {
+        String[] indices = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5);
+        SyncedFlushRequest syncedFlushRequest;
+        if (ESTestCase.randomBoolean()) {
+            syncedFlushRequest = new SyncedFlushRequest(indices);
+        } else {
+            syncedFlushRequest = new SyncedFlushRequest();
+            syncedFlushRequest.indices(indices);
+        }
+        Map<String, String> expectedParams = new HashMap<>();
+        RequestConvertersTests.setRandomIndicesOptions(syncedFlushRequest::indicesOptions, syncedFlushRequest::indicesOptions,
+            expectedParams);
+        Request request = IndicesRequestConverters.flushSynced(syncedFlushRequest);
+        StringJoiner endpoint = new StringJoiner("/", "/", "");
+        if (indices != null && indices.length > 0) {
+            endpoint.add(String.join(",", indices));
+        }
+        endpoint.add("_flush/synced");
+        Assert.assertThat(request.getEndpoint(), equalTo(endpoint.toString()));
+        Assert.assertThat(request.getParameters(), equalTo(expectedParams));
+        Assert.assertThat(request.getEntity(), nullValue());
+        Assert.assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME));
+    }
+
+    public void testForceMerge() {
+        String[] indices = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5);
+        ForceMergeRequest forceMergeRequest;
+        if (ESTestCase.randomBoolean()) {
+            forceMergeRequest = new ForceMergeRequest(indices);
+        } else {
+            forceMergeRequest = new ForceMergeRequest();
+            forceMergeRequest.indices(indices);
+        }
+
+        Map<String, String> expectedParams = new HashMap<>();
+        RequestConvertersTests.setRandomIndicesOptions(forceMergeRequest::indicesOptions, forceMergeRequest::indicesOptions,
+            expectedParams);
+        if (ESTestCase.randomBoolean()) {
+            forceMergeRequest.maxNumSegments(ESTestCase.randomInt());
+        }
+        expectedParams.put("max_num_segments", Integer.toString(forceMergeRequest.maxNumSegments()));
+        if (ESTestCase.randomBoolean()) {
+            forceMergeRequest.onlyExpungeDeletes(ESTestCase.randomBoolean());
+        }
+        expectedParams.put("only_expunge_deletes", Boolean.toString(forceMergeRequest.onlyExpungeDeletes()));
+        if (ESTestCase.randomBoolean()) {
+            forceMergeRequest.flush(ESTestCase.randomBoolean());
+        }
+        expectedParams.put("flush", Boolean.toString(forceMergeRequest.flush()));
+
+        Request request = IndicesRequestConverters.forceMerge(forceMergeRequest);
+        StringJoiner endpoint = new StringJoiner("/", "/", "");
+        if (indices != null && indices.length > 0) {
+            endpoint.add(String.join(",", indices));
+        }
+        endpoint.add("_forcemerge");
+        Assert.assertThat(request.getEndpoint(), equalTo(endpoint.toString()));
+        Assert.assertThat(request.getParameters(), equalTo(expectedParams));
+        Assert.assertThat(request.getEntity(), nullValue());
+        Assert.assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME));
+    }
+
+    public void testClearCache() {
+        String[] indices = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5);
+        ClearIndicesCacheRequest clearIndicesCacheRequest;
+        if (ESTestCase.randomBoolean()) {
+            clearIndicesCacheRequest = new ClearIndicesCacheRequest(indices);
+        } else {
+            clearIndicesCacheRequest = new ClearIndicesCacheRequest();
+            clearIndicesCacheRequest.indices(indices);
+        }
+        Map<String, String> expectedParams = new HashMap<>();
+        RequestConvertersTests.setRandomIndicesOptions(clearIndicesCacheRequest::indicesOptions, clearIndicesCacheRequest::indicesOptions,
+            expectedParams);
+        if (ESTestCase.randomBoolean()) {
+            clearIndicesCacheRequest.queryCache(ESTestCase.randomBoolean());
+        }
+        expectedParams.put("query", Boolean.toString(clearIndicesCacheRequest.queryCache()));
+        if (ESTestCase.randomBoolean()) {
+            clearIndicesCacheRequest.fieldDataCache(ESTestCase.randomBoolean());
+        }
+        expectedParams.put("fielddata", Boolean.toString(clearIndicesCacheRequest.fieldDataCache()));
+        if (ESTestCase.randomBoolean()) {
+            clearIndicesCacheRequest.requestCache(ESTestCase.randomBoolean());
+        }
+        expectedParams.put("request", Boolean.toString(clearIndicesCacheRequest.requestCache()));
+        if (ESTestCase.randomBoolean()) {
+            clearIndicesCacheRequest.fields(RequestConvertersTests.randomIndicesNames(1, 5));
+            expectedParams.put("fields", String.join(",", clearIndicesCacheRequest.fields()));
+        }
+
+        Request request = IndicesRequestConverters.clearCache(clearIndicesCacheRequest);
+        StringJoiner endpoint = new StringJoiner("/", "/", "");
+        if (indices != null && indices.length > 0) {
+            endpoint.add(String.join(",", indices));
+        }
+        endpoint.add("_cache/clear");
+        Assert.assertThat(request.getEndpoint(), equalTo(endpoint.toString()));
+        Assert.assertThat(request.getParameters(), equalTo(expectedParams));
+        Assert.assertThat(request.getEntity(), nullValue());
+        Assert.assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME));
+    }
+
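+    // The endpoint re-derivation used throughout this class reduces to: optional
+    // comma-joined index list, then the API suffix. A minimal sketch of what the
+    // converter is expected to produce for concrete (hypothetical) index names:
+    //
+    //     ClearIndicesCacheRequest example = new ClearIndicesCacheRequest("idx1", "idx2");
+    //     Request converted = IndicesRequestConverters.clearCache(example);
+    //     // converted.getMethod()   -> "POST"
+    //     // converted.getEndpoint() -> "/idx1,idx2/_cache/clear"
+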
+    public void testExistsAlias() {
+        GetAliasesRequest getAliasesRequest = new GetAliasesRequest();
+        String[] indices = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5);
+        getAliasesRequest.indices(indices);
+        // the HEAD endpoint requires at least an alias or an index
+        boolean hasIndices = indices != null && indices.length > 0;
+        String[] aliases;
+        if (hasIndices) {
+            aliases = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5);
+        } else {
+            aliases = RequestConvertersTests.randomIndicesNames(1, 5);
+        }
+        getAliasesRequest.aliases(aliases);
+        Map<String, String> expectedParams = new HashMap<>();
+        RequestConvertersTests.setRandomLocal(getAliasesRequest, expectedParams);
+        RequestConvertersTests.setRandomIndicesOptions(getAliasesRequest::indicesOptions, getAliasesRequest::indicesOptions,
+            expectedParams);
+
+        Request request = IndicesRequestConverters.existsAlias(getAliasesRequest);
+        StringJoiner expectedEndpoint = new StringJoiner("/", "/", "");
+        if (indices != null && indices.length > 0) {
+            expectedEndpoint.add(String.join(",", indices));
+        }
+        expectedEndpoint.add("_alias");
+        if (aliases != null && aliases.length > 0) {
+            expectedEndpoint.add(String.join(",", aliases));
+        }
+        Assert.assertEquals(HttpHead.METHOD_NAME, request.getMethod());
+        Assert.assertEquals(expectedEndpoint.toString(), request.getEndpoint());
+        Assert.assertEquals(expectedParams, request.getParameters());
+        Assert.assertNull(request.getEntity());
+    }
+
+    public void testExistsAliasNoAliasNoIndex() {
+        {
+            GetAliasesRequest getAliasesRequest = new GetAliasesRequest();
+            IllegalArgumentException iae = LuceneTestCase.expectThrows(IllegalArgumentException.class,
+                () -> IndicesRequestConverters.existsAlias(getAliasesRequest));
+            Assert.assertEquals("existsAlias requires at least an alias or an index", iae.getMessage());
+        }
+        {
+            GetAliasesRequest getAliasesRequest = new GetAliasesRequest((String[]) null);
+            getAliasesRequest.indices((String[]) null);
+            IllegalArgumentException iae = LuceneTestCase.expectThrows(IllegalArgumentException.class,
+                () -> IndicesRequestConverters.existsAlias(getAliasesRequest));
+            Assert.assertEquals("existsAlias requires at least an alias or an index", iae.getMessage());
+        }
+    }
+
+    public void testSplit() throws IOException {
+        resizeTest(ResizeType.SPLIT, IndicesRequestConverters::split);
+    }
+
+    public void testSplitWrongResizeType() {
+        ResizeRequest resizeRequest = new ResizeRequest("target", "source");
+        resizeRequest.setResizeType(ResizeType.SHRINK);
+        IllegalArgumentException iae = LuceneTestCase.expectThrows(IllegalArgumentException.class, ()
+            -> IndicesRequestConverters.split(resizeRequest));
+        Assert.assertEquals("Wrong resize type [SHRINK] for indices split request", iae.getMessage());
+    }
+
+    public void testShrinkWrongResizeType() {
+        ResizeRequest resizeRequest = new ResizeRequest("target", "source");
+        resizeRequest.setResizeType(ResizeType.SPLIT);
+        IllegalArgumentException iae = LuceneTestCase.expectThrows(IllegalArgumentException.class, ()
+            -> IndicesRequestConverters.shrink(resizeRequest));
+        Assert.assertEquals("Wrong resize type [SPLIT] for indices shrink request", iae.getMessage());
+    }
+
+    public void testShrink() throws IOException {
+        resizeTest(ResizeType.SHRINK, IndicesRequestConverters::shrink);
+    }
+
+    private void resizeTest(ResizeType resizeType, CheckedFunction<ResizeRequest, Request, IOException> function)
+        throws IOException {
+        String[] indices = RequestConvertersTests.randomIndicesNames(2, 2);
+        ResizeRequest resizeRequest = new ResizeRequest(indices[0], indices[1]);
+        resizeRequest.setResizeType(resizeType);
+        Map<String, String> expectedParams = new HashMap<>();
+        RequestConvertersTests.setRandomMasterTimeout(resizeRequest, expectedParams);
+        RequestConvertersTests.setRandomTimeout(resizeRequest::timeout, resizeRequest.timeout(), expectedParams);
+
+        if (ESTestCase.randomBoolean()) {
+            CreateIndexRequest createIndexRequest = new CreateIndexRequest(ESTestCase.randomAlphaOfLengthBetween(3, 10));
+            if (ESTestCase.randomBoolean()) {
+                createIndexRequest.settings(randomIndexSettings());
+            }
+            if (ESTestCase.randomBoolean()) {
+                randomAliases(createIndexRequest);
+            }
+            resizeRequest.setTargetIndex(createIndexRequest);
+        }
+        RequestConvertersTests.setRandomWaitForActiveShards(resizeRequest::setWaitForActiveShards, expectedParams);
+
+        Request request = function.apply(resizeRequest);
+        Assert.assertEquals(HttpPut.METHOD_NAME, request.getMethod());
+        String expectedEndpoint = "/" + resizeRequest.getSourceIndex() + "/_" + resizeType.name().toLowerCase(Locale.ROOT) + "/"
+            + resizeRequest.getTargetIndexRequest().index();
+        Assert.assertEquals(expectedEndpoint, request.getEndpoint());
+        Assert.assertEquals(expectedParams, request.getParameters());
+        RequestConvertersTests.assertToXContentBody(resizeRequest, request.getEntity());
+    }
+
+    public void testRollover() throws IOException {
+        RolloverRequest rolloverRequest = new RolloverRequest(ESTestCase.randomAlphaOfLengthBetween(3, 10),
+            ESTestCase.randomBoolean() ? null : ESTestCase.randomAlphaOfLengthBetween(3, 10));
+        Map<String, String> expectedParams = new HashMap<>();
+        RequestConvertersTests.setRandomTimeout(rolloverRequest::timeout, rolloverRequest.timeout(), expectedParams);
+        RequestConvertersTests.setRandomMasterTimeout(rolloverRequest, expectedParams);
+        if (ESTestCase.randomBoolean()) {
+            rolloverRequest.dryRun(ESTestCase.randomBoolean());
+            if (rolloverRequest.isDryRun()) {
+                expectedParams.put("dry_run", "true");
+            }
+        }
+        if (ESTestCase.randomBoolean()) {
+            rolloverRequest.addMaxIndexAgeCondition(new TimeValue(ESTestCase.randomNonNegativeLong()));
+        }
+        if (ESTestCase.randomBoolean()) {
+            String type = ESTestCase.randomAlphaOfLengthBetween(3, 10);
+            rolloverRequest.getCreateIndexRequest().mapping(type, RandomCreateIndexGenerator.randomMapping(type));
+        }
+        if (ESTestCase.randomBoolean()) {
+            RandomCreateIndexGenerator.randomAliases(rolloverRequest.getCreateIndexRequest());
+        }
+        if (ESTestCase.randomBoolean()) {
+            rolloverRequest.getCreateIndexRequest().settings(RandomCreateIndexGenerator.randomIndexSettings());
+        }
+        RequestConvertersTests.setRandomWaitForActiveShards(rolloverRequest.getCreateIndexRequest()::waitForActiveShards, expectedParams);
+
+        Request request = IndicesRequestConverters.rollover(rolloverRequest);
+        if (rolloverRequest.getNewIndexName() == null) {
+            Assert.assertEquals("/" + rolloverRequest.getAlias() + "/_rollover", request.getEndpoint());
+        } else {
+            Assert.assertEquals("/" + rolloverRequest.getAlias() + "/_rollover/" + rolloverRequest.getNewIndexName(),
+                request.getEndpoint());
+        }
+        Assert.assertEquals(HttpPost.METHOD_NAME, request.getMethod());
+        RequestConvertersTests.assertToXContentBody(rolloverRequest, request.getEntity());
+        Assert.assertEquals(expectedParams, request.getParameters());
+    }
+
+    public void testGetAlias() {
+        GetAliasesRequest getAliasesRequest = new GetAliasesRequest();
+
+        Map<String, String> expectedParams = new HashMap<>();
+        RequestConvertersTests.setRandomLocal(getAliasesRequest, expectedParams);
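+        // both the local flag and the indices options surface as query-string
+        // parameters, which is what expectedParams captures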
+        RequestConvertersTests.setRandomIndicesOptions(getAliasesRequest::indicesOptions, getAliasesRequest::indicesOptions,
+            expectedParams);
+
+        String[] indices = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 2);
+        String[] aliases = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 2);
+        getAliasesRequest.indices(indices);
+        getAliasesRequest.aliases(aliases);
+
+        Request request = IndicesRequestConverters.getAlias(getAliasesRequest);
+        StringJoiner expectedEndpoint = new StringJoiner("/", "/", "");
+
+        if (false == CollectionUtils.isEmpty(indices)) {
+            expectedEndpoint.add(String.join(",", indices));
+        }
+        expectedEndpoint.add("_alias");
+
+        if (false == CollectionUtils.isEmpty(aliases)) {
+            expectedEndpoint.add(String.join(",", aliases));
+        }
+
+        Assert.assertEquals(HttpGet.METHOD_NAME, request.getMethod());
+        Assert.assertEquals(expectedEndpoint.toString(), request.getEndpoint());
+        Assert.assertEquals(expectedParams, request.getParameters());
+        Assert.assertNull(request.getEntity());
+    }
+
+    public void testIndexPutSettings() throws IOException {
+        String[] indices = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 2);
+        UpdateSettingsRequest updateSettingsRequest = new UpdateSettingsRequest(indices);
+        Map<String, String> expectedParams = new HashMap<>();
+        RequestConvertersTests.setRandomMasterTimeout(updateSettingsRequest, expectedParams);
+        RequestConvertersTests.setRandomTimeout(updateSettingsRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);
+        RequestConvertersTests.setRandomIndicesOptions(updateSettingsRequest::indicesOptions, updateSettingsRequest::indicesOptions,
+            expectedParams);
+        if (ESTestCase.randomBoolean()) {
+            updateSettingsRequest.setPreserveExisting(ESTestCase.randomBoolean());
+            if (updateSettingsRequest.isPreserveExisting()) {
+                expectedParams.put("preserve_existing", "true");
+            }
+        }
+
+        Request request = IndicesRequestConverters.indexPutSettings(updateSettingsRequest);
+        StringJoiner endpoint = new StringJoiner("/", "/", "");
+        if (indices != null && indices.length > 0) {
+            endpoint.add(String.join(",", indices));
+        }
+        endpoint.add("_settings");
+        Assert.assertThat(endpoint.toString(), equalTo(request.getEndpoint()));
+        Assert.assertEquals(HttpPut.METHOD_NAME, request.getMethod());
+        RequestConvertersTests.assertToXContentBody(updateSettingsRequest, request.getEntity());
+        Assert.assertEquals(expectedParams, request.getParameters());
+    }
+
+    public void testPutTemplateRequest() throws Exception {
+        Map<String, String> names = new HashMap<>();
+        names.put("log", "log");
+        names.put("template#1", "template%231");
+        names.put("-#template", "-%23template");
+        names.put("foo^bar", "foo%5Ebar");
+
+        PutIndexTemplateRequest putTemplateRequest = new PutIndexTemplateRequest().name(ESTestCase.randomFrom(names.keySet()))
+            .patterns(Arrays.asList(ESTestCase.generateRandomStringArray(20, 100, false, false)));
+        if (ESTestCase.randomBoolean()) {
+            putTemplateRequest.order(ESTestCase.randomInt());
+        }
+        if (ESTestCase.randomBoolean()) {
+            putTemplateRequest.version(ESTestCase.randomInt());
+        }
+        if (ESTestCase.randomBoolean()) {
+            putTemplateRequest.settings(Settings.builder().put("setting-" + ESTestCase.randomInt(), ESTestCase.randomTimeValue()));
+        }
+        if (ESTestCase.randomBoolean()) {
+            putTemplateRequest.mapping("doc-" + ESTestCase.randomInt(),
+                "field-" + ESTestCase.randomInt(), "type=" + ESTestCase.randomFrom("text", "keyword"));
+        }
+        if (ESTestCase.randomBoolean()) {
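+            // aliases are serialized into the template body rather than the URL,
+            // so assertToXContentBody below is what covers them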
+            putTemplateRequest.alias(new Alias("alias-" + ESTestCase.randomInt()));
+        }
+        Map<String, String> expectedParams = new HashMap<>();
+        if (ESTestCase.randomBoolean()) {
+            expectedParams.put("create", Boolean.TRUE.toString());
+            putTemplateRequest.create(true);
+        }
+        if (ESTestCase.randomBoolean()) {
+            String cause = ESTestCase.randomUnicodeOfCodepointLengthBetween(1, 50);
+            putTemplateRequest.cause(cause);
+            expectedParams.put("cause", cause);
+        }
+        RequestConvertersTests.setRandomMasterTimeout(putTemplateRequest, expectedParams);
+        Request request = IndicesRequestConverters.putTemplate(putTemplateRequest);
+        Assert.assertThat(request.getEndpoint(), equalTo("/_template/" + names.get(putTemplateRequest.name())));
+        Assert.assertThat(request.getParameters(), equalTo(expectedParams));
+        RequestConvertersTests.assertToXContentBody(putTemplateRequest, request.getEntity());
+    }
+
+    public void testValidateQuery() throws Exception {
+        String[] indices = ESTestCase.randomBoolean() ? null : RequestConvertersTests.randomIndicesNames(0, 5);
+        String[] types = ESTestCase.randomBoolean() ? ESTestCase.generateRandomStringArray(5, 5, false, false) : null;
+        ValidateQueryRequest validateQueryRequest;
+        if (ESTestCase.randomBoolean()) {
+            validateQueryRequest = new ValidateQueryRequest(indices);
+        } else {
+            validateQueryRequest = new ValidateQueryRequest();
+            validateQueryRequest.indices(indices);
+        }
+        validateQueryRequest.types(types);
+        Map<String, String> expectedParams = new HashMap<>();
+        RequestConvertersTests.setRandomIndicesOptions(validateQueryRequest::indicesOptions, validateQueryRequest::indicesOptions,
+            expectedParams);
+        validateQueryRequest.explain(ESTestCase.randomBoolean());
+        validateQueryRequest.rewrite(ESTestCase.randomBoolean());
+        validateQueryRequest.allShards(ESTestCase.randomBoolean());
+        expectedParams.put("explain", Boolean.toString(validateQueryRequest.explain()));
+        expectedParams.put("rewrite", Boolean.toString(validateQueryRequest.rewrite()));
+        expectedParams.put("all_shards", Boolean.toString(validateQueryRequest.allShards()));
+        Request request = IndicesRequestConverters.validateQuery(validateQueryRequest);
+        StringJoiner endpoint = new StringJoiner("/", "/", "");
+        if (indices != null && indices.length > 0) {
+            endpoint.add(String.join(",", indices));
+            if (types != null && types.length > 0) {
+                endpoint.add(String.join(",", types));
+            }
+        }
+        endpoint.add("_validate/query");
+        Assert.assertThat(request.getEndpoint(), equalTo(endpoint.toString()));
+        Assert.assertThat(request.getParameters(), equalTo(expectedParams));
+        RequestConvertersTests.assertToXContentBody(validateQueryRequest, request.getEntity());
+        Assert.assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME));
+    }
+
+    public void testGetTemplateRequest() throws Exception {
+        Map<String, String> encodes = new HashMap<>();
+        encodes.put("log", "log");
+        encodes.put("1", "1");
+        encodes.put("template#1", "template%231");
+        encodes.put("template-*", "template-*");
+        encodes.put("foo^bar", "foo%5Ebar");
+        List<String> names = ESTestCase.randomSubsetOf(1, encodes.keySet());
+        GetIndexTemplatesRequest getTemplatesRequest = new GetIndexTemplatesRequest().names(names.toArray(new String[0]));
+        Map<String, String> expectedParams = new HashMap<>();
+        RequestConvertersTests.setRandomMasterTimeout(getTemplatesRequest, expectedParams);
+        RequestConvertersTests.setRandomLocal(getTemplatesRequest, expectedParams);
+        Request request = IndicesRequestConverters.getTemplates(getTemplatesRequest);
+        Assert.assertThat(request.getEndpoint(),
+            equalTo("/_template/" +
names.stream().map(encodes::get).collect(Collectors.joining(",")))); + Assert.assertThat(request.getParameters(), equalTo(expectedParams)); + Assert.assertThat(request.getEntity(), nullValue()); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index 4ef8e8542c9..6d073a7a60a 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -27,37 +27,12 @@ import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.apache.http.entity.ByteArrayEntity; import org.apache.http.util.EntityUtils; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest; -import org.elasticsearch.action.admin.indices.alias.Alias; -import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; -import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; -import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; -import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; -import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; -import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; -import org.elasticsearch.action.admin.indices.flush.FlushRequest; -import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; -import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; -import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; -import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; -import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; -import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest; -import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; -import org.elasticsearch.action.admin.indices.shrink.ResizeRequest; -import org.elasticsearch.action.admin.indices.shrink.ResizeType; -import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequest; -import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; -import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkShardRequest; import org.elasticsearch.action.delete.DeleteRequest; @@ -81,22 +56,18 @@ import org.elasticsearch.action.support.replication.ReplicationRequest; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.client.RequestConverters.EndpointBuilder; import 
org.elasticsearch.common.CheckedBiConsumer; -import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.lucene.uid.Versions; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.index.RandomCreateIndexGenerator; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.TermQueryBuilder; @@ -141,16 +112,11 @@ import java.util.StringJoiner; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Supplier; -import java.util.stream.Collectors; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE; import static org.elasticsearch.client.RequestConverters.enforceSameContentType; -import static org.elasticsearch.index.RandomCreateIndexGenerator.randomAliases; -import static org.elasticsearch.index.RandomCreateIndexGenerator.randomCreateIndexRequest; -import static org.elasticsearch.index.RandomCreateIndexGenerator.randomIndexSettings; -import static org.elasticsearch.index.alias.RandomAliasActionsGenerator.randomAliasAction; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.search.RandomSearchRequestGenerator.randomSearchRequest; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; @@ -261,30 +227,6 @@ public class RequestConvertersTests extends ESTestCase { getAndExistsTest(RequestConverters::exists, HttpHead.METHOD_NAME); } - public void testIndicesExist() { - String[] indices = randomIndicesNames(1, 10); - - GetIndexRequest getIndexRequest = new GetIndexRequest().indices(indices); - - Map expectedParams = new HashMap<>(); - setRandomIndicesOptions(getIndexRequest::indicesOptions, getIndexRequest::indicesOptions, expectedParams); - setRandomLocal(getIndexRequest, expectedParams); - setRandomHumanReadable(getIndexRequest, expectedParams); - setRandomIncludeDefaults(getIndexRequest, expectedParams); - - final Request request = RequestConverters.indicesExist(getIndexRequest); - - assertEquals(HttpHead.METHOD_NAME, request.getMethod()); - assertEquals("/" + String.join(",", indices), request.getEndpoint()); - assertThat(expectedParams, equalTo(request.getParameters())); - assertNull(request.getEntity()); - } - - public void testIndicesExistEmptyIndices() { - expectThrows(IllegalArgumentException.class, () -> RequestConverters.indicesExist(new GetIndexRequest())); - expectThrows(IllegalArgumentException.class, () -> RequestConverters.indicesExist(new GetIndexRequest().indices((String[]) null))); - } - private static void getAndExistsTest(Function requestConverter, String method) { String index = randomAlphaOfLengthBetween(3, 10); String type = randomAlphaOfLengthBetween(3, 10); @@ -343,41 +285,6 @@ 
public class RequestConvertersTests extends ESTestCase { assertEquals(method, request.getMethod()); } - public void testCreateIndex() throws IOException { - CreateIndexRequest createIndexRequest = randomCreateIndexRequest(); - - Map expectedParams = new HashMap<>(); - setRandomTimeout(createIndexRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); - setRandomMasterTimeout(createIndexRequest, expectedParams); - setRandomWaitForActiveShards(createIndexRequest::waitForActiveShards, expectedParams); - - Request request = RequestConverters.createIndex(createIndexRequest); - assertEquals("/" + createIndexRequest.index(), request.getEndpoint()); - assertEquals(expectedParams, request.getParameters()); - assertEquals(HttpPut.METHOD_NAME, request.getMethod()); - assertToXContentBody(createIndexRequest, request.getEntity()); - } - - public void testCreateIndexNullIndex() { - ActionRequestValidationException validationException = new CreateIndexRequest(null).validate(); - assertNotNull(validationException); - } - - public void testUpdateAliases() throws IOException { - IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest(); - AliasActions aliasAction = randomAliasAction(); - indicesAliasesRequest.addAliasAction(aliasAction); - - Map expectedParams = new HashMap<>(); - setRandomTimeout(indicesAliasesRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); - setRandomMasterTimeout(indicesAliasesRequest, expectedParams); - - Request request = RequestConverters.updateAliases(indicesAliasesRequest); - assertEquals("/_aliases", request.getEndpoint()); - assertEquals(expectedParams, request.getParameters()); - assertToXContentBody(indicesAliasesRequest, request.getEntity()); - } - public void testReindex() throws IOException { ReindexRequest reindexRequest = new ReindexRequest(); reindexRequest.setSourceIndices("source_idx"); @@ -537,282 +444,6 @@ public class RequestConvertersTests extends ESTestCase { assertToXContentBody(deleteByQueryRequest, request.getEntity()); } - public void testPutMapping() throws IOException { - PutMappingRequest putMappingRequest = new PutMappingRequest(); - - String[] indices = randomIndicesNames(0, 5); - putMappingRequest.indices(indices); - - String type = randomAlphaOfLengthBetween(3, 10); - putMappingRequest.type(type); - - Map expectedParams = new HashMap<>(); - - setRandomTimeout(putMappingRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); - setRandomMasterTimeout(putMappingRequest, expectedParams); - - Request request = RequestConverters.putMapping(putMappingRequest); - StringJoiner endpoint = new StringJoiner("/", "/", ""); - String index = String.join(",", indices); - if (Strings.hasLength(index)) { - endpoint.add(index); - } - endpoint.add("_mapping"); - endpoint.add(type); - assertEquals(endpoint.toString(), request.getEndpoint()); - - assertEquals(expectedParams, request.getParameters()); - assertEquals(HttpPut.METHOD_NAME, request.getMethod()); - assertToXContentBody(putMappingRequest, request.getEntity()); - } - - public void testGetMapping() throws IOException { - GetMappingsRequest getMappingRequest = new GetMappingsRequest(); - - String[] indices = Strings.EMPTY_ARRAY; - if (randomBoolean()) { - indices = randomIndicesNames(0, 5); - getMappingRequest.indices(indices); - } else if (randomBoolean()) { - getMappingRequest.indices((String[]) null); - } - - String type = null; - if (randomBoolean()) { - type = randomAlphaOfLengthBetween(3, 10); - getMappingRequest.types(type); - } 
else if (randomBoolean()) { - getMappingRequest.types((String[]) null); - } - - Map expectedParams = new HashMap<>(); - - setRandomIndicesOptions(getMappingRequest::indicesOptions, getMappingRequest::indicesOptions, expectedParams); - setRandomMasterTimeout(getMappingRequest, expectedParams); - setRandomLocal(getMappingRequest, expectedParams); - - Request request = RequestConverters.getMappings(getMappingRequest); - StringJoiner endpoint = new StringJoiner("/", "/", ""); - String index = String.join(",", indices); - if (Strings.hasLength(index)) { - endpoint.add(index); - } - endpoint.add("_mapping"); - if (type != null) { - endpoint.add(type); - } - assertThat(endpoint.toString(), equalTo(request.getEndpoint())); - - assertThat(expectedParams, equalTo(request.getParameters())); - assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); - } - - public void testGetFieldMapping() throws IOException { - GetFieldMappingsRequest getFieldMappingsRequest = new GetFieldMappingsRequest(); - - String[] indices = Strings.EMPTY_ARRAY; - if (randomBoolean()) { - indices = randomIndicesNames(0, 5); - getFieldMappingsRequest.indices(indices); - } else if (randomBoolean()) { - getFieldMappingsRequest.indices((String[]) null); - } - - String type = null; - if (randomBoolean()) { - type = randomAlphaOfLengthBetween(3, 10); - getFieldMappingsRequest.types(type); - } else if (randomBoolean()) { - getFieldMappingsRequest.types((String[]) null); - } - - String[] fields = null; - if (randomBoolean()) { - fields = new String[randomIntBetween(1, 5)]; - for (int i = 0; i < fields.length; i++) { - fields[i] = randomAlphaOfLengthBetween(3, 10); - } - getFieldMappingsRequest.fields(fields); - } else if (randomBoolean()) { - getFieldMappingsRequest.fields((String[]) null); - } - - Map expectedParams = new HashMap<>(); - - setRandomIndicesOptions(getFieldMappingsRequest::indicesOptions, getFieldMappingsRequest::indicesOptions, expectedParams); - setRandomLocal(getFieldMappingsRequest::local, expectedParams); - - Request request = RequestConverters.getFieldMapping(getFieldMappingsRequest); - StringJoiner endpoint = new StringJoiner("/", "/", ""); - String index = String.join(",", indices); - if (Strings.hasLength(index)) { - endpoint.add(index); - } - endpoint.add("_mapping"); - if (type != null) { - endpoint.add(type); - } - endpoint.add("field"); - if (fields != null) { - endpoint.add(String.join(",", fields)); - } - assertThat(endpoint.toString(), equalTo(request.getEndpoint())); - - assertThat(expectedParams, equalTo(request.getParameters())); - assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); - } - - public void testDeleteIndex() { - String[] indices = randomIndicesNames(0, 5); - DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(indices); - - Map expectedParams = new HashMap<>(); - setRandomTimeout(deleteIndexRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); - setRandomMasterTimeout(deleteIndexRequest, expectedParams); - - setRandomIndicesOptions(deleteIndexRequest::indicesOptions, deleteIndexRequest::indicesOptions, expectedParams); - - Request request = RequestConverters.deleteIndex(deleteIndexRequest); - assertEquals("/" + String.join(",", indices), request.getEndpoint()); - assertEquals(expectedParams, request.getParameters()); - assertEquals(HttpDelete.METHOD_NAME, request.getMethod()); - assertNull(request.getEntity()); - } - - public void testGetSettings() throws IOException { - String[] indicesUnderTest = randomBoolean() ? 
null : randomIndicesNames(0, 5); - - GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices(indicesUnderTest); - - Map expectedParams = new HashMap<>(); - setRandomMasterTimeout(getSettingsRequest, expectedParams); - setRandomIndicesOptions(getSettingsRequest::indicesOptions, getSettingsRequest::indicesOptions, expectedParams); - - setRandomLocal(getSettingsRequest, expectedParams); - - if (randomBoolean()) { - // the request object will not have include_defaults present unless it is set to - // true - getSettingsRequest.includeDefaults(randomBoolean()); - if (getSettingsRequest.includeDefaults()) { - expectedParams.put("include_defaults", Boolean.toString(true)); - } - } - - StringJoiner endpoint = new StringJoiner("/", "/", ""); - if (indicesUnderTest != null && indicesUnderTest.length > 0) { - endpoint.add(String.join(",", indicesUnderTest)); - } - endpoint.add("_settings"); - - if (randomBoolean()) { - String[] names = randomBoolean() ? null : new String[randomIntBetween(0, 3)]; - if (names != null) { - for (int x = 0; x < names.length; x++) { - names[x] = randomAlphaOfLengthBetween(3, 10); - } - } - getSettingsRequest.names(names); - if (names != null && names.length > 0) { - endpoint.add(String.join(",", names)); - } - } - - Request request = RequestConverters.getSettings(getSettingsRequest); - - assertThat(endpoint.toString(), equalTo(request.getEndpoint())); - assertThat(request.getParameters(), equalTo(expectedParams)); - assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME)); - assertThat(request.getEntity(), nullValue()); - } - - public void testGetIndex() throws IOException { - String[] indicesUnderTest = randomBoolean() ? null : randomIndicesNames(0, 5); - - GetIndexRequest getIndexRequest = new GetIndexRequest().indices(indicesUnderTest); - - Map expectedParams = new HashMap<>(); - setRandomMasterTimeout(getIndexRequest, expectedParams); - setRandomIndicesOptions(getIndexRequest::indicesOptions, getIndexRequest::indicesOptions, expectedParams); - setRandomLocal(getIndexRequest, expectedParams); - setRandomHumanReadable(getIndexRequest, expectedParams); - - if (randomBoolean()) { - // the request object will not have include_defaults present unless it is set to - // true - getIndexRequest.includeDefaults(randomBoolean()); - if (getIndexRequest.includeDefaults()) { - expectedParams.put("include_defaults", Boolean.toString(true)); - } - } - - StringJoiner endpoint = new StringJoiner("/", "/", ""); - if (indicesUnderTest != null && indicesUnderTest.length > 0) { - endpoint.add(String.join(",", indicesUnderTest)); - } - - Request request = RequestConverters.getIndex(getIndexRequest); - - assertThat(endpoint.toString(), equalTo(request.getEndpoint())); - assertThat(request.getParameters(), equalTo(expectedParams)); - assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME)); - assertThat(request.getEntity(), nullValue()); - } - - public void testDeleteIndexEmptyIndices() { - String[] indices = randomBoolean() ? 
null : Strings.EMPTY_ARRAY; - ActionRequestValidationException validationException = new DeleteIndexRequest(indices).validate(); - assertNotNull(validationException); - } - - public void testOpenIndex() { - String[] indices = randomIndicesNames(1, 5); - OpenIndexRequest openIndexRequest = new OpenIndexRequest(indices); - openIndexRequest.indices(indices); - - Map expectedParams = new HashMap<>(); - setRandomTimeout(openIndexRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); - setRandomMasterTimeout(openIndexRequest, expectedParams); - setRandomIndicesOptions(openIndexRequest::indicesOptions, openIndexRequest::indicesOptions, expectedParams); - setRandomWaitForActiveShards(openIndexRequest::waitForActiveShards, expectedParams); - - Request request = RequestConverters.openIndex(openIndexRequest); - StringJoiner endpoint = new StringJoiner("/", "/", "").add(String.join(",", indices)).add("_open"); - assertThat(endpoint.toString(), equalTo(request.getEndpoint())); - assertThat(expectedParams, equalTo(request.getParameters())); - assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME)); - assertThat(request.getEntity(), nullValue()); - } - - public void testOpenIndexEmptyIndices() { - String[] indices = randomBoolean() ? null : Strings.EMPTY_ARRAY; - ActionRequestValidationException validationException = new OpenIndexRequest(indices).validate(); - assertNotNull(validationException); - } - - public void testCloseIndex() { - String[] indices = randomIndicesNames(1, 5); - CloseIndexRequest closeIndexRequest = new CloseIndexRequest(indices); - - Map expectedParams = new HashMap<>(); - setRandomTimeout(closeIndexRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); - setRandomMasterTimeout(closeIndexRequest, expectedParams); - setRandomIndicesOptions(closeIndexRequest::indicesOptions, closeIndexRequest::indicesOptions, expectedParams); - - Request request = RequestConverters.closeIndex(closeIndexRequest); - StringJoiner endpoint = new StringJoiner("/", "/", "").add(String.join(",", indices)).add("_close"); - assertThat(endpoint.toString(), equalTo(request.getEndpoint())); - assertThat(expectedParams, equalTo(request.getParameters())); - assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME)); - assertThat(request.getEntity(), nullValue()); - } - - public void testCloseIndexEmptyIndices() { - String[] indices = randomBoolean() ? null : Strings.EMPTY_ARRAY; - ActionRequestValidationException validationException = new CloseIndexRequest(indices).validate(); - assertNotNull(validationException); - } - public void testIndex() throws IOException { String index = randomAlphaOfLengthBetween(3, 10); String type = randomAlphaOfLengthBetween(3, 10); @@ -886,161 +517,6 @@ public class RequestConvertersTests extends ESTestCase { } } - public void testRefresh() { - String[] indices = randomBoolean() ? 
null : randomIndicesNames(0, 5); - RefreshRequest refreshRequest; - if (randomBoolean()) { - refreshRequest = new RefreshRequest(indices); - } else { - refreshRequest = new RefreshRequest(); - refreshRequest.indices(indices); - } - Map expectedParams = new HashMap<>(); - setRandomIndicesOptions(refreshRequest::indicesOptions, refreshRequest::indicesOptions, expectedParams); - Request request = RequestConverters.refresh(refreshRequest); - StringJoiner endpoint = new StringJoiner("/", "/", ""); - if (indices != null && indices.length > 0) { - endpoint.add(String.join(",", indices)); - } - endpoint.add("_refresh"); - assertThat(request.getEndpoint(), equalTo(endpoint.toString())); - assertThat(request.getParameters(), equalTo(expectedParams)); - assertThat(request.getEntity(), nullValue()); - assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME)); - } - - public void testFlush() { - String[] indices = randomBoolean() ? null : randomIndicesNames(0, 5); - FlushRequest flushRequest; - if (randomBoolean()) { - flushRequest = new FlushRequest(indices); - } else { - flushRequest = new FlushRequest(); - flushRequest.indices(indices); - } - Map expectedParams = new HashMap<>(); - setRandomIndicesOptions(flushRequest::indicesOptions, flushRequest::indicesOptions, expectedParams); - if (randomBoolean()) { - flushRequest.force(randomBoolean()); - } - expectedParams.put("force", Boolean.toString(flushRequest.force())); - if (randomBoolean()) { - flushRequest.waitIfOngoing(randomBoolean()); - } - expectedParams.put("wait_if_ongoing", Boolean.toString(flushRequest.waitIfOngoing())); - - Request request = RequestConverters.flush(flushRequest); - StringJoiner endpoint = new StringJoiner("/", "/", ""); - if (indices != null && indices.length > 0) { - endpoint.add(String.join(",", indices)); - } - endpoint.add("_flush"); - assertThat(request.getEndpoint(), equalTo(endpoint.toString())); - assertThat(request.getParameters(), equalTo(expectedParams)); - assertThat(request.getEntity(), nullValue()); - assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME)); - } - - public void testSyncedFlush() { - String[] indices = randomBoolean() ? null : randomIndicesNames(0, 5); - SyncedFlushRequest syncedFlushRequest; - if (randomBoolean()) { - syncedFlushRequest = new SyncedFlushRequest(indices); - } else { - syncedFlushRequest = new SyncedFlushRequest(); - syncedFlushRequest.indices(indices); - } - Map expectedParams = new HashMap<>(); - setRandomIndicesOptions(syncedFlushRequest::indicesOptions, syncedFlushRequest::indicesOptions, expectedParams); - Request request = RequestConverters.flushSynced(syncedFlushRequest); - StringJoiner endpoint = new StringJoiner("/", "/", ""); - if (indices != null && indices.length > 0) { - endpoint.add(String.join(",", indices)); - } - endpoint.add("_flush/synced"); - assertThat(request.getEndpoint(), equalTo(endpoint.toString())); - assertThat(request.getParameters(), equalTo(expectedParams)); - assertThat(request.getEntity(), nullValue()); - assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME)); - } - - public void testForceMerge() { - String[] indices = randomBoolean() ? 
null : randomIndicesNames(0, 5); - ForceMergeRequest forceMergeRequest; - if (randomBoolean()) { - forceMergeRequest = new ForceMergeRequest(indices); - } else { - forceMergeRequest = new ForceMergeRequest(); - forceMergeRequest.indices(indices); - } - - Map expectedParams = new HashMap<>(); - setRandomIndicesOptions(forceMergeRequest::indicesOptions, forceMergeRequest::indicesOptions, expectedParams); - if (randomBoolean()) { - forceMergeRequest.maxNumSegments(randomInt()); - } - expectedParams.put("max_num_segments", Integer.toString(forceMergeRequest.maxNumSegments())); - if (randomBoolean()) { - forceMergeRequest.onlyExpungeDeletes(randomBoolean()); - } - expectedParams.put("only_expunge_deletes", Boolean.toString(forceMergeRequest.onlyExpungeDeletes())); - if (randomBoolean()) { - forceMergeRequest.flush(randomBoolean()); - } - expectedParams.put("flush", Boolean.toString(forceMergeRequest.flush())); - - Request request = RequestConverters.forceMerge(forceMergeRequest); - StringJoiner endpoint = new StringJoiner("/", "/", ""); - if (indices != null && indices.length > 0) { - endpoint.add(String.join(",", indices)); - } - endpoint.add("_forcemerge"); - assertThat(request.getEndpoint(), equalTo(endpoint.toString())); - assertThat(request.getParameters(), equalTo(expectedParams)); - assertThat(request.getEntity(), nullValue()); - assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME)); - } - - public void testClearCache() { - String[] indices = randomBoolean() ? null : randomIndicesNames(0, 5); - ClearIndicesCacheRequest clearIndicesCacheRequest; - if (randomBoolean()) { - clearIndicesCacheRequest = new ClearIndicesCacheRequest(indices); - } else { - clearIndicesCacheRequest = new ClearIndicesCacheRequest(); - clearIndicesCacheRequest.indices(indices); - } - Map expectedParams = new HashMap<>(); - setRandomIndicesOptions(clearIndicesCacheRequest::indicesOptions, clearIndicesCacheRequest::indicesOptions, expectedParams); - if (randomBoolean()) { - clearIndicesCacheRequest.queryCache(randomBoolean()); - } - expectedParams.put("query", Boolean.toString(clearIndicesCacheRequest.queryCache())); - if (randomBoolean()) { - clearIndicesCacheRequest.fieldDataCache(randomBoolean()); - } - expectedParams.put("fielddata", Boolean.toString(clearIndicesCacheRequest.fieldDataCache())); - if (randomBoolean()) { - clearIndicesCacheRequest.requestCache(randomBoolean()); - } - expectedParams.put("request", Boolean.toString(clearIndicesCacheRequest.requestCache())); - if (randomBoolean()) { - clearIndicesCacheRequest.fields(randomIndicesNames(1, 5)); - expectedParams.put("fields", String.join(",", clearIndicesCacheRequest.fields())); - } - - Request request = RequestConverters.clearCache(clearIndicesCacheRequest); - StringJoiner endpoint = new StringJoiner("/", "/", ""); - if (indices != null && indices.length > 0) { - endpoint.add(String.join(",", indices)); - } - endpoint.add("_cache/clear"); - assertThat(request.getEndpoint(), equalTo(endpoint.toString())); - assertThat(request.getParameters(), equalTo(expectedParams)); - assertThat(request.getEntity(), nullValue()); - assertThat(request.getMethod(), equalTo(HttpPost.METHOD_NAME)); - } - public void testUpdate() throws IOException { XContentType xContentType = randomFrom(XContentType.values()); @@ -1598,54 +1074,6 @@ public class RequestConvertersTests extends ESTestCase { assertEquals(new BytesArray(expectedBytes), new BytesArray(EntityUtils.toByteArray(actualEntity))); } - public void testExistsAlias() { - GetAliasesRequest getAliasesRequest = 
new GetAliasesRequest(); - String[] indices = randomBoolean() ? null : randomIndicesNames(0, 5); - getAliasesRequest.indices(indices); - // the HEAD endpoint requires at least an alias or an index - boolean hasIndices = indices != null && indices.length > 0; - String[] aliases; - if (hasIndices) { - aliases = randomBoolean() ? null : randomIndicesNames(0, 5); - } else { - aliases = randomIndicesNames(1, 5); - } - getAliasesRequest.aliases(aliases); - Map expectedParams = new HashMap<>(); - setRandomLocal(getAliasesRequest, expectedParams); - setRandomIndicesOptions(getAliasesRequest::indicesOptions, getAliasesRequest::indicesOptions, expectedParams); - - Request request = RequestConverters.existsAlias(getAliasesRequest); - StringJoiner expectedEndpoint = new StringJoiner("/", "/", ""); - if (indices != null && indices.length > 0) { - expectedEndpoint.add(String.join(",", indices)); - } - expectedEndpoint.add("_alias"); - if (aliases != null && aliases.length > 0) { - expectedEndpoint.add(String.join(",", aliases)); - } - assertEquals(HttpHead.METHOD_NAME, request.getMethod()); - assertEquals(expectedEndpoint.toString(), request.getEndpoint()); - assertEquals(expectedParams, request.getParameters()); - assertNull(request.getEntity()); - } - - public void testExistsAliasNoAliasNoIndex() { - { - GetAliasesRequest getAliasesRequest = new GetAliasesRequest(); - IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, - () -> RequestConverters.existsAlias(getAliasesRequest)); - assertEquals("existsAlias requires at least an alias or an index", iae.getMessage()); - } - { - GetAliasesRequest getAliasesRequest = new GetAliasesRequest((String[]) null); - getAliasesRequest.indices((String[]) null); - IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, - () -> RequestConverters.existsAlias(getAliasesRequest)); - assertEquals("existsAlias requires at least an alias or an index", iae.getMessage()); - } - } - public void testExplain() throws IOException { String index = randomAlphaOfLengthBetween(3, 10); String type = randomAlphaOfLengthBetween(3, 10); @@ -1749,245 +1177,6 @@ public class RequestConvertersTests extends ESTestCase { assertToXContentBody(spec, request.getEntity()); } - public void testSplit() throws IOException { - resizeTest(ResizeType.SPLIT, RequestConverters::split); - } - - public void testSplitWrongResizeType() { - ResizeRequest resizeRequest = new ResizeRequest("target", "source"); - resizeRequest.setResizeType(ResizeType.SHRINK); - IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> RequestConverters.split(resizeRequest)); - assertEquals("Wrong resize type [SHRINK] for indices split request", iae.getMessage()); - } - - public void testShrinkWrongResizeType() { - ResizeRequest resizeRequest = new ResizeRequest("target", "source"); - resizeRequest.setResizeType(ResizeType.SPLIT); - IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> RequestConverters.shrink(resizeRequest)); - assertEquals("Wrong resize type [SPLIT] for indices shrink request", iae.getMessage()); - } - - public void testShrink() throws IOException { - resizeTest(ResizeType.SHRINK, RequestConverters::shrink); - } - - private static void resizeTest(ResizeType resizeType, CheckedFunction function) - throws IOException { - String[] indices = randomIndicesNames(2, 2); - ResizeRequest resizeRequest = new ResizeRequest(indices[0], indices[1]); - resizeRequest.setResizeType(resizeType); - Map expectedParams = new HashMap<>(); 
- setRandomMasterTimeout(resizeRequest, expectedParams); - setRandomTimeout(resizeRequest::timeout, resizeRequest.timeout(), expectedParams); - - if (randomBoolean()) { - CreateIndexRequest createIndexRequest = new CreateIndexRequest(randomAlphaOfLengthBetween(3, 10)); - if (randomBoolean()) { - createIndexRequest.settings(randomIndexSettings()); - } - if (randomBoolean()) { - randomAliases(createIndexRequest); - } - resizeRequest.setTargetIndex(createIndexRequest); - } - setRandomWaitForActiveShards(resizeRequest::setWaitForActiveShards, expectedParams); - - Request request = function.apply(resizeRequest); - assertEquals(HttpPut.METHOD_NAME, request.getMethod()); - String expectedEndpoint = "/" + resizeRequest.getSourceIndex() + "/_" + resizeType.name().toLowerCase(Locale.ROOT) + "/" - + resizeRequest.getTargetIndexRequest().index(); - assertEquals(expectedEndpoint, request.getEndpoint()); - assertEquals(expectedParams, request.getParameters()); - assertToXContentBody(resizeRequest, request.getEntity()); - } - - public void testRollover() throws IOException { - RolloverRequest rolloverRequest = new RolloverRequest(randomAlphaOfLengthBetween(3, 10), - randomBoolean() ? null : randomAlphaOfLengthBetween(3, 10)); - Map expectedParams = new HashMap<>(); - setRandomTimeout(rolloverRequest::timeout, rolloverRequest.timeout(), expectedParams); - setRandomMasterTimeout(rolloverRequest, expectedParams); - if (randomBoolean()) { - rolloverRequest.dryRun(randomBoolean()); - if (rolloverRequest.isDryRun()) { - expectedParams.put("dry_run", "true"); - } - } - if (randomBoolean()) { - rolloverRequest.addMaxIndexAgeCondition(new TimeValue(randomNonNegativeLong())); - } - if (randomBoolean()) { - String type = randomAlphaOfLengthBetween(3, 10); - rolloverRequest.getCreateIndexRequest().mapping(type, RandomCreateIndexGenerator.randomMapping(type)); - } - if (randomBoolean()) { - RandomCreateIndexGenerator.randomAliases(rolloverRequest.getCreateIndexRequest()); - } - if (randomBoolean()) { - rolloverRequest.getCreateIndexRequest().settings(RandomCreateIndexGenerator.randomIndexSettings()); - } - setRandomWaitForActiveShards(rolloverRequest.getCreateIndexRequest()::waitForActiveShards, expectedParams); - - Request request = RequestConverters.rollover(rolloverRequest); - if (rolloverRequest.getNewIndexName() == null) { - assertEquals("/" + rolloverRequest.getAlias() + "/_rollover", request.getEndpoint()); - } else { - assertEquals("/" + rolloverRequest.getAlias() + "/_rollover/" + rolloverRequest.getNewIndexName(), request.getEndpoint()); - } - assertEquals(HttpPost.METHOD_NAME, request.getMethod()); - assertToXContentBody(rolloverRequest, request.getEntity()); - assertEquals(expectedParams, request.getParameters()); - } - - public void testGetAlias() { - GetAliasesRequest getAliasesRequest = new GetAliasesRequest(); - - Map expectedParams = new HashMap<>(); - setRandomLocal(getAliasesRequest, expectedParams); - setRandomIndicesOptions(getAliasesRequest::indicesOptions, getAliasesRequest::indicesOptions, expectedParams); - - String[] indices = randomBoolean() ? null : randomIndicesNames(0, 2); - String[] aliases = randomBoolean() ? 
null : randomIndicesNames(0, 2); - getAliasesRequest.indices(indices); - getAliasesRequest.aliases(aliases); - - Request request = RequestConverters.getAlias(getAliasesRequest); - StringJoiner expectedEndpoint = new StringJoiner("/", "/", ""); - - if (false == CollectionUtils.isEmpty(indices)) { - expectedEndpoint.add(String.join(",", indices)); - } - expectedEndpoint.add("_alias"); - - if (false == CollectionUtils.isEmpty(aliases)) { - expectedEndpoint.add(String.join(",", aliases)); - } - - assertEquals(HttpGet.METHOD_NAME, request.getMethod()); - assertEquals(expectedEndpoint.toString(), request.getEndpoint()); - assertEquals(expectedParams, request.getParameters()); - assertNull(request.getEntity()); - } - - public void testIndexPutSettings() throws IOException { - String[] indices = randomBoolean() ? null : randomIndicesNames(0, 2); - UpdateSettingsRequest updateSettingsRequest = new UpdateSettingsRequest(indices); - Map expectedParams = new HashMap<>(); - setRandomMasterTimeout(updateSettingsRequest, expectedParams); - setRandomTimeout(updateSettingsRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); - setRandomIndicesOptions(updateSettingsRequest::indicesOptions, updateSettingsRequest::indicesOptions, expectedParams); - if (randomBoolean()) { - updateSettingsRequest.setPreserveExisting(randomBoolean()); - if (updateSettingsRequest.isPreserveExisting()) { - expectedParams.put("preserve_existing", "true"); - } - } - - Request request = RequestConverters.indexPutSettings(updateSettingsRequest); - StringJoiner endpoint = new StringJoiner("/", "/", ""); - if (indices != null && indices.length > 0) { - endpoint.add(String.join(",", indices)); - } - endpoint.add("_settings"); - assertThat(endpoint.toString(), equalTo(request.getEndpoint())); - assertEquals(HttpPut.METHOD_NAME, request.getMethod()); - assertToXContentBody(updateSettingsRequest, request.getEntity()); - assertEquals(expectedParams, request.getParameters()); - } - - public void testPutTemplateRequest() throws Exception { - Map names = new HashMap<>(); - names.put("log", "log"); - names.put("template#1", "template%231"); - names.put("-#template", "-%23template"); - names.put("foo^bar", "foo%5Ebar"); - - PutIndexTemplateRequest putTemplateRequest = new PutIndexTemplateRequest().name(randomFrom(names.keySet())) - .patterns(Arrays.asList(generateRandomStringArray(20, 100, false, false))); - if (randomBoolean()) { - putTemplateRequest.order(randomInt()); - } - if (randomBoolean()) { - putTemplateRequest.version(randomInt()); - } - if (randomBoolean()) { - putTemplateRequest.settings(Settings.builder().put("setting-" + randomInt(), randomTimeValue())); - } - if (randomBoolean()) { - putTemplateRequest.mapping("doc-" + randomInt(), "field-" + randomInt(), "type=" + randomFrom("text", "keyword")); - } - if (randomBoolean()) { - putTemplateRequest.alias(new Alias("alias-" + randomInt())); - } - Map expectedParams = new HashMap<>(); - if (randomBoolean()) { - expectedParams.put("create", Boolean.TRUE.toString()); - putTemplateRequest.create(true); - } - if (randomBoolean()) { - String cause = randomUnicodeOfCodepointLengthBetween(1, 50); - putTemplateRequest.cause(cause); - expectedParams.put("cause", cause); - } - setRandomMasterTimeout(putTemplateRequest, expectedParams); - Request request = RequestConverters.putTemplate(putTemplateRequest); - assertThat(request.getEndpoint(), equalTo("/_template/" + names.get(putTemplateRequest.name()))); - assertThat(request.getParameters(), equalTo(expectedParams)); - 
assertToXContentBody(putTemplateRequest, request.getEntity()); - } - - public void testValidateQuery() throws Exception { - String[] indices = randomBoolean() ? null : randomIndicesNames(0, 5); - String[] types = randomBoolean() ? generateRandomStringArray(5, 5, false, false) : null; - ValidateQueryRequest validateQueryRequest; - if (randomBoolean()) { - validateQueryRequest = new ValidateQueryRequest(indices); - } else { - validateQueryRequest = new ValidateQueryRequest(); - validateQueryRequest.indices(indices); - } - validateQueryRequest.types(types); - Map expectedParams = new HashMap<>(); - setRandomIndicesOptions(validateQueryRequest::indicesOptions, validateQueryRequest::indicesOptions, expectedParams); - validateQueryRequest.explain(randomBoolean()); - validateQueryRequest.rewrite(randomBoolean()); - validateQueryRequest.allShards(randomBoolean()); - expectedParams.put("explain", Boolean.toString(validateQueryRequest.explain())); - expectedParams.put("rewrite", Boolean.toString(validateQueryRequest.rewrite())); - expectedParams.put("all_shards", Boolean.toString(validateQueryRequest.allShards())); - Request request = RequestConverters.validateQuery(validateQueryRequest); - StringJoiner endpoint = new StringJoiner("/", "/", ""); - if (indices != null && indices.length > 0) { - endpoint.add(String.join(",", indices)); - if (types != null && types.length > 0) { - endpoint.add(String.join(",", types)); - } - } - endpoint.add("_validate/query"); - assertThat(request.getEndpoint(), equalTo(endpoint.toString())); - assertThat(request.getParameters(), equalTo(expectedParams)); - assertToXContentBody(validateQueryRequest, request.getEntity()); - assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME)); - } - - public void testGetTemplateRequest() throws Exception { - Map encodes = new HashMap<>(); - encodes.put("log", "log"); - encodes.put("1", "1"); - encodes.put("template#1", "template%231"); - encodes.put("template-*", "template-*"); - encodes.put("foo^bar", "foo%5Ebar"); - List names = randomSubsetOf(1, encodes.keySet()); - GetIndexTemplatesRequest getTemplatesRequest = new GetIndexTemplatesRequest().names(names.toArray(new String[0])); - Map expectedParams = new HashMap<>(); - setRandomMasterTimeout(getTemplatesRequest, expectedParams); - setRandomLocal(getTemplatesRequest, expectedParams); - Request request = RequestConverters.getTemplates(getTemplatesRequest); - assertThat(request.getEndpoint(), equalTo("/_template/" + names.stream().map(encodes::get).collect(Collectors.joining(",")))); - assertThat(request.getParameters(), equalTo(expectedParams)); - assertThat(request.getEntity(), nullValue()); - } - public void testPutScript() throws Exception { PutStoredScriptRequest putStoredScriptRequest = new PutStoredScriptRequest(); @@ -2289,7 +1478,7 @@ public class RequestConvertersTests extends ESTestCase { } } - private static void setRandomIncludeDefaults(GetIndexRequest request, Map expectedParams) { + static void setRandomIncludeDefaults(GetIndexRequest request, Map expectedParams) { if (randomBoolean()) { boolean includeDefaults = randomBoolean(); request.includeDefaults(includeDefaults); @@ -2299,7 +1488,7 @@ public class RequestConvertersTests extends ESTestCase { } } - private static void setRandomHumanReadable(GetIndexRequest request, Map expectedParams) { + static void setRandomHumanReadable(GetIndexRequest request, Map expectedParams) { if (randomBoolean()) { boolean humanReadable = randomBoolean(); request.humanReadable(humanReadable); @@ -2309,7 +1498,7 @@ public class 
RequestConvertersTests extends ESTestCase { } } - private static void setRandomLocal(Consumer setter, Map expectedParams) { + static void setRandomLocal(Consumer setter, Map expectedParams) { if (randomBoolean()) { boolean local = randomBoolean(); setter.accept(local); @@ -2343,7 +1532,7 @@ public class RequestConvertersTests extends ESTestCase { } } - private static void setRandomWaitForActiveShards(Consumer setter, Map expectedParams) { + static void setRandomWaitForActiveShards(Consumer setter, Map expectedParams) { setRandomWaitForActiveShards(setter, ActiveShardCount.DEFAULT, expectedParams); } From 012b9c7539471ecde25515cf49b12846af0bc9e0 Mon Sep 17 00:00:00 2001 From: ben5556 <39107453+ben5556@users.noreply.github.com> Date: Tue, 18 Sep 2018 13:21:15 +1200 Subject: [PATCH 06/32] Corrected aggregation name to match the example (#33786) --- docs/reference/aggregations/metrics/sum-aggregation.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/aggregations/metrics/sum-aggregation.asciidoc b/docs/reference/aggregations/metrics/sum-aggregation.asciidoc index 55c1c3f80fa..8825f079521 100644 --- a/docs/reference/aggregations/metrics/sum-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/sum-aggregation.asciidoc @@ -40,7 +40,7 @@ Resulting in: -------------------------------------------------- // TESTRESPONSE[s/\.\.\./"took": $body.took,"timed_out": false,"_shards": $body._shards,"hits": $body.hits,/] -The name of the aggregation (`intraday_return` above) also serves as the key by which the aggregation result can be retrieved from the returned response. +The name of the aggregation (`hat_prices` above) also serves as the key by which the aggregation result can be retrieved from the returned response. ==== Script From a95226bdaeaf13347d1bbd2761e3eeaead47c145 Mon Sep 17 00:00:00 2001 From: Shaunak Kashyap Date: Mon, 17 Sep 2018 18:29:30 -0700 Subject: [PATCH 07/32] [Monitoring] Removing unused version.* fields (#33584) This PR removes fields that are not actually used by the Monitoring UI. This will greatly simplify the eventual migration to using Metricbeat for monitoring Elasticsearch (see https://github.com/elastic/beats/pull/8260#discussion_r215885868 for more context and discussion around removing these fields from ES collection). 
--- .../collector/indices/IndexStatsMonitoringDoc.java | 9 --------- .../collector/indices/IndexStatsMonitoringDocTests.java | 4 ---- .../xpack/monitoring/integration/MonitoringIT.java | 2 +- 3 files changed, 1 insertion(+), 14 deletions(-) diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsMonitoringDoc.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsMonitoringDoc.java index f90abb1639d..c72e645dfd0 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsMonitoringDoc.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsMonitoringDoc.java @@ -69,13 +69,6 @@ public class IndexStatsMonitoringDoc extends FilteredMonitoringDoc { builder.field("created", metaData.getCreationDate()); builder.field("status", health.getStatus().name().toLowerCase(Locale.ROOT)); - builder.startObject("version"); - { - builder.field("created", metaData.getCreationVersion()); - builder.field("upgraded", metaData.getUpgradedVersion()); - } - builder.endObject(); - builder.startObject("shards"); { final int total = metaData.getTotalNumberOfShards(); @@ -128,8 +121,6 @@ public class IndexStatsMonitoringDoc extends FilteredMonitoringDoc { "index_stats.uuid", "index_stats.created", "index_stats.status", - "index_stats.version.created", - "index_stats.version.upgraded", "index_stats.shards.total", "index_stats.shards.primaries", "index_stats.shards.replicas", diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsMonitoringDocTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsMonitoringDocTests.java index 278af123625..da9063507da 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsMonitoringDocTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexStatsMonitoringDocTests.java @@ -290,10 +290,6 @@ public class IndexStatsMonitoringDocTests extends BaseFilteredMonitoringDocTestC "\"uuid\":\"" + index.getUUID() + "\"," + "\"created\":" + metaData.getCreationDate() + "," + "\"status\":\"" + indexHealth.getStatus().name().toLowerCase(Locale.ROOT) + "\"," + - "\"version\":{" + - "\"created\":\"" + metaData.getCreationVersion() + "\"," + - "\"upgraded\":\"" + metaData.getUpgradedVersion() + "\"" + - "}," + "\"shards\":{" + "\"total\":" + total + "," + "\"primaries\":" + primaries + "," + diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java index 77a70f5da57..a6f9a14f28b 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java @@ -417,7 +417,7 @@ public class MonitoringIT extends ESSingleNodeTestCase { // particular field values checked in the index stats tests final Map indexStats = (Map) source.get(IndexStatsMonitoringDoc.TYPE); - assertEquals(8, indexStats.size()); + assertEquals(7, indexStats.size()); assertThat((String) indexStats.get("index"), not(isEmptyOrNullString())); assertThat((String) 
indexStats.get("uuid"), not(isEmptyOrNullString())); assertThat(indexStats.get("created"), notNullValue());

From 2aba52de8f9315b0e384e1c657d7b0401d26a1b0 Mon Sep 17 00:00:00 2001
From: Shaunak Kashyap
Date: Mon, 17 Sep 2018 18:33:43 -0700
Subject: [PATCH 08/32] Implement xpack.monitoring.elasticsearch.collection.enabled setting (#33474)

* Implement xpack.monitoring.elasticsearch.collection.enabled setting
* Fixing line lengths
* Updating constructor calls in test
* Removing unused import
* Fixing line lengths in test classes
* Make monitoringService.isElasticsearchCollectionEnabled() return true for tests
* Remove wrong expectation
* Adding unit tests for new flag to be false
* Fixing line wrapping/indentation for better readability
* Adding docs
* Fixing logic in ClusterStatsCollector::shouldCollect
* Rebasing with master and resolving conflicts
* Simplifying implementation by gating scheduling
* Doc fixes / improvements
* Making methods package private
* Fixing wording
* Fixing method access
--- .../configuring-monitoring.asciidoc | 11 ++++-- .../monitoring/pause-export.asciidoc | 10 +++++ .../settings/monitoring-settings.asciidoc | 11 ++++++ .../xpack/monitoring/Monitoring.java | 1 + .../xpack/monitoring/MonitoringService.java | 38 +++++++++++++++++-- .../indices/IndexRecoveryCollectorTests.java | 2 +- 6 files changed, 64 insertions(+), 9 deletions(-)

diff --git a/docs/reference/monitoring/configuring-monitoring.asciidoc b/docs/reference/monitoring/configuring-monitoring.asciidoc index 3bcfef2acbf..6708b791036 100644
--- a/docs/reference/monitoring/configuring-monitoring.asciidoc
+++ b/docs/reference/monitoring/configuring-monitoring.asciidoc
@@ -13,10 +13,13 @@ indices. You can also adjust how monitoring data is displayed. . To collect monitoring data about your {es} cluster: -.. Verify that the `xpack.monitoring.enabled` and -`xpack.monitoring.collection.enabled` settings are `true` on each node in the -cluster. By default, data collection is disabled. For more information, see -<>. +.. Verify that the `xpack.monitoring.enabled`, +`xpack.monitoring.collection.enabled`, and +`xpack.monitoring.elasticsearch.collection.enabled` settings are `true` on each +node in the cluster. By default, `xpack.monitoring.collection.enabled` is disabled +(`false`), and it overrides `xpack.monitoring.elasticsearch.collection.enabled`, +which defaults to enabled (`true`). Both settings can be set dynamically +at runtime. For more information, see <>. .. Optional: Specify which indices you want to monitor. +

diff --git a/docs/reference/monitoring/pause-export.asciidoc b/docs/reference/monitoring/pause-export.asciidoc index 128e72a463c..7a8bc664ffc 100644
--- a/docs/reference/monitoring/pause-export.asciidoc
+++ b/docs/reference/monitoring/pause-export.asciidoc
@@ -16,6 +16,16 @@ monitoring data from other sources such as {kib}, Beats, and Logstash is ignored You can update this setting by using the {ref}/cluster-update-settings.html[Cluster Update Settings API]. +If you want to collect data from sources such as {kib}, Beats, and Logstash but +not collect data about your {es} cluster, you can disable data collection +just for {es}: + +[source,yaml] +--------------------------------------------------- +xpack.monitoring.collection.enabled: true +xpack.monitoring.elasticsearch.collection.enabled: false +--------------------------------------------------- + If you want to separately disable a specific exporter, you can specify the `enabled` setting (which defaults to `true`) per exporter.
For example: diff --git a/docs/reference/settings/monitoring-settings.asciidoc b/docs/reference/settings/monitoring-settings.asciidoc index 2759944e615..a039084412c 100644 --- a/docs/reference/settings/monitoring-settings.asciidoc +++ b/docs/reference/settings/monitoring-settings.asciidoc @@ -66,6 +66,17 @@ option in `kibana.yml` to the same value. You can update this setting through the <>. +`xpack.monitoring.elasticsearch.collection.enabled`:: + +Controls whether statistics about your {es} cluster should be collected. Defaults to `true`. +This is different from xpack.monitoring.collection.enabled, which allows you to enable or disable +all monitoring collection. However, this setting simply disables the collection of Elasticsearch +data while still allowing other data (e.g., Kibana, Logstash, Beats, or APM Server monitoring data) +to pass through this cluster. ++ +You can update this setting through the +<>. + `xpack.monitoring.collection.cluster.stats.timeout`:: Sets the timeout for collecting the cluster statistics. Defaults to `10s`. diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java index bb2ed76831d..027cb7de937 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java @@ -174,6 +174,7 @@ public class Monitoring extends Plugin implements ActionPlugin { settings.add(MonitoringField.HISTORY_DURATION); settings.add(CLEAN_WATCHER_HISTORY); settings.add(MonitoringService.ENABLED); + settings.add(MonitoringService.ELASTICSEARCH_COLLECTION_ENABLED); settings.add(MonitoringService.INTERVAL); settings.add(Collector.INDICES); settings.add(ClusterStatsCollector.CLUSTER_STATS_TIMEOUT); diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringService.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringService.java index 07d24826f86..073a4cf785c 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringService.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringService.java @@ -43,8 +43,21 @@ public class MonitoringService extends AbstractLifecycleComponent { */ public static final TimeValue MIN_INTERVAL = TimeValue.timeValueSeconds(1L); + /* + * Dynamically controls enabling or disabling the collection of Monitoring data only from Elasticsearch. + *
+ * This should only be used while transitioning to Metricbeat-based data collection for Elasticsearch with + * {@linkplain #ENABLED} set to {@code true}. By setting this to {@code false} and that value to {@code true}, + * Kibana, Logstash, Beats, and APM Server can all continue to report their stats through this cluster until they + * are transitioned to being monitored by Metricbeat as well. + */ + public static final Setting ELASTICSEARCH_COLLECTION_ENABLED = + Setting.boolSetting("xpack.monitoring.elasticsearch.collection.enabled", true, + Setting.Property.Dynamic, Setting.Property.NodeScope); + /** - * Dynamically controls enabling or disabling the collection of Monitoring data. + * Dynamically controls enabling or disabling the collection of Monitoring data from Elasticsearch as well as other products + * in the stack. */ public static final Setting ENABLED = Setting.boolSetting("xpack.monitoring.collection.enabled", false, @@ -68,6 +81,7 @@ public class MonitoringService extends AbstractLifecycleComponent { private final Set collectors; private final Exporters exporters; + private volatile boolean elasticsearchCollectionEnabled; private volatile boolean enabled; private volatile TimeValue interval; private volatile ThreadPool.Cancellable scheduler; @@ -79,13 +93,21 @@ public class MonitoringService extends AbstractLifecycleComponent { this.threadPool = Objects.requireNonNull(threadPool); this.collectors = Objects.requireNonNull(collectors); this.exporters = Objects.requireNonNull(exporters); + this.elasticsearchCollectionEnabled = ELASTICSEARCH_COLLECTION_ENABLED.get(settings); this.enabled = ENABLED.get(settings); this.interval = INTERVAL.get(settings); + clusterService.getClusterSettings() + .addSettingsUpdateConsumer(ELASTICSEARCH_COLLECTION_ENABLED, this::setElasticsearchCollectionEnabled); clusterService.getClusterSettings().addSettingsUpdateConsumer(ENABLED, this::setMonitoringActive); clusterService.getClusterSettings().addSettingsUpdateConsumer(INTERVAL, this::setInterval); } + void setElasticsearchCollectionEnabled(final boolean enabled) { + this.elasticsearchCollectionEnabled = enabled; + scheduleExecution(); + } + void setMonitoringActive(final boolean enabled) { this.enabled = enabled; scheduleExecution(); @@ -104,6 +126,14 @@ public class MonitoringService extends AbstractLifecycleComponent { return isStarted() && enabled; } + boolean isElasticsearchCollectionEnabled() { + return this.elasticsearchCollectionEnabled; + } + + boolean shouldScheduleExecution() { + return isElasticsearchCollectionEnabled() && isMonitoringActive(); + } + private String threadPoolName() { return ThreadPool.Names.GENERIC; } @@ -155,7 +185,7 @@ public class MonitoringService extends AbstractLifecycleComponent { if (scheduler != null) { cancelExecution(); } - if (isMonitoringActive()) { + if (shouldScheduleExecution()) { scheduler = threadPool.scheduleWithFixedDelay(monitor, interval, threadPoolName()); } } @@ -188,7 +218,7 @@ public class MonitoringService extends AbstractLifecycleComponent { @Override public void doRun() { - if (isMonitoringActive() == false) { + if (shouldScheduleExecution() == false) { logger.debug("monitoring execution is skipped"); return; } @@ -223,7 +253,7 @@ public class MonitoringService extends AbstractLifecycleComponent { new ParameterizedMessage("monitoring collector [{}] failed to collect data", collector.name()), e); } } - if (isMonitoringActive()) { + if (shouldScheduleExecution()) { exporters.export(results, ActionListener.wrap(r -> semaphore.release(), 
this::onFailure)); } else { semaphore.release();
diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryCollectorTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryCollectorTests.java index 47504736d26..f4484aa5ed7 100644
--- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryCollectorTests.java
+++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryCollectorTests.java
@@ -182,4 +182,4 @@ public class IndexRecoveryCollectorTests extends BaseCollectorTestCase { assertThat(recoveries.shardRecoveryStates().size(), equalTo(nbRecoveries)); } } -} \ No newline at end of file +}

From 615f494c7783bf2e689392213954a341812b0802 Mon Sep 17 00:00:00 2001
From: Armin Braun
Date: Tue, 18 Sep 2018 07:25:22 +0200
Subject: [PATCH 09/32] MINOR: Drop Redundant Ctx. Check in ScriptService (#33782)

* MINOR: Drop Redundant Ctx. Check in ScriptService

* This check is completely redundant: the expression script engine will throw anyway (and with a similar message) for those contexts that it cannot compile. Moreover, the update context is not the only context that is not supported by the expression engine at this point, so handling the update context separately here makes no sense.
--- .../script/expression/MoreExpressionTests.java | 2 +- .../script/expression/StoredExpressionTests.java | 2 +- .../java/org/elasticsearch/script/ScriptService.java | 9 --------- 3 files changed, 2 insertions(+), 11 deletions(-)

diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java index 932e5979c0f..6d7ab1d2595 100644
--- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java
+++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java
@@ -582,7 +582,7 @@ public class MoreExpressionTests extends ESIntegTestCase { String message = e.getMessage(); assertThat(message + " should have contained failed to execute", message.contains("failed to execute"), equalTo(true)); message = e.getCause().getMessage(); - assertThat(message + " should have contained not supported", message.contains("not supported"), equalTo(true)); + assertThat(message, equalTo("Failed to compile inline script [0] using lang [expression]")); } }

diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/StoredExpressionTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/StoredExpressionTests.java index c922392a05a..1877326b7d0 100644
--- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/StoredExpressionTests.java
+++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/StoredExpressionTests.java
@@ -61,7 +61,7 @@ public class StoredExpressionTests extends ESIntegTestCase { fail("update script should have been rejected"); } catch(Exception e) { assertThat(e.getMessage(), containsString("failed to execute script")); - assertThat(e.getCause().getMessage(), containsString("scripts of type [stored], operation [update] and lang [expression] are not supported")); + assertThat(e.getCause().getMessage(), containsString("Failed to compile stored script [script1] using lang [expression]")); }
try { client().prepareSearch()
diff --git a/server/src/main/java/org/elasticsearch/script/ScriptService.java b/server/src/main/java/org/elasticsearch/script/ScriptService.java index d37cefb3a01..6a54af8721e 100644
--- a/server/src/main/java/org/elasticsearch/script/ScriptService.java
+++ b/server/src/main/java/org/elasticsearch/script/ScriptService.java
@@ -281,15 +281,6 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust options = source.getOptions(); } - // TODO: fix this through some API or something, that's wrong - // special exception to prevent expressions from compiling as update or mapping scripts - boolean expression = "expression".equals(lang); - boolean notSupported = context.name.equals(UpdateScript.CONTEXT.name); - if (expression && notSupported) { - throw new UnsupportedOperationException("scripts of type [" + script.getType() + "]," + - " operation [" + context.name + "] and lang [" + lang + "] are not supported"); - } - ScriptEngine scriptEngine = getEngine(lang); if (isTypeEnabled(type) == false) {

From 47b86d6e6ae615d5284b3b9bdc931c41dd3befb5 Mon Sep 17 00:00:00 2001
From: Martijn van Groningen
Date: Tue, 18 Sep 2018 09:43:50 +0200
Subject: [PATCH 10/32] [CCR] Changed AutoFollowCoordinator to keep track of certain statistics (#33684)

The following stats are now tracked:
1) The total number of times that auto following a leader index succeeded.
2) The total number of times that auto following a leader index failed.
3) The total number of times that fetching a remote cluster state failed.
4) The most recent 256 auto follow failures per auto-followed leader index (e.g. create_and_follow api call fails) or cluster alias (e.g. fetching remote cluster state fails).

Each auto follow run now produces a result that is used to update the stats tracked in AutoFollowCoordinator.
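For reference, the bounded cache behind stat 4) is an insertion-ordered LinkedHashMap with an eviction hook. Below is a minimal standalone sketch of that idiom, not the patch itself: the wrapper class and its record helper are illustrative only, while MAX_AUTO_FOLLOW_ERRORS and the recentAutoFollowErrors field mirror the diff that follows (which stores ElasticsearchException values and guards access with a lock):

[source,java]
--------------------------------------------------
import java.util.LinkedHashMap;
import java.util.Map;

// Illustrative wrapper; only the map idiom comes from the patch.
final class RecentErrors {

    // Cap on retained failures, as in the patch.
    private static final int MAX_AUTO_FOLLOW_ERRORS = 256;

    // Insertion-ordered map whose eldest entry is evicted once the cap is
    // exceeded, so only the most recent failures are retained.
    private final Map<String, Exception> recentAutoFollowErrors =
        new LinkedHashMap<String, Exception>() {
            @Override
            protected boolean removeEldestEntry(final Map.Entry<String, Exception> eldest) {
                return size() > MAX_AUTO_FOLLOW_ERRORS;
            }
        };

    // Hypothetical helper; the patch keys failures by cluster alias or by
    // "alias:index", so a repeated failure for the same key overwrites its
    // previous entry instead of flooding the cache.
    void record(final String key, final Exception error) {
        recentAutoFollowErrors.put(key, error);
    }
}
--------------------------------------------------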
Relates to #33007 --- .../xpack/ccr/CcrMultiClusterLicenseIT.java | 2 +- .../xpack/ccr/CcrLicenseChecker.java | 2 +- .../ccr/action/AutoFollowCoordinator.java | 220 ++++++++++++------ .../action/AutoFollowCoordinatorTests.java | 101 +++++++- .../ccr/action/AutoFollowStatsTests.java | 77 ++++++ .../xpack/core/ccr/AutoFollowStats.java | 194 +++++++++++++++ 6 files changed, 515 insertions(+), 81 deletions(-) create mode 100644 x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsTests.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java b/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java index 7bc952a3ea8..505683b892c 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java @@ -64,7 +64,7 @@ public class CcrMultiClusterLicenseIT extends ESRestTestCase { while (it.hasNext()) { final String line = it.next(); if (line.matches(".*\\[WARN\\s*\\]\\[o\\.e\\.x\\.c\\.a\\.AutoFollowCoordinator\\s*\\] \\[node-0\\] " + - "failure occurred during auto-follower coordination")) { + "failure occurred while fetching cluster state in leader cluster \\[leader_cluster\\]")) { warn = true; break; } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java index c0000725887..f597871fc66 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java @@ -64,7 +64,7 @@ public final class CcrLicenseChecker { * * @param isCcrAllowed a boolean supplier that should return true if CCR is allowed and false otherwise */ - CcrLicenseChecker(final BooleanSupplier isCcrAllowed) { + public CcrLicenseChecker(final BooleanSupplier isCcrAllowed) { this.isCcrAllowed = Objects.requireNonNull(isCcrAllowed); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java index 180e5e37990..3a524e57249 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.ccr.action; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.client.Client; @@ -17,8 +19,10 @@ import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.collect.Tuple; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.index.Index; import org.elasticsearch.license.LicenseUtils; @@ -27,15 +31,18 @@ import org.elasticsearch.xpack.ccr.CcrLicenseChecker; import org.elasticsearch.xpack.ccr.CcrSettings; import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern; +import org.elasticsearch.xpack.core.ccr.AutoFollowStats; import org.elasticsearch.xpack.core.ccr.action.CreateAndFollowIndexAction; import org.elasticsearch.xpack.core.ccr.action.FollowIndexAction; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.concurrent.atomic.AtomicReference; +import java.util.TreeMap; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Function; @@ -47,6 +54,7 @@ import java.util.function.Function; public class AutoFollowCoordinator implements ClusterStateApplier { private static final Logger LOGGER = LogManager.getLogger(AutoFollowCoordinator.class); + private static final int MAX_AUTO_FOLLOW_ERRORS = 256; private final Client client; private final TimeValue pollInterval; @@ -56,6 +64,12 @@ public class AutoFollowCoordinator implements ClusterStateApplier { private volatile boolean localNodeMaster = false; + // The following fields are read and updated under a lock: + private long numberOfSuccessfulIndicesAutoFollowed = 0; + private long numberOfFailedIndicesAutoFollowed = 0; + private long numberOfFailedRemoteClusterStateRequests = 0; + private final LinkedHashMap recentAutoFollowErrors; + public AutoFollowCoordinator( Settings settings, Client client, @@ -69,6 +83,47 @@ public class AutoFollowCoordinator implements ClusterStateApplier { this.pollInterval = CcrSettings.CCR_AUTO_FOLLOW_POLL_INTERVAL.get(settings); clusterService.addStateApplier(this); + + this.recentAutoFollowErrors = new LinkedHashMap() { + @Override + protected boolean removeEldestEntry(final Map.Entry eldest) { + return size() > MAX_AUTO_FOLLOW_ERRORS; + } + }; + } + + public synchronized AutoFollowStats getStats() { + return new AutoFollowStats( + numberOfFailedIndicesAutoFollowed, + numberOfFailedRemoteClusterStateRequests, + numberOfSuccessfulIndicesAutoFollowed, + new TreeMap<>(recentAutoFollowErrors) + ); + } + + synchronized void updateStats(List results) { + for (AutoFollowResult result : results) { + if (result.clusterStateFetchException != null) { + recentAutoFollowErrors.put(result.clusterAlias, + new ElasticsearchException(result.clusterStateFetchException)); + numberOfFailedRemoteClusterStateRequests++; + LOGGER.warn(new ParameterizedMessage("failure occurred while fetching cluster state in leader cluster [{}]", + result.clusterAlias), result.clusterStateFetchException); + } else { + for (Map.Entry entry : result.autoFollowExecutionResults.entrySet()) { + if (entry.getValue() != null) { + numberOfFailedIndicesAutoFollowed++; + recentAutoFollowErrors.put(result.clusterAlias + ":" + entry.getKey().getName(), + new ElasticsearchException(entry.getValue())); + LOGGER.warn(new ParameterizedMessage("failure occurred while auto following index [{}] in leader cluster [{}]", + entry.getKey(), result.clusterAlias), entry.getValue()); + } else { 
+ numberOfSuccessfulIndicesAutoFollowed++; + } + } + } + + } } private void doAutoFollow() { @@ -94,10 +149,8 @@ public class AutoFollowCoordinator implements ClusterStateApplier { return; } - Consumer handler = e -> { - if (e != null) { - LOGGER.warn("failure occurred during auto-follower coordination", e); - } + Consumer> handler = results -> { + updateStats(results); threadPool.schedule(pollInterval, ThreadPool.Names.SAME, this::doAutoFollow); }; AutoFollower operation = new AutoFollower(handler, followerClusterState) { @@ -178,101 +231,97 @@ public class AutoFollowCoordinator implements ClusterStateApplier { abstract static class AutoFollower { - private final Consumer handler; + private final Consumer> handler; private final ClusterState followerClusterState; private final AutoFollowMetadata autoFollowMetadata; private final CountDown autoFollowPatternsCountDown; - private final AtomicReference autoFollowPatternsErrorHolder = new AtomicReference<>(); + private final AtomicArray autoFollowResults; - AutoFollower(final Consumer handler, final ClusterState followerClusterState) { + AutoFollower(final Consumer> handler, final ClusterState followerClusterState) { this.handler = handler; this.followerClusterState = followerClusterState; this.autoFollowMetadata = followerClusterState.getMetaData().custom(AutoFollowMetadata.TYPE); this.autoFollowPatternsCountDown = new CountDown(autoFollowMetadata.getPatterns().size()); + this.autoFollowResults = new AtomicArray<>(autoFollowMetadata.getPatterns().size()); } void autoFollowIndices() { + int i = 0; for (Map.Entry entry : autoFollowMetadata.getPatterns().entrySet()) { - String clusterAlias = entry.getKey(); - AutoFollowPattern autoFollowPattern = entry.getValue(); - List followedIndices = autoFollowMetadata.getFollowedLeaderIndexUUIDs().get(clusterAlias); + final int slot = i; + final String clusterAlias = entry.getKey(); + final AutoFollowPattern autoFollowPattern = entry.getValue(); getLeaderClusterState(autoFollowPattern.getHeaders(), clusterAlias, (leaderClusterState, e) -> { if (leaderClusterState != null) { assert e == null; - handleClusterAlias(clusterAlias, autoFollowPattern, followedIndices, leaderClusterState); + final List followedIndices = autoFollowMetadata.getFollowedLeaderIndexUUIDs().get(clusterAlias); + final List leaderIndicesToFollow = + getLeaderIndicesToFollow(autoFollowPattern, leaderClusterState, followerClusterState, followedIndices); + if (leaderIndicesToFollow.isEmpty()) { + finalise(slot, new AutoFollowResult(clusterAlias)); + } else { + Consumer resultHandler = result -> finalise(slot, result); + checkAutoFollowPattern(clusterAlias, autoFollowPattern, leaderIndicesToFollow, resultHandler); + } } else { - finalise(e); + finalise(slot, new AutoFollowResult(clusterAlias, e)); + } + }); + i++; + } + } + + private void checkAutoFollowPattern(String clusterAlias, AutoFollowPattern autoFollowPattern, + List leaderIndicesToFollow, Consumer resultHandler) { + + final CountDown leaderIndicesCountDown = new CountDown(leaderIndicesToFollow.size()); + final AtomicArray> results = new AtomicArray<>(leaderIndicesToFollow.size()); + for (int i = 0; i < leaderIndicesToFollow.size(); i++) { + final Index indexToFollow = leaderIndicesToFollow.get(i); + final int slot = i; + followLeaderIndex(clusterAlias, indexToFollow, autoFollowPattern, error -> { + results.set(slot, new Tuple<>(indexToFollow, error)); + if (leaderIndicesCountDown.countDown()) { + resultHandler.accept(new AutoFollowResult(clusterAlias, results.asList())); } }); } } - 
private void handleClusterAlias(String clusterAlias, AutoFollowPattern autoFollowPattern, - List followedIndexUUIDs, ClusterState leaderClusterState) { - final List leaderIndicesToFollow = - getLeaderIndicesToFollow(autoFollowPattern, leaderClusterState, followerClusterState, followedIndexUUIDs); - if (leaderIndicesToFollow.isEmpty()) { - finalise(null); - } else { - final CountDown leaderIndicesCountDown = new CountDown(leaderIndicesToFollow.size()); - final AtomicReference leaderIndicesErrorHolder = new AtomicReference<>(); - for (Index indexToFollow : leaderIndicesToFollow) { - final String leaderIndexName = indexToFollow.getName(); - final String followIndexName = getFollowerIndexName(autoFollowPattern, leaderIndexName); + private void followLeaderIndex(String clusterAlias, Index indexToFollow, + AutoFollowPattern pattern, Consumer onResult) { + final String leaderIndexName = indexToFollow.getName(); + final String followIndexName = getFollowerIndexName(pattern, leaderIndexName); - String leaderIndexNameWithClusterAliasPrefix = clusterAlias.equals("_local_") ? leaderIndexName : - clusterAlias + ":" + leaderIndexName; - FollowIndexAction.Request followRequest = - new FollowIndexAction.Request(leaderIndexNameWithClusterAliasPrefix, followIndexName, - autoFollowPattern.getMaxBatchOperationCount(), autoFollowPattern.getMaxConcurrentReadBatches(), - autoFollowPattern.getMaxOperationSizeInBytes(), autoFollowPattern.getMaxConcurrentWriteBatches(), - autoFollowPattern.getMaxWriteBufferSize(), autoFollowPattern.getMaxRetryDelay(), - autoFollowPattern.getIdleShardRetryDelay()); + String leaderIndexNameWithClusterAliasPrefix = clusterAlias.equals("_local_") ? leaderIndexName : + clusterAlias + ":" + leaderIndexName; + FollowIndexAction.Request request = + new FollowIndexAction.Request(leaderIndexNameWithClusterAliasPrefix, followIndexName, + pattern.getMaxBatchOperationCount(), pattern.getMaxConcurrentReadBatches(), + pattern.getMaxOperationSizeInBytes(), pattern.getMaxConcurrentWriteBatches(), + pattern.getMaxWriteBufferSize(), pattern.getMaxRetryDelay(), + pattern.getIdleShardRetryDelay()); - // Execute if the create and follow api call succeeds: - Runnable successHandler = () -> { - LOGGER.info("Auto followed leader index [{}] as follow index [{}]", leaderIndexName, followIndexName); + // Execute if the create and follow api call succeeds: + Runnable successHandler = () -> { + LOGGER.info("Auto followed leader index [{}] as follow index [{}]", leaderIndexName, followIndexName); - // This function updates the auto follow metadata in the cluster to record that the leader index has been followed: - // (so that we do not try to follow it in subsequent auto follow runs) - Function function = recordLeaderIndexAsFollowFunction(clusterAlias, indexToFollow); - // The coordinator always runs on the elected master node, so we can update cluster state here: - updateAutoFollowMetadata(function, updateError -> { - if (updateError != null) { - LOGGER.error("Failed to mark leader index [" + leaderIndexName + "] as auto followed", updateError); - if (leaderIndicesErrorHolder.compareAndSet(null, updateError) == false) { - leaderIndicesErrorHolder.get().addSuppressed(updateError); - } - } else { - LOGGER.debug("Successfully marked leader index [{}] as auto followed", leaderIndexName); - } - if (leaderIndicesCountDown.countDown()) { - finalise(leaderIndicesErrorHolder.get()); - } - }); - }; - // Execute if the create and follow apu call fails: - Consumer failureHandler = followError -> { - assert followError != 
null; - LOGGER.warn("Failed to auto follow leader index [" + leaderIndexName + "]", followError); - if (leaderIndicesCountDown.countDown()) { - finalise(followError); - } - }; - createAndFollow(autoFollowPattern.getHeaders(), followRequest, successHandler, failureHandler); - } - } + // This function updates the auto follow metadata in the cluster to record that the leader index has been followed: + // (so that we do not try to follow it in subsequent auto follow runs) + Function function = recordLeaderIndexAsFollowFunction(clusterAlias, indexToFollow); + // The coordinator always runs on the elected master node, so we can update cluster state here: + updateAutoFollowMetadata(function, onResult); + }; + createAndFollow(pattern.getHeaders(), request, successHandler, onResult); } - private void finalise(Exception failure) { - if (autoFollowPatternsErrorHolder.compareAndSet(null, failure) == false) { - autoFollowPatternsErrorHolder.get().addSuppressed(failure); - } - + private void finalise(int slot, AutoFollowResult result) { + assert autoFollowResults.get(slot) == null; + autoFollowResults.set(slot, result); if (autoFollowPatternsCountDown.countDown()) { - handler.accept(autoFollowPatternsErrorHolder.get()); + handler.accept(autoFollowResults.asList()); } } @@ -347,4 +396,33 @@ public class AutoFollowCoordinator implements ClusterStateApplier { ); } + + static class AutoFollowResult { + + final String clusterAlias; + final Exception clusterStateFetchException; + final Map autoFollowExecutionResults; + + AutoFollowResult(String clusterAlias, List> results) { + this.clusterAlias = clusterAlias; + + Map autoFollowExecutionResults = new HashMap<>(); + for (Tuple result : results) { + autoFollowExecutionResults.put(result.v1(), result.v2()); + } + + this.clusterStateFetchException = null; + this.autoFollowExecutionResults = Collections.unmodifiableMap(autoFollowExecutionResults); + } + + AutoFollowResult(String clusterAlias, Exception e) { + this.clusterAlias = clusterAlias; + this.clusterStateFetchException = e; + this.autoFollowExecutionResults = Collections.emptyMap(); + } + + AutoFollowResult(String clusterAlias) { + this(clusterAlias, (Exception) null); + } + } } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java index 31af326250c..218825e4120 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java @@ -11,15 +11,20 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ccr.CcrLicenseChecker; import org.elasticsearch.xpack.ccr.action.AutoFollowCoordinator.AutoFollower; import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern; +import org.elasticsearch.xpack.core.ccr.AutoFollowStats; import org.elasticsearch.xpack.core.ccr.action.FollowIndexAction; import java.util.ArrayList; +import 
java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; @@ -63,9 +68,15 @@ public class AutoFollowCoordinatorTests extends ESTestCase { .build(); boolean[] invoked = new boolean[]{false}; - Consumer handler = e -> { + Consumer> handler = results -> { invoked[0] = true; - assertThat(e, nullValue()); + + assertThat(results.size(), equalTo(1)); + assertThat(results.get(0).clusterStateFetchException, nullValue()); + List> entries = new ArrayList<>(results.get(0).autoFollowExecutionResults.entrySet()); + assertThat(entries.size(), equalTo(1)); + assertThat(entries.get(0).getKey().getName(), equalTo("logs-20190101")); + assertThat(entries.get(0).getValue(), nullValue()); }; AutoFollower autoFollower = new AutoFollower(handler, currentState) { @Override @@ -116,9 +127,12 @@ public class AutoFollowCoordinatorTests extends ESTestCase { Exception failure = new RuntimeException("failure"); boolean[] invoked = new boolean[]{false}; - Consumer handler = e -> { + Consumer> handler = results -> { invoked[0] = true; - assertThat(e, sameInstance(failure)); + + assertThat(results.size(), equalTo(1)); + assertThat(results.get(0).clusterStateFetchException, sameInstance(failure)); + assertThat(results.get(0).autoFollowExecutionResults.entrySet().size(), equalTo(0)); }; AutoFollower autoFollower = new AutoFollower(handler, followerState) { @Override @@ -170,9 +184,15 @@ public class AutoFollowCoordinatorTests extends ESTestCase { Exception failure = new RuntimeException("failure"); boolean[] invoked = new boolean[]{false}; - Consumer handler = e -> { + Consumer> handler = results -> { invoked[0] = true; - assertThat(e, sameInstance(failure)); + + assertThat(results.size(), equalTo(1)); + assertThat(results.get(0).clusterStateFetchException, nullValue()); + List> entries = new ArrayList<>(results.get(0).autoFollowExecutionResults.entrySet()); + assertThat(entries.size(), equalTo(1)); + assertThat(entries.get(0).getKey().getName(), equalTo("logs-20190101")); + assertThat(entries.get(0).getValue(), sameInstance(failure)); }; AutoFollower autoFollower = new AutoFollower(handler, followerState) { @Override @@ -225,9 +245,15 @@ public class AutoFollowCoordinatorTests extends ESTestCase { Exception failure = new RuntimeException("failure"); boolean[] invoked = new boolean[]{false}; - Consumer handler = e -> { + Consumer> handler = results -> { invoked[0] = true; - assertThat(e, sameInstance(failure)); + + assertThat(results.size(), equalTo(1)); + assertThat(results.get(0).clusterStateFetchException, nullValue()); + List> entries = new ArrayList<>(results.get(0).autoFollowExecutionResults.entrySet()); + assertThat(entries.size(), equalTo(1)); + assertThat(entries.get(0).getKey().getName(), equalTo("logs-20190101")); + assertThat(entries.get(0).getValue(), sameInstance(failure)); }; AutoFollower autoFollower = new AutoFollower(handler, followerState) { @Override @@ -317,4 +343,63 @@ public class AutoFollowCoordinatorTests extends ESTestCase { assertThat(AutoFollower.getFollowerIndexName(autoFollowPattern, "metrics-0"), equalTo("eu-metrics-0")); } + public void testStats() { + AutoFollowCoordinator autoFollowCoordinator = new AutoFollowCoordinator( + Settings.EMPTY, + null, + null, + mock(ClusterService.class), + new CcrLicenseChecker(() -> true) + ); + + autoFollowCoordinator.updateStats(Collections.singletonList( + new AutoFollowCoordinator.AutoFollowResult("_alias1")) + ); + AutoFollowStats autoFollowStats = autoFollowCoordinator.getStats(); + 
assertThat(autoFollowStats.getNumberOfFailedFollowIndices(), equalTo(0L)); + assertThat(autoFollowStats.getNumberOfFailedRemoteClusterStateRequests(), equalTo(0L)); + assertThat(autoFollowStats.getNumberOfSuccessfulFollowIndices(), equalTo(0L)); + assertThat(autoFollowStats.getRecentAutoFollowErrors().size(), equalTo(0)); + + autoFollowCoordinator.updateStats(Collections.singletonList( + new AutoFollowCoordinator.AutoFollowResult("_alias1", new RuntimeException("error"))) + ); + autoFollowStats = autoFollowCoordinator.getStats(); + assertThat(autoFollowStats.getNumberOfFailedFollowIndices(), equalTo(0L)); + assertThat(autoFollowStats.getNumberOfFailedRemoteClusterStateRequests(), equalTo(1L)); + assertThat(autoFollowStats.getNumberOfSuccessfulFollowIndices(), equalTo(0L)); + assertThat(autoFollowStats.getRecentAutoFollowErrors().size(), equalTo(1)); + assertThat(autoFollowStats.getRecentAutoFollowErrors().get("_alias1").getCause().getMessage(), equalTo("error")); + + autoFollowCoordinator.updateStats(Arrays.asList( + new AutoFollowCoordinator.AutoFollowResult("_alias1", + Collections.singletonList(Tuple.tuple(new Index("index1", "_na_"), new RuntimeException("error")))), + new AutoFollowCoordinator.AutoFollowResult("_alias2", + Collections.singletonList(Tuple.tuple(new Index("index2", "_na_"), new RuntimeException("error")))) + )); + autoFollowStats = autoFollowCoordinator.getStats(); + assertThat(autoFollowStats.getNumberOfFailedFollowIndices(), equalTo(2L)); + assertThat(autoFollowStats.getNumberOfFailedRemoteClusterStateRequests(), equalTo(1L)); + assertThat(autoFollowStats.getNumberOfSuccessfulFollowIndices(), equalTo(0L)); + assertThat(autoFollowStats.getRecentAutoFollowErrors().size(), equalTo(3)); + assertThat(autoFollowStats.getRecentAutoFollowErrors().get("_alias1").getCause().getMessage(), equalTo("error")); + assertThat(autoFollowStats.getRecentAutoFollowErrors().get("_alias1:index1").getCause().getMessage(), equalTo("error")); + assertThat(autoFollowStats.getRecentAutoFollowErrors().get("_alias2:index2").getCause().getMessage(), equalTo("error")); + + autoFollowCoordinator.updateStats(Arrays.asList( + new AutoFollowCoordinator.AutoFollowResult("_alias1", + Collections.singletonList(Tuple.tuple(new Index("index1", "_na_"), null))), + new AutoFollowCoordinator.AutoFollowResult("_alias2", + Collections.singletonList(Tuple.tuple(new Index("index2", "_na_"), null))) + )); + autoFollowStats = autoFollowCoordinator.getStats(); + assertThat(autoFollowStats.getNumberOfFailedFollowIndices(), equalTo(2L)); + assertThat(autoFollowStats.getNumberOfFailedRemoteClusterStateRequests(), equalTo(1L)); + assertThat(autoFollowStats.getNumberOfSuccessfulFollowIndices(), equalTo(2L)); + assertThat(autoFollowStats.getRecentAutoFollowErrors().size(), equalTo(3)); + assertThat(autoFollowStats.getRecentAutoFollowErrors().get("_alias1").getCause().getMessage(), equalTo("error")); + assertThat(autoFollowStats.getRecentAutoFollowErrors().get("_alias1:index1").getCause().getMessage(), equalTo("error")); + assertThat(autoFollowStats.getRecentAutoFollowErrors().get("_alias2:index2").getCause().getMessage(), equalTo("error")); + } + } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsTests.java new file mode 100644 index 00000000000..b9ee5bf4646 --- /dev/null +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowStatsTests.java @@ -0,0 +1,77 @@ 
+/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ccr.action; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractSerializingTestCase; +import org.elasticsearch.xpack.core.ccr.AutoFollowStats; + +import java.io.IOException; +import java.util.Map; +import java.util.NavigableMap; +import java.util.TreeMap; + +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; + +public class AutoFollowStatsTests extends AbstractSerializingTestCase { + + @Override + protected AutoFollowStats doParseInstance(XContentParser parser) throws IOException { + return AutoFollowStats.fromXContent(parser); + } + + @Override + protected AutoFollowStats createTestInstance() { + return new AutoFollowStats( + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong(), + randomReadExceptions() + ); + } + + private static NavigableMap randomReadExceptions() { + final int count = randomIntBetween(0, 16); + final NavigableMap readExceptions = new TreeMap<>(); + for (int i = 0; i < count; i++) { + readExceptions.put("" + i, new ElasticsearchException(new IllegalStateException("index [" + i + "]"))); + } + return readExceptions; + } + + @Override + protected Writeable.Reader instanceReader() { + return AutoFollowStats::new; + } + + @Override + protected void assertEqualInstances(AutoFollowStats expectedInstance, AutoFollowStats newInstance) { + assertNotSame(expectedInstance, newInstance); + + assertThat(newInstance.getRecentAutoFollowErrors().size(), equalTo(expectedInstance.getRecentAutoFollowErrors().size())); + assertThat(newInstance.getRecentAutoFollowErrors().keySet(), equalTo(expectedInstance.getRecentAutoFollowErrors().keySet())); + for (final Map.Entry entry : newInstance.getRecentAutoFollowErrors().entrySet()) { + // x-content loses the exception + final ElasticsearchException expected = expectedInstance.getRecentAutoFollowErrors().get(entry.getKey()); + assertThat(entry.getValue().getMessage(), containsString(expected.getMessage())); + assertNotNull(entry.getValue().getCause()); + assertThat( + entry.getValue().getCause(), + anyOf(instanceOf(ElasticsearchException.class), instanceOf(IllegalStateException.class))); + assertThat(entry.getValue().getCause().getMessage(), containsString(expected.getCause().getMessage())); + } + } + + @Override + protected boolean assertToXContentEquivalence() { + return false; + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java new file mode 100644 index 00000000000..7133a201f4e --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowStats.java @@ -0,0 +1,194 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.ccr; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.AbstractMap; +import java.util.List; +import java.util.Map; +import java.util.NavigableMap; +import java.util.Objects; +import java.util.TreeMap; +import java.util.stream.Collectors; + +public class AutoFollowStats implements Writeable, ToXContentObject { + + private static final ParseField NUMBER_OF_SUCCESSFUL_INDICES_AUTO_FOLLOWED = new ParseField("number_of_successful_follow_indices"); + private static final ParseField NUMBER_OF_FAILED_INDICES_AUTO_FOLLOWED = new ParseField("number_of_failed_follow_indices"); + private static final ParseField NUMBER_OF_FAILED_REMOTE_CLUSTER_STATE_REQUESTS = + new ParseField("number_of_failed_remote_cluster_state_requests"); + private static final ParseField RECENT_AUTO_FOLLOW_ERRORS = new ParseField("recent_auto_follow_errors"); + private static final ParseField LEADER_INDEX = new ParseField("leader_index"); + private static final ParseField AUTO_FOLLOW_EXCEPTION = new ParseField("auto_follow_exception"); + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser STATS_PARSER = new ConstructingObjectParser<>("auto_follow_stats", + args -> new AutoFollowStats( + (Long) args[0], + (Long) args[1], + (Long) args[2], + new TreeMap<>( + ((List>) args[3]) + .stream() + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))) + )); + + private static final ConstructingObjectParser, Void> AUTO_FOLLOW_EXCEPTIONS_PARSER = + new ConstructingObjectParser<>( + "auto_follow_stats_errors", + args -> new AbstractMap.SimpleEntry<>((String) args[0], (ElasticsearchException) args[1])); + + static { + AUTO_FOLLOW_EXCEPTIONS_PARSER.declareString(ConstructingObjectParser.constructorArg(), LEADER_INDEX); + AUTO_FOLLOW_EXCEPTIONS_PARSER.declareObject( + ConstructingObjectParser.constructorArg(), + (p, c) -> ElasticsearchException.fromXContent(p), + AUTO_FOLLOW_EXCEPTION); + + STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_FAILED_INDICES_AUTO_FOLLOWED); + STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_FAILED_REMOTE_CLUSTER_STATE_REQUESTS); + STATS_PARSER.declareLong(ConstructingObjectParser.constructorArg(), NUMBER_OF_SUCCESSFUL_INDICES_AUTO_FOLLOWED); + STATS_PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), AUTO_FOLLOW_EXCEPTIONS_PARSER, + RECENT_AUTO_FOLLOW_ERRORS); + } + + public static AutoFollowStats fromXContent(final XContentParser parser) { + return STATS_PARSER.apply(parser, null); + } + + private final long numberOfFailedFollowIndices; + private final long numberOfFailedRemoteClusterStateRequests; + private final long numberOfSuccessfulFollowIndices; + private final NavigableMap recentAutoFollowErrors; + + public AutoFollowStats( + long numberOfFailedFollowIndices, + long numberOfFailedRemoteClusterStateRequests, + long numberOfSuccessfulFollowIndices, + NavigableMap recentAutoFollowErrors + ) { + this.numberOfFailedFollowIndices = numberOfFailedFollowIndices; 
+ this.numberOfFailedRemoteClusterStateRequests = numberOfFailedRemoteClusterStateRequests; + this.numberOfSuccessfulFollowIndices = numberOfSuccessfulFollowIndices; + this.recentAutoFollowErrors = recentAutoFollowErrors; + } + + public AutoFollowStats(StreamInput in) throws IOException { + numberOfFailedFollowIndices = in.readVLong(); + numberOfFailedRemoteClusterStateRequests = in.readVLong(); + numberOfSuccessfulFollowIndices = in.readVLong(); + recentAutoFollowErrors= new TreeMap<>(in.readMap(StreamInput::readString, StreamInput::readException)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVLong(numberOfFailedFollowIndices); + out.writeVLong(numberOfFailedRemoteClusterStateRequests); + out.writeVLong(numberOfSuccessfulFollowIndices); + out.writeMap(recentAutoFollowErrors, StreamOutput::writeString, StreamOutput::writeException); + } + + public long getNumberOfFailedFollowIndices() { + return numberOfFailedFollowIndices; + } + + public long getNumberOfFailedRemoteClusterStateRequests() { + return numberOfFailedRemoteClusterStateRequests; + } + + public long getNumberOfSuccessfulFollowIndices() { + return numberOfSuccessfulFollowIndices; + } + + public NavigableMap getRecentAutoFollowErrors() { + return recentAutoFollowErrors; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(NUMBER_OF_FAILED_INDICES_AUTO_FOLLOWED.getPreferredName(), numberOfFailedFollowIndices); + builder.field(NUMBER_OF_FAILED_REMOTE_CLUSTER_STATE_REQUESTS.getPreferredName(), numberOfFailedRemoteClusterStateRequests); + builder.field(NUMBER_OF_SUCCESSFUL_INDICES_AUTO_FOLLOWED.getPreferredName(), numberOfSuccessfulFollowIndices); + builder.startArray(RECENT_AUTO_FOLLOW_ERRORS.getPreferredName()); + { + for (final Map.Entry entry : recentAutoFollowErrors.entrySet()) { + builder.startObject(); + { + builder.field(LEADER_INDEX.getPreferredName(), entry.getKey()); + builder.field(AUTO_FOLLOW_EXCEPTION.getPreferredName()); + builder.startObject(); + { + ElasticsearchException.generateThrowableXContent(builder, params, entry.getValue()); + } + builder.endObject(); + } + builder.endObject(); + } + } + builder.endArray(); + } + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AutoFollowStats that = (AutoFollowStats) o; + return numberOfFailedFollowIndices == that.numberOfFailedFollowIndices && + numberOfFailedRemoteClusterStateRequests == that.numberOfFailedRemoteClusterStateRequests && + numberOfSuccessfulFollowIndices == that.numberOfSuccessfulFollowIndices && + /* + * ElasticsearchException does not implement equals so we will assume the fetch exceptions are equal if they are equal + * up to the key set and their messages. Note that we are relying on the fact that the auto follow exceptions are ordered by + * keys. + */ + recentAutoFollowErrors.keySet().equals(that.recentAutoFollowErrors.keySet()) && + getFetchExceptionMessages(this).equals(getFetchExceptionMessages(that)); + } + + @Override + public int hashCode() { + return Objects.hash( + numberOfFailedFollowIndices, + numberOfFailedRemoteClusterStateRequests, + numberOfSuccessfulFollowIndices, + /* + * ElasticsearchException does not implement hash code so we will compute the hash code based on the key set and the + * messages. 
Note that we are relying on the fact that the auto follow exceptions are ordered by keys.
+         */
+            recentAutoFollowErrors.keySet(),
+            getFetchExceptionMessages(this)
+        );
+    }
+
+    private static List<String> getFetchExceptionMessages(final AutoFollowStats status) {
+        return status.getRecentAutoFollowErrors().values().stream().map(ElasticsearchException::getMessage).collect(Collectors.toList());
+    }
+
+    @Override
+    public String toString() {
+        return "AutoFollowStats{" +
+            "numberOfFailedFollowIndices=" + numberOfFailedFollowIndices +
+            ", numberOfFailedRemoteClusterStateRequests=" + numberOfFailedRemoteClusterStateRequests +
+            ", numberOfSuccessfulFollowIndices=" + numberOfSuccessfulFollowIndices +
+            ", recentAutoFollowErrors=" + recentAutoFollowErrors +
+            '}';
+    }
+}

From e075b872f620c4ac2abda18d281db332c7e69847 Mon Sep 17 00:00:00 2001
From: Alexander Reelsen 
Date: Tue, 18 Sep 2018 10:14:12 +0200
Subject: [PATCH 11/32] Dependencies: Update javax.mail in watcher to 1.6.2 (#33664)

---
 x-pack/plugin/watcher/build.gradle | 2 +-
 x-pack/plugin/watcher/licenses/javax.mail-1.5.6.jar.sha1 | 1 -
 x-pack/plugin/watcher/licenses/javax.mail-1.6.2.jar.sha1 | 1 +
 3 files changed, 2 insertions(+), 2 deletions(-)
 delete mode 100644 x-pack/plugin/watcher/licenses/javax.mail-1.5.6.jar.sha1
 create mode 100644 x-pack/plugin/watcher/licenses/javax.mail-1.6.2.jar.sha1

diff --git a/x-pack/plugin/watcher/build.gradle b/x-pack/plugin/watcher/build.gradle
index 3412cafc4f4..6f3497df8fc 100644
--- a/x-pack/plugin/watcher/build.gradle
+++ b/x-pack/plugin/watcher/build.gradle
@@ -35,7 +35,7 @@ dependencies {
     // watcher deps
     compile 'com.googlecode.owasp-java-html-sanitizer:owasp-java-html-sanitizer:r239'
     compile 'com.google.guava:guava:16.0.1' // needed by watcher for the html sanitizer and security tests for jimfs
-    compile 'com.sun.mail:javax.mail:1.5.6'
+    compile 'com.sun.mail:javax.mail:1.6.2'
     // HACK: java 9 removed javax.activation from the default modules, so instead of trying to add modules, which would have
     // to be conditionalized for java 8/9, we pull in the classes directly
     compile 'javax.activation:activation:1.1.1'
diff --git a/x-pack/plugin/watcher/licenses/javax.mail-1.5.6.jar.sha1 b/x-pack/plugin/watcher/licenses/javax.mail-1.5.6.jar.sha1
deleted file mode 100644
index c9d823f6a53..00000000000
--- a/x-pack/plugin/watcher/licenses/javax.mail-1.5.6.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-ab5daef2f881c42c8e280cbe918ec4d7fdfd7efe
\ No newline at end of file
diff --git a/x-pack/plugin/watcher/licenses/javax.mail-1.6.2.jar.sha1 b/x-pack/plugin/watcher/licenses/javax.mail-1.6.2.jar.sha1
new file mode 100644
index 00000000000..1c865d47f57
--- /dev/null
+++ b/x-pack/plugin/watcher/licenses/javax.mail-1.6.2.jar.sha1
@@ -0,0 +1 @@
+935151eb71beff17a2ffac15dd80184a99a0514f
\ No newline at end of file

From 139128856a5f63deb41243756e2b3b3a843fa57c Mon Sep 17 00:00:00 2001
From: Alexander Reelsen 
Date: Tue, 18 Sep 2018 10:25:16 +0200
Subject: [PATCH 12/32] Watcher: Use BulkProcessor in HistoryStore/TriggeredWatchStore (#32490)

Currently a watch execution results in one bulk request, when the
triggered watches that need to be executed are written into the
triggered watches index. However the update of the watch status, the
creation of the watch history entry, and the deletion of the triggered
watch entry are all single document operations. This can have quite a
negative impact once you are executing a lot of watches, as each
execution results in four document writes, three of them being single
document actions.
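
As a rough sketch of the batching model this change adopts (illustrative
only: the client variable, index names, and document IDs below are
placeholders, and the real wiring is in the Watcher.java hunk that
follows), a BulkProcessor buffers individual write requests and flushes
them as one bulk request once an action-count, size, or time threshold
is reached:

    import org.elasticsearch.action.bulk.BulkProcessor;
    import org.elasticsearch.action.bulk.BulkRequest;
    import org.elasticsearch.action.bulk.BulkResponse;
    import org.elasticsearch.action.delete.DeleteRequest;
    import org.elasticsearch.action.index.IndexRequest;
    import org.elasticsearch.common.unit.ByteSizeUnit;
    import org.elasticsearch.common.unit.ByteSizeValue;
    import org.elasticsearch.common.unit.TimeValue;
    import org.elasticsearch.common.xcontent.XContentType;

    BulkProcessor bulkProcessor = BulkProcessor.builder(client, new BulkProcessor.Listener() {
        @Override
        public void beforeBulk(long executionId, BulkRequest request) {
        }

        @Override
        public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
            // one place to inspect response.hasFailures() for the whole batch
        }

        @Override
        public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
            // the bulk request failed as a whole
        }
    })
        // these values mirror the defaults of the new xpack.watcher.bulk.* settings
        // listed below; with 1 action and 0 concurrent requests every add() flushes
        // immediately on the calling thread, preserving the old synchronous behavior
        .setBulkActions(1)
        .setConcurrentRequests(0)
        .setFlushInterval(TimeValue.timeValueSeconds(1))
        .setBulkSize(new ByteSizeValue(1, ByteSizeUnit.MB))
        .build();

    // a history write and a triggered watch deletion both become entries in the same batch
    bulkProcessor.add(new IndexRequest(".watcher-history-9-2018.09.18", "doc", "my-watch_0-1")
        .source("{}", XContentType.JSON));
    bulkProcessor.add(new DeleteRequest(".triggered_watches", "doc", "my-watch_0-1"));
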
This commit switches to a bulk processor instead of single document
actions for writing watch history entries and deleting triggered watch
entries. However, the defaults are to run synchronously as before,
because the number of concurrent requests is set to 0. This also fixes
a bug where the deletion of the triggered watch entry was done
asynchronously.

However, if you have a high number of watches being executed, you can
configure watcher to delete the triggered watch entries as well as
write the watch history entries via bulk requests. The triggered watch
deletions should still happen in a timely manner, whereas the history
entries might actually be bounded by size, as a single entry can easily
be 20kb.

The following settings have been added:

- xpack.watcher.bulk.actions (default 1)
- xpack.watcher.bulk.concurrent_requests (default 0)
- xpack.watcher.bulk.flush_interval (default 1s)
- xpack.watcher.bulk.size (default 1mb)

The drawback of this is, of course, that on a node outage you might end
up with watch history entries not being written, or with watches that
need to be executed again because they have not been deleted from the
triggered watches index. The window for these two cases increases when
the bulk processor is configured to wait to reach certain thresholds.
---
 .../elasticsearch/xpack/watcher/Watcher.java | 86 ++++++++-
 .../watcher/execution/ExecutionService.java | 17 +-
 .../execution/TriggeredWatchStore.java | 59 ++++---
 .../xpack/watcher/history/HistoryStore.java | 73 ++------
 .../hipchat/IntegrationAccount.java | 1 -
 .../execution/TriggeredWatchStoreTests.java | 166 ++++++++++++++----
 .../watcher/history/HistoryStoreTests.java | 52 ++++--
 7 files changed, 291 insertions(+), 163 deletions(-)

diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java
index 33b79c38cca..32d492b78a7 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java
@@ -5,9 +5,14 @@
  */
 package org.elasticsearch.xpack.watcher;
 
+import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.action.bulk.BulkItemResponse;
+import org.elasticsearch.action.bulk.BulkProcessor;
+import org.elasticsearch.action.bulk.BulkRequest;
+import org.elasticsearch.action.bulk.BulkResponse;
 import org.elasticsearch.bootstrap.BootstrapCheck;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
@@ -20,13 +25,14 @@ import org.elasticsearch.common.inject.Module;
 import org.elasticsearch.common.inject.util.Providers;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.logging.LoggerMessageFormat;
-import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.IndexScopedSettings;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.settings.SettingsFilter;
+import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import
org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -51,6 +57,7 @@ import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; +import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.ssl.SSLService; @@ -184,12 +191,16 @@ import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Supplier; import java.util.function.UnaryOperator; +import java.util.stream.Collectors; import static java.util.Collections.emptyList; +import static org.elasticsearch.common.settings.Setting.Property.NodeScope; +import static org.elasticsearch.xpack.core.ClientHelper.WATCHER_ORIGIN; public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin, ReloadablePlugin { @@ -201,6 +212,16 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin, Reloa Setting.boolSetting("xpack.watcher.encrypt_sensitive_data", false, Setting.Property.NodeScope); public static final Setting MAX_STOP_TIMEOUT_SETTING = Setting.timeSetting("xpack.watcher.stop.timeout", TimeValue.timeValueSeconds(30), Setting.Property.NodeScope); + private static final Setting SETTING_BULK_ACTIONS = + Setting.intSetting("xpack.watcher.bulk.actions", 1, 1, 10000, NodeScope); + private static final Setting SETTING_BULK_CONCURRENT_REQUESTS = + Setting.intSetting("xpack.watcher.bulk.concurrent_requests", 0, 0, 20, NodeScope); + private static final Setting SETTING_BULK_FLUSH_INTERVAL = + Setting.timeSetting("xpack.watcher.bulk.flush_interval", TimeValue.timeValueSeconds(1), NodeScope); + private static final Setting SETTING_BULK_SIZE = + Setting.byteSizeSetting("xpack.watcher.bulk.size", new ByteSizeValue(1, ByteSizeUnit.MB), + new ByteSizeValue(1, ByteSizeUnit.MB), new ByteSizeValue(10, ByteSizeUnit.MB), NodeScope); + public static final ScriptContext SCRIPT_SEARCH_CONTEXT = new ScriptContext<>("xpack", SearchScript.Factory.class); @@ -210,9 +231,10 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin, Reloa public static final ScriptContext SCRIPT_TEMPLATE_CONTEXT = new ScriptContext<>("xpack_template", TemplateScript.Factory.class); - private static final Logger logger = Loggers.getLogger(Watcher.class); + private static final Logger logger = LogManager.getLogger(Watcher.class); private WatcherIndexingListener listener; private HttpClient httpClient; + private BulkProcessor bulkProcessor; protected final Settings settings; protected final boolean transportClient; @@ -318,7 +340,49 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin, Reloa final InputRegistry inputRegistry = new InputRegistry(settings, inputFactories); inputFactories.put(ChainInput.TYPE, new ChainInputFactory(settings, inputRegistry)); - final HistoryStore historyStore = new HistoryStore(settings, client); + bulkProcessor = BulkProcessor.builder(ClientHelper.clientWithOrigin(client, WATCHER_ORIGIN), new BulkProcessor.Listener() { + @Override + public void beforeBulk(long executionId, BulkRequest request) { + } + + @Override + public void afterBulk(long executionId, BulkRequest request, BulkResponse 
response) { + if (response.hasFailures()) { + Map triggeredWatches = Arrays.stream(response.getItems()) + .filter(BulkItemResponse::isFailed) + .filter(r -> r.getIndex().startsWith(TriggeredWatchStoreField.INDEX_NAME)) + .collect(Collectors.toMap(BulkItemResponse::getId, BulkItemResponse::getFailureMessage)); + if (triggeredWatches.isEmpty() == false) { + String failure = triggeredWatches.values().stream().collect(Collectors.joining(", ")); + logger.error("triggered watches could not be deleted {}, failure [{}]", + triggeredWatches.keySet(), Strings.substring(failure, 0, 2000)); + } + + Map overwrittenIds = Arrays.stream(response.getItems()) + .filter(BulkItemResponse::isFailed) + .filter(r -> r.getIndex().startsWith(HistoryStoreField.INDEX_PREFIX)) + .filter(r -> r.getVersion() > 1) + .collect(Collectors.toMap(BulkItemResponse::getId, BulkItemResponse::getFailureMessage)); + if (overwrittenIds.isEmpty() == false) { + String failure = overwrittenIds.values().stream().collect(Collectors.joining(", ")); + logger.info("overwrote watch history entries {}, possible second execution of a triggered watch, failure [{}]", + overwrittenIds.keySet(), Strings.substring(failure, 0, 2000)); + } + } + } + + @Override + public void afterBulk(long executionId, BulkRequest request, Throwable failure) { + logger.error("error executing bulk", failure); + } + }) + .setFlushInterval(SETTING_BULK_FLUSH_INTERVAL.get(settings)) + .setBulkActions(SETTING_BULK_ACTIONS.get(settings)) + .setBulkSize(SETTING_BULK_SIZE.get(settings)) + .setConcurrentRequests(SETTING_BULK_CONCURRENT_REQUESTS.get(settings)) + .build(); + + HistoryStore historyStore = new HistoryStore(settings, bulkProcessor); // schedulers final Set scheduleParsers = new HashSet<>(); @@ -340,7 +404,7 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin, Reloa final TriggerService triggerService = new TriggerService(settings, triggerEngines); final TriggeredWatch.Parser triggeredWatchParser = new TriggeredWatch.Parser(settings, triggerService); - final TriggeredWatchStore triggeredWatchStore = new TriggeredWatchStore(settings, client, triggeredWatchParser); + final TriggeredWatchStore triggeredWatchStore = new TriggeredWatchStore(settings, client, triggeredWatchParser, bulkProcessor); final WatcherSearchTemplateService watcherSearchTemplateService = new WatcherSearchTemplateService(settings, scriptService, xContentRegistry); @@ -416,6 +480,12 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin, Reloa settings.add(Setting.simpleString("xpack.watcher.execution.scroll.timeout", Setting.Property.NodeScope)); settings.add(WatcherLifeCycleService.SETTING_REQUIRE_MANUAL_START); + // bulk processor configuration + settings.add(SETTING_BULK_ACTIONS); + settings.add(SETTING_BULK_CONCURRENT_REQUESTS); + settings.add(SETTING_BULK_FLUSH_INTERVAL); + settings.add(SETTING_BULK_SIZE); + // notification services settings.addAll(SlackService.getSettings()); settings.addAll(EmailService.getSettings()); @@ -608,7 +678,15 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin, Reloa @Override public void close() throws IOException { + bulkProcessor.flush(); IOUtils.closeWhileHandlingException(httpClient); + try { + if (bulkProcessor.awaitClose(10, TimeUnit.SECONDS) == false) { + logger.warn("failed to properly close watcher bulk processor"); + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } } /** diff --git 
a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java index 7b77afb225e..3507bd4eb36 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ExecutionService.java @@ -320,11 +320,8 @@ public class ExecutionService extends AbstractComponent { // TODO log watch record in logger, when saving in history store failed, otherwise the info is gone! } } - try { - triggeredWatchStore.delete(ctx.id()); - } catch (Exception e) { - logger.error((Supplier) () -> new ParameterizedMessage("failed to delete triggered watch [{}]", ctx.id()), e); - } + + triggeredWatchStore.delete(ctx.id()); } currentExecutions.get().remove(watchId); logger.debug("finished [{}]/[{}]", watchId, ctx.id()); @@ -412,14 +409,8 @@ public class ExecutionService extends AbstractComponent { triggeredWatch.id()), exc); } - try { - triggeredWatchStore.delete(triggeredWatch.id()); - } catch (Exception exc) { - logger.error((Supplier) () -> - new ParameterizedMessage("Error deleting triggered watch store record for watch [{}] after thread pool " + - "rejection", triggeredWatch.id()), exc); - } - }; + triggeredWatchStore.delete(triggeredWatch.id()); + } } WatchRecord executeInner(WatchExecutionContext ctx) { diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java index e0164b5bdbd..9a4b555d633 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.watcher.execution; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.bulk.BulkProcessor; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteRequest; @@ -24,7 +25,6 @@ import org.elasticsearch.cluster.routing.Preference; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -32,6 +32,7 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.SortBuilders; +import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.watcher.execution.TriggeredWatchStoreField; import org.elasticsearch.xpack.core.watcher.execution.Wid; import org.elasticsearch.xpack.core.watcher.watch.Watch; @@ -46,8 +47,6 @@ import java.util.Set; import java.util.stream.Collectors; import static org.elasticsearch.xpack.core.ClientHelper.WATCHER_ORIGIN; -import static 
org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; -import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin; public class TriggeredWatchStore extends AbstractComponent { @@ -58,21 +57,17 @@ public class TriggeredWatchStore extends AbstractComponent { private final TimeValue defaultBulkTimeout; private final TimeValue defaultSearchTimeout; + private final BulkProcessor bulkProcessor; - public TriggeredWatchStore(Settings settings, Client client, TriggeredWatch.Parser triggeredWatchParser) { + public TriggeredWatchStore(Settings settings, Client client, TriggeredWatch.Parser triggeredWatchParser, BulkProcessor bulkProcessor) { super(settings); this.scrollSize = settings.getAsInt("xpack.watcher.execution.scroll.size", 1000); - this.client = client; + this.client = ClientHelper.clientWithOrigin(client, WATCHER_ORIGIN); this.scrollTimeout = settings.getAsTime("xpack.watcher.execution.scroll.timeout", TimeValue.timeValueMinutes(5)); this.defaultBulkTimeout = settings.getAsTime("xpack.watcher.internal.ops.bulk.default_timeout", TimeValue.timeValueSeconds(120)); this.defaultSearchTimeout = settings.getAsTime("xpack.watcher.internal.ops.search.default_timeout", TimeValue.timeValueSeconds(30)); this.triggeredWatchParser = triggeredWatchParser; - } - - public static boolean validate(ClusterState state) { - IndexMetaData indexMetaData = WatchStoreUtils.getConcreteIndex(TriggeredWatchStoreField.INDEX_NAME, state.metaData()); - return indexMetaData == null || (indexMetaData.getState() == IndexMetaData.State.OPEN && - state.routingTable().index(indexMetaData.getIndex()).allPrimaryShardsActive()); + this.bulkProcessor = bulkProcessor; } public void putAll(final List triggeredWatches, final ActionListener listener) throws IOException { @@ -81,8 +76,7 @@ public class TriggeredWatchStore extends AbstractComponent { return; } - executeAsyncWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN, createBulkRequest(triggeredWatches, - TriggeredWatchStoreField.DOC_TYPE), listener, client::bulk); + client.bulk(createBulkRequest(triggeredWatches), listener); } public BulkResponse putAll(final List triggeredWatches) throws IOException { @@ -94,14 +88,14 @@ public class TriggeredWatchStore extends AbstractComponent { /** * Create a bulk request from the triggered watches with a specified document type * @param triggeredWatches The list of triggered watches - * @param docType The document type to use, either the current one or legacy * @return The bulk request for the triggered watches * @throws IOException If a triggered watch could not be parsed to JSON, this exception is thrown */ - private BulkRequest createBulkRequest(final List triggeredWatches, String docType) throws IOException { + private BulkRequest createBulkRequest(final List triggeredWatches) throws IOException { BulkRequest request = new BulkRequest(); for (TriggeredWatch triggeredWatch : triggeredWatches) { - IndexRequest indexRequest = new IndexRequest(TriggeredWatchStoreField.INDEX_NAME, docType, triggeredWatch.id().value()); + IndexRequest indexRequest = new IndexRequest(TriggeredWatchStoreField.INDEX_NAME, TriggeredWatchStoreField.DOC_TYPE, + triggeredWatch.id().value()); try (XContentBuilder builder = XContentFactory.jsonBuilder()) { triggeredWatch.toXContent(builder, ToXContent.EMPTY_PARAMS); indexRequest.source(builder); @@ -112,12 +106,15 @@ public class TriggeredWatchStore extends AbstractComponent { return request; } + /** + * Delete a triggered watch entry. 
+     * Note that this happens asynchronously, as these kinds of requests are batched together to reduce the number of concurrent requests.
+     *
+     * @param wid The ID of the triggered watch
+     */
     public void delete(Wid wid) {
         DeleteRequest request = new DeleteRequest(TriggeredWatchStoreField.INDEX_NAME, TriggeredWatchStoreField.DOC_TYPE, wid.value());
-        try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN)) {
-            client.delete(request); // FIXME shouldn't we wait before saying the delete was successful
-        }
-        logger.trace("successfully deleted triggered watch with id [{}]", wid);
+        bulkProcessor.add(request);
     }
 
     /**
@@ -140,9 +137,9 @@ public class TriggeredWatchStore extends AbstractComponent {
             return Collections.emptyList();
         }
 
-        try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN)) {
-            client.admin().indices().refresh(new RefreshRequest(TriggeredWatchStoreField.INDEX_NAME))
-                .actionGet(TimeValue.timeValueSeconds(5));
+        try {
+            RefreshRequest request = new RefreshRequest(TriggeredWatchStoreField.INDEX_NAME);
+            client.admin().indices().refresh(request).actionGet(TimeValue.timeValueSeconds(5));
         } catch (IndexNotFoundException e) {
             return Collections.emptyList();
         }
@@ -159,7 +156,7 @@
                 .version(true));
 
         SearchResponse response = null;
-        try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN)) {
+        try {
             response = client.search(searchRequest).actionGet(defaultSearchTimeout);
             logger.debug("trying to find triggered watches for ids {}: found [{}] docs", ids, response.getHits().getTotalHits());
             while (response.getHits().getHits().length != 0) {
@@ -176,14 +173,18 @@
             }
         } finally {
             if (response != null) {
-                try (ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN)) {
-                    ClearScrollRequest clearScrollRequest = new ClearScrollRequest();
-                    clearScrollRequest.addScrollId(response.getScrollId());
-                    client.clearScroll(clearScrollRequest).actionGet(scrollTimeout);
-                }
+                ClearScrollRequest clearScrollRequest = new ClearScrollRequest();
+                clearScrollRequest.addScrollId(response.getScrollId());
+                client.clearScroll(clearScrollRequest).actionGet(scrollTimeout);
             }
         }
         return triggeredWatches;
     }
+
+    public static boolean validate(ClusterState state) {
+        IndexMetaData indexMetaData = WatchStoreUtils.getConcreteIndex(TriggeredWatchStoreField.INDEX_NAME, state.metaData());
+        return indexMetaData == null || (indexMetaData.getState() == IndexMetaData.State.OPEN &&
+            state.routingTable().index(indexMetaData.getIndex()).allPrimaryShardsActive());
+    }
 }
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/history/HistoryStore.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/history/HistoryStore.java
index 64e909a2f73..723568f8ba7 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/history/HistoryStore.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/history/HistoryStore.java
@@ -7,17 +7,14 @@ package org.elasticsearch.xpack.watcher.history;
 
 import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.apache.logging.log4j.util.Supplier;
+import org.elasticsearch.action.bulk.BulkProcessor;
 import org.elasticsearch.action.index.IndexRequest;
-import
org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.engine.VersionConflictEngineException; -import org.elasticsearch.xpack.core.watcher.execution.ExecutionState; import org.elasticsearch.xpack.core.watcher.history.HistoryStoreField; import org.elasticsearch.xpack.core.watcher.history.WatchRecord; import org.elasticsearch.xpack.core.watcher.support.xcontent.WatcherParams; @@ -26,37 +23,18 @@ import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import java.io.IOException; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; -import java.util.concurrent.locks.Lock; -import java.util.concurrent.locks.ReadWriteLock; -import java.util.concurrent.locks.ReentrantReadWriteLock; -import static org.elasticsearch.xpack.core.ClientHelper.WATCHER_ORIGIN; -import static org.elasticsearch.xpack.core.ClientHelper.stashWithOrigin; import static org.elasticsearch.xpack.core.watcher.support.Exceptions.ioException; -public class HistoryStore extends AbstractComponent implements AutoCloseable { +public class HistoryStore extends AbstractComponent { public static final String DOC_TYPE = "doc"; - private final Client client; + private final BulkProcessor bulkProcessor; - private final ReadWriteLock readWriteLock = new ReentrantReadWriteLock(); - private final Lock putUpdateLock = readWriteLock.readLock(); - private final Lock stopLock = readWriteLock.writeLock(); - - public HistoryStore(Settings settings, Client client) { + public HistoryStore(Settings settings, BulkProcessor bulkProcessor) { super(settings); - this.client = client; - } - - @Override - public void close() { - // This will block while put or update actions are underway - stopLock.lock(); - stopLock.unlock(); + this.bulkProcessor = bulkProcessor; } /** @@ -65,20 +43,14 @@ public class HistoryStore extends AbstractComponent implements AutoCloseable { */ public void put(WatchRecord watchRecord) throws Exception { String index = HistoryStoreField.getHistoryIndexNameForTime(watchRecord.triggerEvent().triggeredTime()); - putUpdateLock.lock(); - try (XContentBuilder builder = XContentFactory.jsonBuilder(); - ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN)) { + try (XContentBuilder builder = XContentFactory.jsonBuilder()) { watchRecord.toXContent(builder, WatcherParams.HIDE_SECRETS); - IndexRequest request = new IndexRequest(index, DOC_TYPE, watchRecord.id().value()) - .source(builder) - .opType(IndexRequest.OpType.CREATE); - client.index(request).actionGet(30, TimeUnit.SECONDS); - logger.debug("indexed watch history record [{}]", watchRecord.id().value()); + IndexRequest request = new IndexRequest(index, DOC_TYPE, watchRecord.id().value()).source(builder); + request.opType(IndexRequest.OpType.CREATE); + bulkProcessor.add(request); } catch (IOException ioe) { throw ioException("failed to persist watch record [{}]", ioe, watchRecord); - } finally { - putUpdateLock.unlock(); } } @@ -88,33 +60,14 @@ public class HistoryStore extends AbstractComponent implements AutoCloseable { */ public void 
forcePut(WatchRecord watchRecord) { String index = HistoryStoreField.getHistoryIndexNameForTime(watchRecord.triggerEvent().triggeredTime()); - putUpdateLock.lock(); - try { - try (XContentBuilder builder = XContentFactory.jsonBuilder(); - ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN)) { + try (XContentBuilder builder = XContentFactory.jsonBuilder()) { watchRecord.toXContent(builder, WatcherParams.HIDE_SECRETS); - IndexRequest request = new IndexRequest(index, DOC_TYPE, watchRecord.id().value()) - .source(builder) - .opType(IndexRequest.OpType.CREATE); - client.index(request).get(30, TimeUnit.SECONDS); - logger.debug("indexed watch history record [{}]", watchRecord.id().value()); - } catch (VersionConflictEngineException vcee) { - watchRecord = new WatchRecord.MessageWatchRecord(watchRecord, ExecutionState.EXECUTED_MULTIPLE_TIMES, - "watch record [{ " + watchRecord.id() + " }] has been stored before, previous state [" + watchRecord.state() + "]"); - try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder(); - ThreadContext.StoredContext ignore = stashWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN)) { - IndexRequest request = new IndexRequest(index, DOC_TYPE, watchRecord.id().value()) - .source(xContentBuilder.value(watchRecord)); - client.index(request).get(30, TimeUnit.SECONDS); - } - logger.debug("overwrote watch history record [{}]", watchRecord.id().value()); - } - } catch (InterruptedException | ExecutionException | TimeoutException | IOException ioe) { + IndexRequest request = new IndexRequest(index, DOC_TYPE, watchRecord.id().value()).source(builder); + bulkProcessor.add(request); + } catch (IOException ioe) { final WatchRecord wr = watchRecord; logger.error((Supplier) () -> new ParameterizedMessage("failed to persist watch record [{}]", wr), ioe); - } finally { - putUpdateLock.unlock(); } } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/IntegrationAccount.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/IntegrationAccount.java index 8af00ae8f81..c33e788b614 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/IntegrationAccount.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/IntegrationAccount.java @@ -88,7 +88,6 @@ public class IntegrationAccount extends HipChatAccount { sentMessages.add(SentMessages.SentMessage.responded(room, SentMessages.SentMessage.TargetType.ROOM, message, request, response)); } catch (Exception e) { - logger.error("failed to execute hipchat api http request", e); sentMessages.add(SentMessages.SentMessage.error(room, SentMessages.SentMessage.TargetType.ROOM, message, e)); } return new SentMessages(name, sentMessages); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java index f38f4ad6a86..4012c8d24b5 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java @@ -7,15 +7,23 @@ package org.elasticsearch.xpack.watcher.execution; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; +import 
org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; +import org.elasticsearch.action.bulk.BulkAction; +import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.bulk.BulkProcessor; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.search.ClearScrollAction; import org.elasticsearch.action.search.ClearScrollResponse; -import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchScrollAction; import org.elasticsearch.action.search.SearchScrollRequest; -import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.client.AdminClient; import org.elasticsearch.client.Client; -import org.elasticsearch.client.IndicesAdminClient; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.AliasMetaData; @@ -65,6 +73,9 @@ import org.junit.Before; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; import static java.util.Collections.singleton; import static org.hamcrest.Matchers.equalTo; @@ -79,7 +90,6 @@ import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyZeroInteractions; import static org.mockito.Mockito.when; public class TriggeredWatchStoreTests extends ESTestCase { @@ -92,15 +102,34 @@ public class TriggeredWatchStoreTests extends ESTestCase { private Client client; private TriggeredWatch.Parser parser; private TriggeredWatchStore triggeredWatchStore; + private final Map bulks = new LinkedHashMap<>(); + private BulkProcessor.Listener listener = new BulkProcessor.Listener() { + @Override + public void beforeBulk(long executionId, BulkRequest request) { + } + + @Override + public void afterBulk(long executionId, BulkRequest request, BulkResponse response) { + bulks.put(request, response); + } + + @Override + public void afterBulk(long executionId, BulkRequest request, Throwable failure) { + throw new ElasticsearchException(failure); + } + }; @Before public void init() { + Settings settings = Settings.builder().put("node.name", randomAlphaOfLength(10)).build(); client = mock(Client.class); ThreadPool threadPool = mock(ThreadPool.class); when(client.threadPool()).thenReturn(threadPool); + when(client.settings()).thenReturn(settings); when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); parser = mock(TriggeredWatch.Parser.class); - triggeredWatchStore = new TriggeredWatchStore(Settings.EMPTY, client, parser); + BulkProcessor bulkProcessor = BulkProcessor.builder(client, listener).setConcurrentRequests(0).setBulkActions(1).build(); + triggeredWatchStore = new TriggeredWatchStore(settings, client, parser, bulkProcessor); } public void testFindTriggeredWatchesEmptyCollection() { @@ -174,14 +203,11 @@ public class TriggeredWatchStoreTests extends ESTestCase { csBuilder.routingTable(routingTableBuilder.build()); 
ClusterState cs = csBuilder.build(); - RefreshResponse refreshResponse = mockRefreshResponse(1, 1); - AdminClient adminClient = mock(AdminClient.class); - when(client.admin()).thenReturn(adminClient); - IndicesAdminClient indicesAdminClient = mock(IndicesAdminClient.class); - when(adminClient.indices()).thenReturn(indicesAdminClient); - PlainActionFuture future = PlainActionFuture.newFuture(); - when(indicesAdminClient.refresh(any())).thenReturn(future); - future.onResponse(refreshResponse); + doAnswer(invocation -> { + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + listener.onResponse(mockRefreshResponse(1, 1)); + return null; + }).when(client).execute(eq(RefreshAction.INSTANCE), any(), any()); SearchResponse searchResponse1 = mock(SearchResponse.class); when(searchResponse1.getSuccessfulShards()).thenReturn(1); @@ -194,9 +220,11 @@ public class TriggeredWatchStoreTests extends ESTestCase { SearchHits hits = new SearchHits(new SearchHit[]{hit}, 1, 1.0f); when(searchResponse1.getHits()).thenReturn(hits); when(searchResponse1.getScrollId()).thenReturn("_scrollId"); - PlainActionFuture searchFuture = PlainActionFuture.newFuture(); - when(client.search(any(SearchRequest.class))).thenReturn(searchFuture); - searchFuture.onResponse(searchResponse1); + doAnswer(invocation -> { + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + listener.onResponse(searchResponse1); + return null; + }).when(client).execute(eq(SearchAction.INSTANCE), any(), any()); // First return a scroll response with a single hit and then with no hits hit = new SearchHit(0, "second_foo", new Text(TriggeredWatchStoreField.DOC_TYPE), null); @@ -209,24 +237,27 @@ public class TriggeredWatchStoreTests extends ESTestCase { SearchResponse searchResponse3 = new SearchResponse(InternalSearchResponse.empty(), "_scrollId2", 1, 1, 0, 1, null, null); doAnswer(invocation -> { - SearchScrollRequest request = (SearchScrollRequest) invocation.getArguments()[0]; - PlainActionFuture searchScrollFuture = PlainActionFuture.newFuture(); + SearchScrollRequest request = (SearchScrollRequest) invocation.getArguments()[1]; + ActionListener listener = (ActionListener) invocation.getArguments()[2]; if (request.scrollId().equals("_scrollId")) { - searchScrollFuture.onResponse(searchResponse2); + listener.onResponse(searchResponse2); } else if (request.scrollId().equals("_scrollId1")) { - searchScrollFuture.onResponse(searchResponse3); + listener.onResponse(searchResponse3); } else { - searchScrollFuture.onFailure(new ElasticsearchException("test issue")); + listener.onFailure(new ElasticsearchException("test issue")); } - return searchScrollFuture; - }).when(client).searchScroll(any()); + return null; + }).when(client).execute(eq(SearchScrollAction.INSTANCE), any(), any()); TriggeredWatch triggeredWatch = mock(TriggeredWatch.class); when(parser.parse(eq("_id"), eq(1L), any(BytesReference.class))).thenReturn(triggeredWatch); - PlainActionFuture clearScrollResponseFuture = PlainActionFuture.newFuture(); - when(client.clearScroll(any())).thenReturn(clearScrollResponseFuture); - clearScrollResponseFuture.onResponse(new ClearScrollResponse(true, 1)); + doAnswer(invocation -> { + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + listener.onResponse(new ClearScrollResponse(true, 1)); + return null; + + }).when(client).execute(eq(ClearScrollAction.INSTANCE), any(), any()); assertThat(TriggeredWatchStore.validate(cs), is(true)); DateTime now = DateTime.now(UTC); @@ -251,10 +282,10 @@ 
public class TriggeredWatchStoreTests extends ESTestCase { assertThat(triggeredWatches, notNullValue()); assertThat(triggeredWatches, hasSize(watches.size())); - verify(client.admin().indices(), times(1)).refresh(any()); - verify(client, times(1)).search(any(SearchRequest.class)); - verify(client, times(2)).searchScroll(any()); - verify(client, times(1)).clearScroll(any()); + verify(client, times(1)).execute(eq(RefreshAction.INSTANCE), any(), any()); + verify(client, times(1)).execute(eq(SearchAction.INSTANCE), any(), any()); + verify(client, times(2)).execute(eq(SearchScrollAction.INSTANCE), any(), any()); + verify(client, times(1)).execute(eq(ClearScrollAction.INSTANCE), any(), any()); } // the elasticsearch migration helper is doing reindex using aliases, so we have to @@ -332,7 +363,7 @@ public class TriggeredWatchStoreTests extends ESTestCase { assertThat(TriggeredWatchStore.validate(cs), is(true)); Watch watch = mock(Watch.class); triggeredWatchStore.findTriggeredWatches(Collections.singletonList(watch), cs); - verifyZeroInteractions(client); + verify(client, times(0)).execute(any(), any(), any()); } public void testIndexNotFoundButInMetaData() { @@ -344,13 +375,11 @@ public class TriggeredWatchStoreTests extends ESTestCase { ClusterState cs = csBuilder.build(); Watch watch = mock(Watch.class); - AdminClient adminClient = mock(AdminClient.class); - when(client.admin()).thenReturn(adminClient); - IndicesAdminClient indicesAdminClient = mock(IndicesAdminClient.class); - when(adminClient.indices()).thenReturn(indicesAdminClient); - PlainActionFuture future = PlainActionFuture.newFuture(); - when(indicesAdminClient.refresh(any())).thenReturn(future); - future.onFailure(new IndexNotFoundException(TriggeredWatchStoreField.INDEX_NAME)); + doAnswer(invocation -> { + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + listener.onFailure(new IndexNotFoundException(TriggeredWatchStoreField.INDEX_NAME)); + return null; + }).when(client).execute(eq(RefreshAction.INSTANCE), any(), any()); Collection triggeredWatches = triggeredWatchStore.findTriggeredWatches(Collections.singletonList(watch), cs); assertThat(triggeredWatches, hasSize(0)); @@ -381,6 +410,65 @@ public class TriggeredWatchStoreTests extends ESTestCase { assertThat(BytesReference.bytes(jsonBuilder).utf8ToString(), equalTo(BytesReference.bytes(jsonBuilder2).utf8ToString())); } + public void testPutTriggeredWatches() throws Exception { + DateTime now = DateTime.now(UTC); + int numberOfTriggeredWatches = randomIntBetween(1, 100); + + List triggeredWatches = new ArrayList<>(numberOfTriggeredWatches); + for (int i = 0; i < numberOfTriggeredWatches; i++) { + triggeredWatches.add(new TriggeredWatch(new Wid("watch_id_", now), new ScheduleTriggerEvent("watch_id", now, now))); + } + + doAnswer(invocation -> { + BulkRequest bulkRequest = (BulkRequest) invocation.getArguments()[1]; + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + + int size = bulkRequest.requests().size(); + BulkItemResponse[] bulkItemResponse = new BulkItemResponse[size]; + for (int i = 0; i < size; i++) { + DocWriteRequest writeRequest = bulkRequest.requests().get(i); + ShardId shardId = new ShardId(TriggeredWatchStoreField.INDEX_NAME, "uuid", 0); + IndexResponse indexResponse = new IndexResponse(shardId, writeRequest.type(), writeRequest.id(), 1, 1, 1, true); + bulkItemResponse[i] = new BulkItemResponse(0, writeRequest.opType(), indexResponse); + } + + listener.onResponse(new BulkResponse(bulkItemResponse, 123)); + return null; 
+ }).when(client).execute(eq(BulkAction.INSTANCE), any(), any()); + + + BulkResponse response = triggeredWatchStore.putAll(triggeredWatches); + assertThat(response.hasFailures(), is(false)); + assertThat(response.getItems().length, is(numberOfTriggeredWatches)); + } + + public void testDeleteTriggeredWatches() throws Exception { + DateTime now = DateTime.now(UTC); + + doAnswer(invocation -> { + BulkRequest bulkRequest = (BulkRequest) invocation.getArguments()[0]; + ActionListener listener = (ActionListener) invocation.getArguments()[1]; + + int size = bulkRequest.requests().size(); + BulkItemResponse[] bulkItemResponse = new BulkItemResponse[size]; + for (int i = 0; i < size; i++) { + DocWriteRequest writeRequest = bulkRequest.requests().get(i); + ShardId shardId = new ShardId(TriggeredWatchStoreField.INDEX_NAME, "uuid", 0); + IndexResponse indexResponse = new IndexResponse(shardId, writeRequest.type(), writeRequest.id(), 1, 1, 1, true); + bulkItemResponse[i] = new BulkItemResponse(0, writeRequest.opType(), indexResponse); + } + + listener.onResponse(new BulkResponse(bulkItemResponse, 123)); + return null; + }).when(client).bulk(any(), any()); + + triggeredWatchStore.delete(new Wid("watch_id_", now)); + assertThat(bulks.keySet(), hasSize(1)); + BulkResponse response = bulks.values().iterator().next(); + assertThat(response.hasFailures(), is(false)); + assertThat(response.getItems().length, is(1)); + } + private RefreshResponse mockRefreshResponse(int total, int successful) { RefreshResponse refreshResponse = mock(RefreshResponse.class); when(refreshResponse.getTotalShards()).thenReturn(total); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryStoreTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryStoreTests.java index 8f1cce93055..19bf1ba5a1f 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryStoreTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/history/HistoryStoreTests.java @@ -7,10 +7,14 @@ package org.elasticsearch.xpack.watcher.history; import org.apache.http.HttpStatus; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest.OpType; +import org.elasticsearch.action.bulk.BulkItemResponse; +import org.elasticsearch.action.bulk.BulkProcessor; +import org.elasticsearch.action.bulk.BulkRequest; +import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; -import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Client; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -42,6 +46,7 @@ import static org.elasticsearch.xpack.core.watcher.history.HistoryStoreField.get import static org.elasticsearch.xpack.core.watcher.support.WatcherIndexTemplateRegistryField.INDEX_TEMPLATE_VERSION; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; import static org.hamcrest.core.IsEqual.equalTo; import static org.joda.time.DateTimeZone.UTC; @@ -58,11 +63,15 @@ public class HistoryStoreTests extends ESTestCase { @Before public void init() { + Settings settings = Settings.builder().put("node.name", 
randomAlphaOfLength(10)).build(); client = mock(Client.class); ThreadPool threadPool = mock(ThreadPool.class); when(client.threadPool()).thenReturn(threadPool); - when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); - historyStore = new HistoryStore(Settings.EMPTY, client); + when(client.settings()).thenReturn(settings); + when(threadPool.getThreadContext()).thenReturn(new ThreadContext(settings)); + BulkProcessor.Listener listener = mock(BulkProcessor.Listener.class); + BulkProcessor bulkProcessor = BulkProcessor.builder(client, listener).setConcurrentRequests(0).setBulkActions(1).build(); + historyStore = new HistoryStore(settings, bulkProcessor); } public void testPut() throws Exception { @@ -75,19 +84,21 @@ public class HistoryStoreTests extends ESTestCase { IndexResponse indexResponse = mock(IndexResponse.class); doAnswer(invocation -> { - IndexRequest request = (IndexRequest) invocation.getArguments()[0]; - PlainActionFuture indexFuture = PlainActionFuture.newFuture(); - if (request.id().equals(wid.value()) && request.type().equals(HistoryStore.DOC_TYPE) && request.opType() == OpType.CREATE - && request.index().equals(index)) { - indexFuture.onResponse(indexResponse); + BulkRequest request = (BulkRequest) invocation.getArguments()[1]; + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + + IndexRequest indexRequest = (IndexRequest) request.requests().get(0); + if (indexRequest.id().equals(wid.value()) && indexRequest.type().equals(HistoryStore.DOC_TYPE) && + indexRequest.opType() == OpType.CREATE && indexRequest.index().equals(index)) { + listener.onResponse(new BulkResponse(new BulkItemResponse[]{ new BulkItemResponse(1, OpType.CREATE, indexResponse) }, 1)); } else { - indexFuture.onFailure(new ElasticsearchException("test issue")); + listener.onFailure(new ElasticsearchException("test issue")); } - return indexFuture; - }).when(client).index(any()); + return null; + }).when(client).bulk(any(), any()); historyStore.put(watchRecord); - verify(client).index(any()); + verify(client).bulk(any(), any()); } public void testIndexNameGeneration() { @@ -139,10 +150,15 @@ public class HistoryStoreTests extends ESTestCase { } watchRecord.result().actionsResults().put(JiraAction.TYPE, result); - PlainActionFuture indexResponseFuture = PlainActionFuture.newFuture(); - indexResponseFuture.onResponse(mock(IndexResponse.class)); - ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(IndexRequest.class); - when(client.index(requestCaptor.capture())).thenReturn(indexResponseFuture); + ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(BulkRequest.class); + doAnswer(invocation -> { + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + + IndexResponse indexResponse = mock(IndexResponse.class); + listener.onResponse(new BulkResponse(new BulkItemResponse[]{ new BulkItemResponse(1, OpType.CREATE, indexResponse) }, 1)); + return null; + }).when(client).bulk(requestCaptor.capture(), any()); + if (randomBoolean()) { historyStore.put(watchRecord); } else { @@ -150,7 +166,9 @@ public class HistoryStoreTests extends ESTestCase { } assertThat(requestCaptor.getAllValues(), hasSize(1)); - String indexedJson = requestCaptor.getValue().source().utf8ToString(); + assertThat(requestCaptor.getValue().requests().get(0), instanceOf(IndexRequest.class)); + IndexRequest capturedIndexRequest = (IndexRequest) requestCaptor.getValue().requests().get(0); + String indexedJson = capturedIndexRequest.source().utf8ToString(); assertThat(indexedJson, 
containsString(username)); assertThat(indexedJson, not(containsString(password))); } From 87cedef3cfca9b8a03100f7de4a3c1d40ac260d9 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 18 Sep 2018 10:29:02 +0200 Subject: [PATCH 13/32] NETWORKING:Def CName in Http Publish Addr to True (#33631) * Follow up to #32806 setting the setting to true for 7.x --- .../main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy | 3 --- docs/build.gradle | 2 -- modules/lang-painless/build.gradle | 1 - server/src/main/java/org/elasticsearch/http/HttpInfo.java | 2 +- 4 files changed, 1 insertion(+), 7 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 06c0827f1ff..0553f939595 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -828,9 +828,6 @@ class BuildPlugin implements Plugin { // TODO: remove this once ctx isn't added to update script params in 7.0 systemProperty 'es.scripting.update.ctx_in_params', 'false' - //TODO: remove this once the cname is prepended to the address by default in 7.0 - systemProperty 'es.http.cname_in_publish_address', 'true' - // Set the system keystore/truststore password if we're running tests in a FIPS-140 JVM if (project.inFipsJvm) { systemProperty 'javax.net.ssl.trustStorePassword', 'password' diff --git a/docs/build.gradle b/docs/build.gradle index aa075d05cd5..864567ba835 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -56,8 +56,6 @@ integTestCluster { // TODO: remove this for 7.0, this exists to allow the doc examples in 6.x to continue using the defaults systemProperty 'es.scripting.update.ctx_in_params', 'false' - //TODO: remove this once the cname is prepended to the address by default in 7.0 - systemProperty 'es.http.cname_in_publish_address', 'true' } // remove when https://github.com/elastic/elasticsearch/issues/31305 is fixed diff --git a/modules/lang-painless/build.gradle b/modules/lang-painless/build.gradle index 6f68c667fe6..b3cab595201 100644 --- a/modules/lang-painless/build.gradle +++ b/modules/lang-painless/build.gradle @@ -25,7 +25,6 @@ esplugin { integTestCluster { module project.project(':modules:mapper-extras') systemProperty 'es.scripting.update.ctx_in_params', 'false' - systemProperty 'es.http.cname_in_publish_address', 'true' } dependencies { diff --git a/server/src/main/java/org/elasticsearch/http/HttpInfo.java b/server/src/main/java/org/elasticsearch/http/HttpInfo.java index aece8131994..22bcd31850d 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpInfo.java +++ b/server/src/main/java/org/elasticsearch/http/HttpInfo.java @@ -41,7 +41,7 @@ public class HttpInfo implements Writeable, ToXContentFragment { /** Whether to add hostname to publish host field when serializing. */ private static final boolean CNAME_IN_PUBLISH_HOST = - parseBoolean(System.getProperty("es.http.cname_in_publish_address"), false); + parseBoolean(System.getProperty("es.http.cname_in_publish_address"), true); private final BoundTransportAddress address; private final long maxContentLength; From ab9c28a2b11142f86d6ca2c3bb52ab6bd5d39558 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Tue, 18 Sep 2018 11:44:52 +0300 Subject: [PATCH 14/32] SQL: Grammar tweak for number declarations (#33767) Consider plus and minus as part of a number declaration (to avoid the minus be treated as a negation). 
Close #33765 --- x-pack/plugin/sql/src/main/antlr/SqlBase.g4 | 4 +- .../xpack/sql/parser/SqlBaseParser.java | 249 +++++++++--------- .../xpack/sql/parser/ExpressionTests.java | 44 +++- 3 files changed, 157 insertions(+), 140 deletions(-) diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 b/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 index 9af2bd6a011..396cc70920a 100644 --- a/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 +++ b/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 @@ -305,8 +305,8 @@ unquoteIdentifier ; number - : DECIMAL_VALUE #decimalLiteral - | INTEGER_VALUE #integerLiteral + : (PLUS | MINUS)? DECIMAL_VALUE #decimalLiteral + | (PLUS | MINUS)? INTEGER_VALUE #integerLiteral ; string diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java index ebf5b0cb09d..164eacd402b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java @@ -3729,54 +3729,9 @@ class SqlBaseParser extends Parser { enterOuterAlt(_localctx, 1); { setState(548); - switch (_input.LA(1)) { - case T__0: - case ANALYZE: - case ANALYZED: - case CAST: - case CATALOGS: - case COLUMNS: - case DEBUG: - case EXECUTABLE: - case EXPLAIN: - case EXTRACT: - case FALSE: - case FORMAT: - case FUNCTIONS: - case GRAPHVIZ: - case LEFT: - case MAPPED: - case NULL: - case OPTIMIZED: - case PARSED: - case PHYSICAL: - case PLAN: - case RIGHT: - case RLIKE: - case QUERY: - case SCHEMAS: - case SHOW: - case SYS: - case TABLES: - case TEXT: - case TRUE: - case TYPE: - case TYPES: - case VERIFY: - case FUNCTION_ESC: - case DATE_ESC: - case TIME_ESC: - case TIMESTAMP_ESC: - case GUID_ESC: - case ASTERISK: - case PARAM: - case STRING: - case INTEGER_VALUE: - case DECIMAL_VALUE: - case IDENTIFIER: - case DIGIT_IDENTIFIER: - case QUOTED_IDENTIFIER: - case BACKQUOTED_IDENTIFIER: + _errHandler.sync(this); + switch ( getInterpreter().adaptivePredict(_input,74,_ctx) ) { + case 1: { _localctx = new ValueExpressionDefaultContext(_localctx); _ctx = _localctx; @@ -3786,8 +3741,7 @@ class SqlBaseParser extends Parser { primaryExpression(); } break; - case PLUS: - case MINUS: + case 2: { _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; @@ -3804,8 +3758,6 @@ class SqlBaseParser extends Parser { valueExpression(4); } break; - default: - throw new NoViableAltException(this); } _ctx.stop = _input.LT(-1); setState(562); @@ -4925,6 +4877,8 @@ class SqlBaseParser extends Parser { match(NULL); } break; + case PLUS: + case MINUS: case INTEGER_VALUE: case DECIMAL_VALUE: _localctx = new NumericLiteralContext(_localctx); @@ -5654,6 +5608,8 @@ class SqlBaseParser extends Parser { } public static class DecimalLiteralContext extends NumberContext { public TerminalNode DECIMAL_VALUE() { return getToken(SqlBaseParser.DECIMAL_VALUE, 0); } + public TerminalNode PLUS() { return getToken(SqlBaseParser.PLUS, 0); } + public TerminalNode MINUS() { return getToken(SqlBaseParser.MINUS, 0); } public DecimalLiteralContext(NumberContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { @@ -5671,6 +5627,8 @@ class SqlBaseParser extends Parser { } public static class IntegerLiteralContext extends NumberContext { public TerminalNode INTEGER_VALUE() { return getToken(SqlBaseParser.INTEGER_VALUE, 0); } + public TerminalNode PLUS() { return getToken(SqlBaseParser.PLUS, 0); 
} + public TerminalNode MINUS() { return getToken(SqlBaseParser.MINUS, 0); } public IntegerLiteralContext(NumberContext ctx) { copyFrom(ctx); } @Override public void enterRule(ParseTreeListener listener) { @@ -5690,27 +5648,55 @@ class SqlBaseParser extends Parser { public final NumberContext number() throws RecognitionException { NumberContext _localctx = new NumberContext(_ctx, getState()); enterRule(_localctx, 92, RULE_number); + int _la; try { - setState(717); - switch (_input.LA(1)) { - case DECIMAL_VALUE: + setState(723); + _errHandler.sync(this); + switch ( getInterpreter().adaptivePredict(_input,97,_ctx) ) { + case 1: _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(715); + setState(716); + _la = _input.LA(1); + if (_la==PLUS || _la==MINUS) { + { + setState(715); + _la = _input.LA(1); + if ( !(_la==PLUS || _la==MINUS) ) { + _errHandler.recoverInline(this); + } else { + consume(); + } + } + } + + setState(718); match(DECIMAL_VALUE); } break; - case INTEGER_VALUE: + case 2: _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(716); + setState(720); + _la = _input.LA(1); + if (_la==PLUS || _la==MINUS) { + { + setState(719); + _la = _input.LA(1); + if ( !(_la==PLUS || _la==MINUS) ) { + _errHandler.recoverInline(this); + } else { + consume(); + } + } + } + + setState(722); match(INTEGER_VALUE); } break; - default: - throw new NoViableAltException(this); } } catch (RecognitionException re) { @@ -5753,7 +5739,7 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(719); + setState(725); _la = _input.LA(1); if ( !(_la==PARAM || _la==STRING) ) { _errHandler.recoverInline(this); @@ -5825,7 +5811,7 @@ class SqlBaseParser extends Parser { try { enterOuterAlt(_localctx, 1); { - setState(721); + setState(727); _la = _input.LA(1); if ( !(((((_la - 6)) & ~0x3f) == 0 && ((1L << (_la - 6)) & ((1L << (ANALYZE - 6)) | (1L << (ANALYZED - 6)) | (1L << (CATALOGS - 6)) | (1L << (COLUMNS - 6)) | (1L << (DEBUG - 6)) | (1L << (EXECUTABLE - 6)) | (1L << (EXPLAIN - 6)) | (1L << (FORMAT - 6)) | (1L << (FUNCTIONS - 6)) | (1L << (GRAPHVIZ - 6)) | (1L << (MAPPED - 6)) | (1L << (OPTIMIZED - 6)) | (1L << (PARSED - 6)) | (1L << (PHYSICAL - 6)) | (1L << (PLAN - 6)) | (1L << (RLIKE - 6)) | (1L << (QUERY - 6)) | (1L << (SCHEMAS - 6)) | (1L << (SHOW - 6)) | (1L << (SYS - 6)) | (1L << (TABLES - 6)) | (1L << (TEXT - 6)) | (1L << (TYPE - 6)) | (1L << (TYPES - 6)) | (1L << (VERIFY - 6)))) != 0)) ) { _errHandler.recoverInline(this); @@ -5876,7 +5862,7 @@ class SqlBaseParser extends Parser { } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3l\u02d6\4\2\t\2\4"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3l\u02dc\4\2\t\2\4"+ "\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t"+ "\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ @@ -5930,62 +5916,63 @@ class SqlBaseParser extends Parser { "\u028d\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'"+ "\3\'\5\'\u02a1\n\'\3(\3(\3)\3)\3*\3*\3+\3+\3+\7+\u02ac\n+\f+\16+\u02af"+ "\13+\3+\3+\3,\3,\5,\u02b5\n,\3-\3-\3-\5-\u02ba\n-\3-\3-\3-\3-\5-\u02c0"+ - "\n-\3-\5-\u02c3\n-\3.\3.\5.\u02c7\n.\3/\3/\3/\5/\u02cc\n/\3\60\3\60\5"+ - "\60\u02d0\n\60\3\61\3\61\3\62\3\62\3\62\2\4.:\63\2\4\6\b\n\f\16\20\22"+ - "\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:<>@BDFHJLNPRTVXZ\\^`b\2\20\b"+ - 
"\2\7\7\t\t\31\31,,\62\62\66\66\4\2\"\"BB\4\2\t\t\62\62\4\2\37\37%%\3\2"+ - "\25\26\4\2\7\7aa\4\2\r\r\25\25\4\2\7\7\27\27\3\2XY\3\2Z\\\3\2RW\4\2\35"+ - "\35CC\3\2_`\20\2\b\t\22\24\31\31\33\33\36\36!\",,\62\62\668:<>?ABDEGG"+ - "\u0330\2d\3\2\2\2\4g\3\2\2\2\6\u00ce\3\2\2\2\b\u00d9\3\2\2\2\n\u00dd\3"+ - "\2\2\2\f\u00f2\3\2\2\2\16\u00f9\3\2\2\2\20\u00fb\3\2\2\2\22\u00ff\3\2"+ - "\2\2\24\u011b\3\2\2\2\26\u0125\3\2\2\2\30\u012f\3\2\2\2\32\u013e\3\2\2"+ - "\2\34\u0140\3\2\2\2\36\u0146\3\2\2\2 \u0148\3\2\2\2\"\u014f\3\2\2\2$\u0161"+ - "\3\2\2\2&\u0172\3\2\2\2(\u0182\3\2\2\2*\u019d\3\2\2\2,\u019f\3\2\2\2."+ - "\u01d2\3\2\2\2\60\u01df\3\2\2\2\62\u0211\3\2\2\2\64\u0213\3\2\2\2\66\u0216"+ - "\3\2\2\28\u0220\3\2\2\2:\u0226\3\2\2\2<\u024c\3\2\2\2>\u0253\3\2\2\2@"+ - "\u0255\3\2\2\2B\u0261\3\2\2\2D\u0263\3\2\2\2F\u026f\3\2\2\2H\u0271\3\2"+ - "\2\2J\u0285\3\2\2\2L\u02a0\3\2\2\2N\u02a2\3\2\2\2P\u02a4\3\2\2\2R\u02a6"+ - "\3\2\2\2T\u02ad\3\2\2\2V\u02b4\3\2\2\2X\u02c2\3\2\2\2Z\u02c6\3\2\2\2\\"+ - "\u02cb\3\2\2\2^\u02cf\3\2\2\2`\u02d1\3\2\2\2b\u02d3\3\2\2\2de\5\6\4\2"+ - "ef\7\2\2\3f\3\3\2\2\2gh\5,\27\2hi\7\2\2\3i\5\3\2\2\2j\u00cf\5\b\5\2ky"+ - "\7\33\2\2lu\7\3\2\2mn\78\2\2nt\t\2\2\2op\7\36\2\2pt\t\3\2\2qr\7G\2\2r"+ - "t\5P)\2sm\3\2\2\2so\3\2\2\2sq\3\2\2\2tw\3\2\2\2us\3\2\2\2uv\3\2\2\2vx"+ - "\3\2\2\2wu\3\2\2\2xz\7\4\2\2yl\3\2\2\2yz\3\2\2\2z{\3\2\2\2{\u00cf\5\6"+ - "\4\2|\u0088\7\24\2\2}\u0084\7\3\2\2~\177\78\2\2\177\u0083\t\4\2\2\u0080"+ - "\u0081\7\36\2\2\u0081\u0083\t\3\2\2\u0082~\3\2\2\2\u0082\u0080\3\2\2\2"+ - "\u0083\u0086\3\2\2\2\u0084\u0082\3\2\2\2\u0084\u0085\3\2\2\2\u0085\u0087"+ - "\3\2\2\2\u0086\u0084\3\2\2\2\u0087\u0089\7\4\2\2\u0088}\3\2\2\2\u0088"+ - "\u0089\3\2\2\2\u0089\u008a\3\2\2\2\u008a\u00cf\5\6\4\2\u008b\u008c\7>"+ - "\2\2\u008c\u008f\7A\2\2\u008d\u0090\5\64\33\2\u008e\u0090\5X-\2\u008f"+ - "\u008d\3\2\2\2\u008f\u008e\3\2\2\2\u008f\u0090\3\2\2\2\u0090\u00cf\3\2"+ - "\2\2\u0091\u0092\7>\2\2\u0092\u0093\7\23\2\2\u0093\u0096\t\5\2\2\u0094"+ - "\u0097\5\64\33\2\u0095\u0097\5X-\2\u0096\u0094\3\2\2\2\u0096\u0095\3\2"+ - "\2\2\u0097\u00cf\3\2\2\2\u0098\u009b\t\6\2\2\u0099\u009c\5\64\33\2\u009a"+ - "\u009c\5X-\2\u009b\u0099\3\2\2\2\u009b\u009a\3\2\2\2\u009c\u00cf\3\2\2"+ - "\2\u009d\u009e\7>\2\2\u009e\u00a0\7!\2\2\u009f\u00a1\5\64\33\2\u00a0\u009f"+ - "\3\2\2\2\u00a0\u00a1\3\2\2\2\u00a1\u00cf\3\2\2\2\u00a2\u00a3\7>\2\2\u00a3"+ - "\u00cf\7<\2\2\u00a4\u00a5\7?\2\2\u00a5\u00cf\7\22\2\2\u00a6\u00a7\7?\2"+ - "\2\u00a7\u00aa\7A\2\2\u00a8\u00a9\7\21\2\2\u00a9\u00ab\5\64\33\2\u00aa"+ - "\u00a8\3\2\2\2\u00aa\u00ab\3\2\2\2\u00ab\u00ae\3\2\2\2\u00ac\u00af\5\64"+ - "\33\2\u00ad\u00af\5X-\2\u00ae\u00ac\3\2\2\2\u00ae\u00ad\3\2\2\2\u00ae"+ - "\u00af\3\2\2\2\u00af\u00b9\3\2\2\2\u00b0\u00b1\7D\2\2\u00b1\u00b6\5`\61"+ - "\2\u00b2\u00b3\7\5\2\2\u00b3\u00b5\5`\61\2\u00b4\u00b2\3\2\2\2\u00b5\u00b8"+ - "\3\2\2\2\u00b6\u00b4\3\2\2\2\u00b6\u00b7\3\2\2\2\u00b7\u00ba\3\2\2\2\u00b8"+ - "\u00b6\3\2\2\2\u00b9\u00b0\3\2\2\2\u00b9\u00ba\3\2\2\2\u00ba\u00cf\3\2"+ - "\2\2\u00bb\u00bc\7?\2\2\u00bc\u00bf\7\23\2\2\u00bd\u00be\7\21\2\2\u00be"+ - "\u00c0\5`\61\2\u00bf\u00bd\3\2\2\2\u00bf\u00c0\3\2\2\2\u00c0\u00c4\3\2"+ - "\2\2\u00c1\u00c2\7@\2\2\u00c2\u00c5\5\64\33\2\u00c3\u00c5\5X-\2\u00c4"+ - "\u00c1\3\2\2\2\u00c4\u00c3\3\2\2\2\u00c4\u00c5\3\2\2\2\u00c5\u00c7\3\2"+ - "\2\2\u00c6\u00c8\5\64\33\2\u00c7\u00c6\3\2\2\2\u00c7\u00c8\3\2\2\2\u00c8"+ - "\u00cf\3\2\2\2\u00c9\u00ca\7?\2\2\u00ca\u00cf\7E\2\2\u00cb\u00cc\7?\2"+ - "\2\u00cc\u00cd\7@\2\2\u00cd\u00cf\7E\2\2\u00cej\3\2\2\2\u00cek\3\2\2\2"+ - 
"\u00ce|\3\2\2\2\u00ce\u008b\3\2\2\2\u00ce\u0091\3\2\2\2\u00ce\u0098\3"+ - "\2\2\2\u00ce\u009d\3\2\2\2\u00ce\u00a2\3\2\2\2\u00ce\u00a4\3\2\2\2\u00ce"+ - "\u00a6\3\2\2\2\u00ce\u00bb\3\2\2\2\u00ce\u00c9\3\2\2\2\u00ce\u00cb\3\2"+ - "\2\2\u00cf\7\3\2\2\2\u00d0\u00d1\7I\2\2\u00d1\u00d6\5\34\17\2\u00d2\u00d3"+ - "\7\5\2\2\u00d3\u00d5\5\34\17\2\u00d4\u00d2\3\2\2\2\u00d5\u00d8\3\2\2\2"+ - "\u00d6\u00d4\3\2\2\2\u00d6\u00d7\3\2\2\2\u00d7\u00da\3\2\2\2\u00d8\u00d6"+ - "\3\2\2\2\u00d9\u00d0\3\2\2\2\u00d9\u00da\3\2\2\2\u00da\u00db\3\2\2\2\u00db"+ + "\n-\3-\5-\u02c3\n-\3.\3.\5.\u02c7\n.\3/\3/\3/\5/\u02cc\n/\3\60\5\60\u02cf"+ + "\n\60\3\60\3\60\5\60\u02d3\n\60\3\60\5\60\u02d6\n\60\3\61\3\61\3\62\3"+ + "\62\3\62\2\4.:\63\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*,.\60\62"+ + "\64\668:<>@BDFHJLNPRTVXZ\\^`b\2\20\b\2\7\7\t\t\31\31,,\62\62\66\66\4\2"+ + "\"\"BB\4\2\t\t\62\62\4\2\37\37%%\3\2\25\26\4\2\7\7aa\4\2\r\r\25\25\4\2"+ + "\7\7\27\27\3\2XY\3\2Z\\\3\2RW\4\2\35\35CC\3\2_`\20\2\b\t\22\24\31\31\33"+ + "\33\36\36!\",,\62\62\668:<>?ABDEGG\u0338\2d\3\2\2\2\4g\3\2\2\2\6\u00ce"+ + "\3\2\2\2\b\u00d9\3\2\2\2\n\u00dd\3\2\2\2\f\u00f2\3\2\2\2\16\u00f9\3\2"+ + "\2\2\20\u00fb\3\2\2\2\22\u00ff\3\2\2\2\24\u011b\3\2\2\2\26\u0125\3\2\2"+ + "\2\30\u012f\3\2\2\2\32\u013e\3\2\2\2\34\u0140\3\2\2\2\36\u0146\3\2\2\2"+ + " \u0148\3\2\2\2\"\u014f\3\2\2\2$\u0161\3\2\2\2&\u0172\3\2\2\2(\u0182\3"+ + "\2\2\2*\u019d\3\2\2\2,\u019f\3\2\2\2.\u01d2\3\2\2\2\60\u01df\3\2\2\2\62"+ + "\u0211\3\2\2\2\64\u0213\3\2\2\2\66\u0216\3\2\2\28\u0220\3\2\2\2:\u0226"+ + "\3\2\2\2<\u024c\3\2\2\2>\u0253\3\2\2\2@\u0255\3\2\2\2B\u0261\3\2\2\2D"+ + "\u0263\3\2\2\2F\u026f\3\2\2\2H\u0271\3\2\2\2J\u0285\3\2\2\2L\u02a0\3\2"+ + "\2\2N\u02a2\3\2\2\2P\u02a4\3\2\2\2R\u02a6\3\2\2\2T\u02ad\3\2\2\2V\u02b4"+ + "\3\2\2\2X\u02c2\3\2\2\2Z\u02c6\3\2\2\2\\\u02cb\3\2\2\2^\u02d5\3\2\2\2"+ + "`\u02d7\3\2\2\2b\u02d9\3\2\2\2de\5\6\4\2ef\7\2\2\3f\3\3\2\2\2gh\5,\27"+ + "\2hi\7\2\2\3i\5\3\2\2\2j\u00cf\5\b\5\2ky\7\33\2\2lu\7\3\2\2mn\78\2\2n"+ + "t\t\2\2\2op\7\36\2\2pt\t\3\2\2qr\7G\2\2rt\5P)\2sm\3\2\2\2so\3\2\2\2sq"+ + "\3\2\2\2tw\3\2\2\2us\3\2\2\2uv\3\2\2\2vx\3\2\2\2wu\3\2\2\2xz\7\4\2\2y"+ + "l\3\2\2\2yz\3\2\2\2z{\3\2\2\2{\u00cf\5\6\4\2|\u0088\7\24\2\2}\u0084\7"+ + "\3\2\2~\177\78\2\2\177\u0083\t\4\2\2\u0080\u0081\7\36\2\2\u0081\u0083"+ + "\t\3\2\2\u0082~\3\2\2\2\u0082\u0080\3\2\2\2\u0083\u0086\3\2\2\2\u0084"+ + "\u0082\3\2\2\2\u0084\u0085\3\2\2\2\u0085\u0087\3\2\2\2\u0086\u0084\3\2"+ + "\2\2\u0087\u0089\7\4\2\2\u0088}\3\2\2\2\u0088\u0089\3\2\2\2\u0089\u008a"+ + "\3\2\2\2\u008a\u00cf\5\6\4\2\u008b\u008c\7>\2\2\u008c\u008f\7A\2\2\u008d"+ + "\u0090\5\64\33\2\u008e\u0090\5X-\2\u008f\u008d\3\2\2\2\u008f\u008e\3\2"+ + "\2\2\u008f\u0090\3\2\2\2\u0090\u00cf\3\2\2\2\u0091\u0092\7>\2\2\u0092"+ + "\u0093\7\23\2\2\u0093\u0096\t\5\2\2\u0094\u0097\5\64\33\2\u0095\u0097"+ + "\5X-\2\u0096\u0094\3\2\2\2\u0096\u0095\3\2\2\2\u0097\u00cf\3\2\2\2\u0098"+ + "\u009b\t\6\2\2\u0099\u009c\5\64\33\2\u009a\u009c\5X-\2\u009b\u0099\3\2"+ + "\2\2\u009b\u009a\3\2\2\2\u009c\u00cf\3\2\2\2\u009d\u009e\7>\2\2\u009e"+ + "\u00a0\7!\2\2\u009f\u00a1\5\64\33\2\u00a0\u009f\3\2\2\2\u00a0\u00a1\3"+ + "\2\2\2\u00a1\u00cf\3\2\2\2\u00a2\u00a3\7>\2\2\u00a3\u00cf\7<\2\2\u00a4"+ + "\u00a5\7?\2\2\u00a5\u00cf\7\22\2\2\u00a6\u00a7\7?\2\2\u00a7\u00aa\7A\2"+ + "\2\u00a8\u00a9\7\21\2\2\u00a9\u00ab\5\64\33\2\u00aa\u00a8\3\2\2\2\u00aa"+ + "\u00ab\3\2\2\2\u00ab\u00ae\3\2\2\2\u00ac\u00af\5\64\33\2\u00ad\u00af\5"+ + "X-\2\u00ae\u00ac\3\2\2\2\u00ae\u00ad\3\2\2\2\u00ae\u00af\3\2\2\2\u00af"+ + 
"\u00b9\3\2\2\2\u00b0\u00b1\7D\2\2\u00b1\u00b6\5`\61\2\u00b2\u00b3\7\5"+ + "\2\2\u00b3\u00b5\5`\61\2\u00b4\u00b2\3\2\2\2\u00b5\u00b8\3\2\2\2\u00b6"+ + "\u00b4\3\2\2\2\u00b6\u00b7\3\2\2\2\u00b7\u00ba\3\2\2\2\u00b8\u00b6\3\2"+ + "\2\2\u00b9\u00b0\3\2\2\2\u00b9\u00ba\3\2\2\2\u00ba\u00cf\3\2\2\2\u00bb"+ + "\u00bc\7?\2\2\u00bc\u00bf\7\23\2\2\u00bd\u00be\7\21\2\2\u00be\u00c0\5"+ + "`\61\2\u00bf\u00bd\3\2\2\2\u00bf\u00c0\3\2\2\2\u00c0\u00c4\3\2\2\2\u00c1"+ + "\u00c2\7@\2\2\u00c2\u00c5\5\64\33\2\u00c3\u00c5\5X-\2\u00c4\u00c1\3\2"+ + "\2\2\u00c4\u00c3\3\2\2\2\u00c4\u00c5\3\2\2\2\u00c5\u00c7\3\2\2\2\u00c6"+ + "\u00c8\5\64\33\2\u00c7\u00c6\3\2\2\2\u00c7\u00c8\3\2\2\2\u00c8\u00cf\3"+ + "\2\2\2\u00c9\u00ca\7?\2\2\u00ca\u00cf\7E\2\2\u00cb\u00cc\7?\2\2\u00cc"+ + "\u00cd\7@\2\2\u00cd\u00cf\7E\2\2\u00cej\3\2\2\2\u00cek\3\2\2\2\u00ce|"+ + "\3\2\2\2\u00ce\u008b\3\2\2\2\u00ce\u0091\3\2\2\2\u00ce\u0098\3\2\2\2\u00ce"+ + "\u009d\3\2\2\2\u00ce\u00a2\3\2\2\2\u00ce\u00a4\3\2\2\2\u00ce\u00a6\3\2"+ + "\2\2\u00ce\u00bb\3\2\2\2\u00ce\u00c9\3\2\2\2\u00ce\u00cb\3\2\2\2\u00cf"+ + "\7\3\2\2\2\u00d0\u00d1\7I\2\2\u00d1\u00d6\5\34\17\2\u00d2\u00d3\7\5\2"+ + "\2\u00d3\u00d5\5\34\17\2\u00d4\u00d2\3\2\2\2\u00d5\u00d8\3\2\2\2\u00d6"+ + "\u00d4\3\2\2\2\u00d6\u00d7\3\2\2\2\u00d7\u00da\3\2\2\2\u00d8\u00d6\3\2"+ + "\2\2\u00d9\u00d0\3\2\2\2\u00d9\u00da\3\2\2\2\u00da\u00db\3\2\2\2\u00db"+ "\u00dc\5\n\6\2\u00dc\t\3\2\2\2\u00dd\u00e8\5\16\b\2\u00de\u00df\7\64\2"+ "\2\u00df\u00e0\7\17\2\2\u00e0\u00e5\5\20\t\2\u00e1\u00e2\7\5\2\2\u00e2"+ "\u00e4\5\20\t\2\u00e3\u00e1\3\2\2\2\u00e4\u00e7\3\2\2\2\u00e5\u00e3\3"+ @@ -6152,17 +6139,19 @@ class SqlBaseParser extends Parser { "\7f\2\2\u02c5\u02c7\7g\2\2\u02c6\u02c4\3\2\2\2\u02c6\u02c5\3\2\2\2\u02c7"+ "[\3\2\2\2\u02c8\u02cc\7c\2\2\u02c9\u02cc\5b\62\2\u02ca\u02cc\7d\2\2\u02cb"+ "\u02c8\3\2\2\2\u02cb\u02c9\3\2\2\2\u02cb\u02ca\3\2\2\2\u02cc]\3\2\2\2"+ - "\u02cd\u02d0\7b\2\2\u02ce\u02d0\7a\2\2\u02cf\u02cd\3\2\2\2\u02cf\u02ce"+ - "\3\2\2\2\u02d0_\3\2\2\2\u02d1\u02d2\t\16\2\2\u02d2a\3\2\2\2\u02d3\u02d4"+ - "\t\17\2\2\u02d4c\3\2\2\2bsuy\u0082\u0084\u0088\u008f\u0096\u009b\u00a0"+ - "\u00aa\u00ae\u00b6\u00b9\u00bf\u00c4\u00c7\u00ce\u00d6\u00d9\u00e5\u00e8"+ - "\u00eb\u00f2\u00f9\u00fd\u0101\u0108\u010c\u0110\u0115\u0119\u0121\u0125"+ - "\u012c\u0137\u013a\u013e\u014a\u014d\u0153\u015a\u0161\u0164\u0168\u016c"+ - "\u0170\u0172\u017d\u0182\u0186\u0189\u018f\u0192\u0198\u019b\u019d\u01b0"+ - "\u01be\u01cc\u01d2\u01da\u01dc\u01e1\u01e4\u01ec\u01f5\u01fb\u0203\u0208"+ - "\u020e\u0211\u0218\u0220\u0226\u0232\u0234\u023e\u024c\u0253\u0261\u026f"+ - "\u0274\u027b\u027e\u0285\u028d\u02a0\u02ad\u02b4\u02b9\u02bf\u02c2\u02c6"+ - "\u02cb\u02cf"; + "\u02cd\u02cf\t\n\2\2\u02ce\u02cd\3\2\2\2\u02ce\u02cf\3\2\2\2\u02cf\u02d0"+ + "\3\2\2\2\u02d0\u02d6\7b\2\2\u02d1\u02d3\t\n\2\2\u02d2\u02d1\3\2\2\2\u02d2"+ + "\u02d3\3\2\2\2\u02d3\u02d4\3\2\2\2\u02d4\u02d6\7a\2\2\u02d5\u02ce\3\2"+ + "\2\2\u02d5\u02d2\3\2\2\2\u02d6_\3\2\2\2\u02d7\u02d8\t\16\2\2\u02d8a\3"+ + "\2\2\2\u02d9\u02da\t\17\2\2\u02dac\3\2\2\2dsuy\u0082\u0084\u0088\u008f"+ + "\u0096\u009b\u00a0\u00aa\u00ae\u00b6\u00b9\u00bf\u00c4\u00c7\u00ce\u00d6"+ + "\u00d9\u00e5\u00e8\u00eb\u00f2\u00f9\u00fd\u0101\u0108\u010c\u0110\u0115"+ + "\u0119\u0121\u0125\u012c\u0137\u013a\u013e\u014a\u014d\u0153\u015a\u0161"+ + "\u0164\u0168\u016c\u0170\u0172\u017d\u0182\u0186\u0189\u018f\u0192\u0198"+ + "\u019b\u019d\u01b0\u01be\u01cc\u01d2\u01da\u01dc\u01e1\u01e4\u01ec\u01f5"+ + "\u01fb\u0203\u0208\u020e\u0211\u0218\u0220\u0226\u0232\u0234\u023e\u024c"+ + 
"\u0253\u0261\u026f\u0274\u027b\u027e\u0285\u028d\u02a0\u02ad\u02b4\u02b9"+ + "\u02bf\u02c2\u02c6\u02cb\u02ce\u02d2\u02d5"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java index ecb5b83896e..004118e8cd2 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction; -import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Neg; import org.elasticsearch.xpack.sql.type.DataType; public class ExpressionTests extends ESTestCase { @@ -23,6 +22,30 @@ public class ExpressionTests extends ESTestCase { assertEquals("LEFT", uf.functionName()); } + public void testLiteralDouble() throws Exception { + Expression lt = parser.createExpression(String.valueOf(Double.MAX_VALUE)); + assertEquals(Literal.class, lt.getClass()); + Literal l = (Literal) lt; + assertEquals(Double.MAX_VALUE, l.value()); + assertEquals(DataType.DOUBLE, l.dataType()); + } + + public void testLiteralDoubleNegative() throws Exception { + Expression lt = parser.createExpression(String.valueOf(Double.MIN_VALUE)); + assertEquals(Literal.class, lt.getClass()); + Literal l = (Literal) lt; + assertEquals(Double.MIN_VALUE, l.value()); + assertEquals(DataType.DOUBLE, l.dataType()); + } + + public void testLiteralDoublePositive() throws Exception { + Expression lt = parser.createExpression("+" + Double.MAX_VALUE); + assertEquals(Literal.class, lt.getClass()); + Literal l = (Literal) lt; + assertEquals(Double.MAX_VALUE, l.value()); + assertEquals(DataType.DOUBLE, l.dataType()); + } + public void testLiteralLong() throws Exception { Expression lt = parser.createExpression(String.valueOf(Long.MAX_VALUE)); assertEquals(Literal.class, lt.getClass()); @@ -32,13 +55,18 @@ public class ExpressionTests extends ESTestCase { } public void testLiteralLongNegative() throws Exception { - // Long.MIN_VALUE doesn't work since it is being interpreted as negate positive.long which is 1 higher than Long.MAX_VALUE - Expression lt = parser.createExpression(String.valueOf(-Long.MAX_VALUE)); - assertEquals(Neg.class, lt.getClass()); - Neg n = (Neg) lt; - assertTrue(n.foldable()); - assertEquals(-Long.MAX_VALUE, n.fold()); - assertEquals(DataType.LONG, n.dataType()); + Expression lt = parser.createExpression(String.valueOf(Long.MIN_VALUE)); + assertTrue(lt.foldable()); + assertEquals(Long.MIN_VALUE, lt.fold()); + assertEquals(DataType.LONG, lt.dataType()); + } + + public void testLiteralLongPositive() throws Exception { + Expression lt = parser.createExpression("+" + String.valueOf(Long.MAX_VALUE)); + assertEquals(Literal.class, lt.getClass()); + Literal l = (Literal) lt; + assertEquals(Long.MAX_VALUE, l.value()); + assertEquals(DataType.LONG, l.dataType()); } public void testLiteralInteger() throws Exception { From 2fa09f062e5fe4165b457fb2b42ff4b65acdb8c7 Mon Sep 17 00:00:00 2001 From: markharwood Date: Tue, 18 Sep 2018 10:25:27 +0100 Subject: [PATCH 15/32] New plugin - Annotated_text field type (#30364) MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit New plugin for annotated_text field type. Largely a copy of `text` field type but adds ability to include markdown-like syntax in the text. The “AnnotatedText” class parses text+markup and converts into plain text and AnnotationTokens. The annotation token values are injected unchanged alongside the regular text tokens to provide a form of additional indexed overlay useful in positional searches and highlighting. Annotated_text fields do not support fielddata as we want to phase this out. Also includes a new "annotated" highlighter type that retains annotations and merges in search hits as additional annotation markup. Closes #29467 --- docs/plugins/mapper-annotated-text.asciidoc | 328 ++++++++ docs/plugins/mapper.asciidoc | 8 + docs/reference/cat/plugins.asciidoc | 1 + docs/reference/mapping/types.asciidoc | 1 + plugins/mapper-annotated-text/build.gradle | 23 + .../AnnotatedTextFieldMapper.java | 776 ++++++++++++++++++ .../plugin/mapper/AnnotatedTextPlugin.java | 44 + .../highlight/AnnotatedPassageFormatter.java | 201 +++++ .../highlight/AnnotatedTextHighlighter.java | 64 ++ .../AnnotatedTextClientYamlTestSuiteIT.java | 39 + .../AnnotatedTextFieldMapperTests.java | 681 +++++++++++++++ .../AnnotatedTextParsingTests.java | 73 ++ .../AnnotatedTextHighlighterTests.java | 185 +++++ .../test/mapper_annotatedtext/10_basic.yml | 44 + .../tests/module_and_plugin_test_cases.bash | 8 + .../subphase/highlight/HighlightUtils.java | 19 +- .../subphase/highlight/PlainHighlighter.java | 3 +- .../highlight/UnifiedHighlighter.java | 55 +- 18 files changed, 2523 insertions(+), 30 deletions(-) create mode 100644 docs/plugins/mapper-annotated-text.asciidoc create mode 100644 plugins/mapper-annotated-text/build.gradle create mode 100644 plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java create mode 100644 plugins/mapper-annotated-text/src/main/java/org/elasticsearch/plugin/mapper/AnnotatedTextPlugin.java create mode 100644 plugins/mapper-annotated-text/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AnnotatedPassageFormatter.java create mode 100644 plugins/mapper-annotated-text/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AnnotatedTextHighlighter.java create mode 100644 plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextClientYamlTestSuiteIT.java create mode 100644 plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java create mode 100644 plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextParsingTests.java create mode 100644 plugins/mapper-annotated-text/src/test/java/org/elasticsearch/search/highlight/AnnotatedTextHighlighterTests.java create mode 100644 plugins/mapper-annotated-text/src/test/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml diff --git a/docs/plugins/mapper-annotated-text.asciidoc b/docs/plugins/mapper-annotated-text.asciidoc new file mode 100644 index 00000000000..4528168a4d6 --- /dev/null +++ b/docs/plugins/mapper-annotated-text.asciidoc @@ -0,0 +1,328 @@ +[[mapper-annotated-text]] +=== Mapper Annotated Text Plugin + +experimental[] + +The mapper-annotated-text plugin provides the ability to index text that is a +combination of free-text and special markup that is typically used to identify +items of interest such as people or organisations (see NER or 
Named Entity Recognition +tools). + + +The elasticsearch markup allows one or more additional tokens to be injected, unchanged, into the token +stream at the same position as the underlying text it annotates. + +:plugin_name: mapper-annotated-text +include::install_remove.asciidoc[] + +[[mapper-annotated-text-usage]] +==== Using the `annotated-text` field + +The `annotated-text` tokenizes text content as per the more common `text` field (see +"limitations" below) but also injects any marked-up annotation tokens directly into +the search index: + +[source,js] +-------------------------- +PUT my_index +{ + "mappings": { + "_doc": { + "properties": { + "my_field": { + "type": "annotated_text" + } + } + } + } +} +-------------------------- +// CONSOLE + +Such a mapping would allow marked-up text eg wikipedia articles to be indexed as both text +and structured tokens. The annotations use a markdown-like syntax using URL encoding of +one or more values separated by the `&` symbol. + + +We can use the "_analyze" api to test how an example annotation would be stored as tokens +in the search index: + + +[source,js] +-------------------------- +GET my_index/_analyze +{ + "field": "my_field", + "text":"Investors in [Apple](Apple+Inc.) rejoiced." +} +-------------------------- +// NOTCONSOLE + +Response: + +[source,js] +-------------------------------------------------- +{ + "tokens": [ + { + "token": "investors", + "start_offset": 0, + "end_offset": 9, + "type": "", + "position": 0 + }, + { + "token": "in", + "start_offset": 10, + "end_offset": 12, + "type": "", + "position": 1 + }, + { + "token": "Apple Inc.", <1> + "start_offset": 13, + "end_offset": 18, + "type": "annotation", + "position": 2 + }, + { + "token": "apple", + "start_offset": 13, + "end_offset": 18, + "type": "", + "position": 2 + }, + { + "token": "rejoiced", + "start_offset": 19, + "end_offset": 27, + "type": "", + "position": 3 + } + ] +} +-------------------------------------------------- +// NOTCONSOLE + +<1> Note the whole annotation token `Apple Inc.` is placed, unchanged as a single token in +the token stream and at the same position (position 2) as the text token (`apple`) it annotates. + + +We can now perform searches for annotations using regular `term` queries that don't tokenize +the provided search values. Annotations are a more precise way of matching as can be seen +in this example where a search for `Beck` will not match `Jeff Beck` : + +[source,js] +-------------------------- +# Example documents +PUT my_index/_doc/1 +{ + "my_field": "[Beck](Beck) announced a new tour"<2> +} + +PUT my_index/_doc/2 +{ + "my_field": "[Jeff Beck](Jeff+Beck&Guitarist) plays a strat"<1> +} + +# Example search +GET my_index/_search +{ + "query": { + "term": { + "my_field": "Beck" <3> + } + } +} +-------------------------- +// CONSOLE + +<1> As well as tokenising the plain text into single words e.g. `beck`, here we +inject the single token value `Beck` at the same position as `beck` in the token stream. +<2> Note annotations can inject multiple tokens at the same position - here we inject both +the very specific value `Jeff Beck` and the broader term `Guitarist`. This enables +broader positional queries e.g. finding mentions of a `Guitarist` near to `strat`. 
+<3> A benefit of searching with these carefully defined annotation tokens is that a query for +`Beck` will not match document 2 that contains the tokens `jeff`, `beck` and `Jeff Beck` + +WARNING: Any use of `=` signs in annotation values eg `[Prince](person=Prince)` will +cause the document to be rejected with a parse failure. In future we hope to have a use for +the equals signs so wil actively reject documents that contain this today. + + +[[mapper-annotated-text-tips]] +==== Data modelling tips +===== Use structured and unstructured fields + +Annotations are normally a way of weaving structured information into unstructured text for +higher-precision search. + +`Entity resolution` is a form of document enrichment undertaken by specialist software or people +where references to entities in a document are disambiguated by attaching a canonical ID. +The ID is used to resolve any number of aliases or distinguish between people with the +same name. The hyperlinks connecting Wikipedia's articles are a good example of resolved +entity IDs woven into text. + +These IDs can be embedded as annotations in an annotated_text field but it often makes +sense to include them in dedicated structured fields to support discovery via aggregations: + +[source,js] +-------------------------- +PUT my_index +{ + "mappings": { + "_doc": { + "properties": { + "my_unstructured_text_field": { + "type": "annotated_text" + }, + "my_structured_people_field": { + "type": "text", + "fields": { + "keyword" :{ + "type": "keyword" + } + } + } + } + } + } +} +-------------------------- +// CONSOLE + +Applications would then typically provide content and discover it as follows: + +[source,js] +-------------------------- +# Example documents +PUT my_index/_doc/1 +{ + "my_unstructured_text_field": "[Shay](%40kimchy) created elasticsearch", + "my_twitter_handles": ["@kimchy"] <1> +} + +GET my_index/_search +{ + "query": { + "query_string": { + "query": "elasticsearch OR logstash OR kibana",<2> + "default_field": "my_unstructured_text_field" + } + }, + "aggregations": { + "top_people" :{ + "significant_terms" : { <3> + "field" : "my_twitter_handles.keyword" + } + } + } +} +-------------------------- +// CONSOLE + +<1> Note the `my_twitter_handles` contains a list of the annotation values +also used in the unstructured text. (Note the annotated_text syntax requires escaping). +By repeating the annotation values in a structured field this application has ensured that +the tokens discovered in the structured field can be used for search and highlighting +in the unstructured field. +<2> In this example we search for documents that talk about components of the elastic stack +<3> We use the `my_twitter_handles` field here to discover people who are significantly +associated with the elastic stack. + +===== Avoiding over-matching annotations +By design, the regular text tokens and the annotation tokens co-exist in the same indexed +field but in rare cases this can lead to some over-matching. + +The value of an annotation often denotes a _named entity_ (a person, place or company). +The tokens for these named entities are inserted untokenized, and differ from typical text +tokens because they are normally: + +* Mixed case e.g. `Madonna` +* Multiple words e.g. `Jeff Beck` +* Can have punctuation or numbers e.g. `Apple Inc.` or `@kimchy` + +This means, for the most part, a search for a named entity in the annotated text field will +not have any false positives e.g. 
when selecting `Apple Inc.` from an aggregation result +you can drill down to highlight uses in the text without "over matching" on any text tokens +like the word `apple` in this context: + + the apple was very juicy + +However, a problem arises if your named entity happens to be a single term and lower-case e.g. the +company `elastic`. In this case, a search on the annotated text field for the token `elastic` +may match a text document such as this: + + he fired an elastic band + +To avoid such false matches users should consider prefixing annotation values to ensure +they don't name clash with text tokens e.g. + + [elastic](Company_elastic) released version 7.0 of the elastic stack today + + + + +[[mapper-annotated-text-highlighter]] +==== Using the `annotated` highlighter + +The `annotated-text` plugin includes a custom highlighter designed to mark up search hits +in a way which is respectful of the original markup: + +[source,js] +-------------------------- +# Example documents +PUT my_index/_doc/1 +{ + "my_field": "The cat sat on the [mat](sku3578)" +} + +GET my_index/_search +{ + "query": { + "query_string": { + "query": "cats" + } + }, + "highlight": { + "fields": { + "my_field": { + "type": "annotated", <1> + "require_field_match": false + } + } + } +} +-------------------------- +// CONSOLE +<1> The `annotated` highlighter type is designed for use with annotated_text fields + +The annotated highlighter is based on the `unified` highlighter and supports the same +settings but does not use the `pre_tags` or `post_tags` parameters. Rather than using +html-like markup such as `cat` the annotated highlighter uses the same +markdown-like syntax used for annotations and injects a key=value annotation where `_hit_term` +is the key and the matched search term is the value e.g. + + The [cat](_hit_term=cat) sat on the [mat](sku3578) + +The annotated highlighter tries to be respectful of any existing markup in the original +text: + +* If the search term matches exactly the location of an existing annotation then the +`_hit_term` key is merged into the url-like syntax used in the `(...)` part of the +existing annotation. +* However, if the search term overlaps the span of an existing annotation it would break +the markup formatting so the original annotation is removed in favour of a new annotation +with just the search hit information in the results. +* Any non-overlapping annotations in the original text are preserved in highlighter +selections + + +[[mapper-annotated-text-limitations]] +==== Limitations + +The annotated_text field type supports the same mapping settings as the `text` field type +but with the following exceptions: + +* No support for `fielddata` or `fielddata_frequency_filter` +* No support for `index_prefixes` or `index_phrases` indexing diff --git a/docs/plugins/mapper.asciidoc b/docs/plugins/mapper.asciidoc index 226fc4e40d0..4026a45c59e 100644 --- a/docs/plugins/mapper.asciidoc +++ b/docs/plugins/mapper.asciidoc @@ -19,5 +19,13 @@ indexes the size in bytes of the original The mapper-murmur3 plugin allows hashes to be computed at index-time and stored in the index for later use with the `cardinality` aggregation. +<>:: + +The annotated text plugin provides the ability to index text that is a +combination of free-text and special markup that is typically used to identify +items of interest such as people or organisations (see NER or Named Entity Recognition +tools). 
+ include::mapper-size.asciidoc[] include::mapper-murmur3.asciidoc[] +include::mapper-annotated-text.asciidoc[] diff --git a/docs/reference/cat/plugins.asciidoc b/docs/reference/cat/plugins.asciidoc index a9915d7aaa2..9cb83321835 100644 --- a/docs/reference/cat/plugins.asciidoc +++ b/docs/reference/cat/plugins.asciidoc @@ -28,6 +28,7 @@ U7321H6 discovery-gce {version} The Google Compute Engine (GCE) Discov U7321H6 ingest-attachment {version} Ingest processor that uses Apache Tika to extract contents U7321H6 ingest-geoip {version} Ingest processor that uses looksup geo data based on ip adresses using the Maxmind geo database U7321H6 ingest-user-agent {version} Ingest processor that extracts information from a user agent +U7321H6 mapper-annotated-text {version} The Mapper Annotated_text plugin adds support for text fields with markup used to inject annotation tokens into the index. U7321H6 mapper-murmur3 {version} The Mapper Murmur3 plugin allows to compute hashes of a field's values at index-time and to store them in the index. U7321H6 mapper-size {version} The Mapper Size plugin allows document to record their uncompressed size at index time. U7321H6 store-smb {version} The Store SMB plugin adds support for SMB stores. diff --git a/docs/reference/mapping/types.asciidoc b/docs/reference/mapping/types.asciidoc index fbd8181d095..9cd55bee855 100644 --- a/docs/reference/mapping/types.asciidoc +++ b/docs/reference/mapping/types.asciidoc @@ -35,6 +35,7 @@ string:: <> and <> `completion` to provide auto-complete suggestions <>:: `token_count` to count the number of tokens in a string {plugins}/mapper-murmur3.html[`mapper-murmur3`]:: `murmur3` to compute hashes of values at index-time and store them in the index +{plugins}/mapper-annotated-text.html[`mapper-annotated-text`]:: `annotated-text` to index text containing special markup (typically used for identifying named entities) <>:: Accepts queries from the query-dsl diff --git a/plugins/mapper-annotated-text/build.gradle b/plugins/mapper-annotated-text/build.gradle new file mode 100644 index 00000000000..8ce1ca2a416 --- /dev/null +++ b/plugins/mapper-annotated-text/build.gradle @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +esplugin { + description 'The Mapper Annotated_text plugin adds support for text fields with markup used to inject annotation tokens into the index.' 
+ classname 'org.elasticsearch.plugin.mapper.AnnotatedTextPlugin' +} diff --git a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java new file mode 100644 index 00000000000..8cc38d130ff --- /dev/null +++ b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapper.java @@ -0,0 +1,776 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.mapper.annotatedtext; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.Analyzer.TokenStreamComponents; +import org.apache.lucene.analysis.AnalyzerWrapper; +import org.apache.lucene.analysis.TokenFilter; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.standard.StandardAnalyzer; +import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; +import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; +import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; +import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute; +import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; +import org.apache.lucene.analysis.tokenattributes.TypeAttribute; +import org.apache.lucene.document.Field; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.MultiPhraseQuery; +import org.apache.lucene.search.NormsFieldExistsQuery; +import org.apache.lucene.search.PhraseQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.index.analysis.AnalyzerScope; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.FieldNamesFieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.ParseContext; +import org.elasticsearch.index.mapper.StringFieldType; +import org.elasticsearch.index.mapper.TextFieldMapper; +import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedText.AnnotationToken; +import 
org.elasticsearch.index.query.QueryShardContext; + +import java.io.IOException; +import java.io.Reader; +import java.io.StringReader; +import java.io.UncheckedIOException; +import java.io.UnsupportedEncodingException; +import java.net.URLDecoder; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import static org.elasticsearch.index.mapper.TypeParsers.parseTextField; + +/** A {@link FieldMapper} for full-text fields with annotation markup e.g. + * + * "New mayor is [John Smith](type=person&value=John%20Smith) " + * + * A special Analyzer wraps the default choice of analyzer in order + * to strip the text field of annotation markup and inject the related + * entity annotation tokens as supplementary tokens at the relevant points + * in the token stream. + * This code is largely a copy of TextFieldMapper which is less than ideal - + * my attempts to subclass TextFieldMapper failed but we can revisit this. + **/ +public class AnnotatedTextFieldMapper extends FieldMapper { + + public static final String CONTENT_TYPE = "annotated_text"; + private static final int POSITION_INCREMENT_GAP_USE_ANALYZER = -1; + + public static class Defaults { + public static final MappedFieldType FIELD_TYPE = new AnnotatedTextFieldType(); + static { + FIELD_TYPE.freeze(); + } + } + + public static class Builder extends FieldMapper.Builder { + + private int positionIncrementGap = POSITION_INCREMENT_GAP_USE_ANALYZER; + + public Builder(String name) { + super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); + builder = this; + } + + @Override + public AnnotatedTextFieldType fieldType() { + return (AnnotatedTextFieldType) super.fieldType(); + } + + public Builder positionIncrementGap(int positionIncrementGap) { + if (positionIncrementGap < 0) { + throw new MapperParsingException("[positions_increment_gap] must be positive, got " + positionIncrementGap); + } + this.positionIncrementGap = positionIncrementGap; + return this; + } + + @Override + public Builder docValues(boolean docValues) { + if (docValues) { + throw new IllegalArgumentException("[" + CONTENT_TYPE + "] fields do not support doc values"); + } + return super.docValues(docValues); + } + + @Override + public AnnotatedTextFieldMapper build(BuilderContext context) { + if (fieldType().indexOptions() == IndexOptions.NONE ) { + throw new IllegalArgumentException("[" + CONTENT_TYPE + "] fields must be indexed"); + } + if (positionIncrementGap != POSITION_INCREMENT_GAP_USE_ANALYZER) { + if (fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0) { + throw new IllegalArgumentException("Cannot set position_increment_gap on field [" + + name + "] without positions enabled"); + } + fieldType.setIndexAnalyzer(new NamedAnalyzer(fieldType.indexAnalyzer(), positionIncrementGap)); + fieldType.setSearchAnalyzer(new NamedAnalyzer(fieldType.searchAnalyzer(), positionIncrementGap)); + fieldType.setSearchQuoteAnalyzer(new NamedAnalyzer(fieldType.searchQuoteAnalyzer(), positionIncrementGap)); + } else { + //Using the analyzer's default BUT need to do the same thing AnalysisRegistry.processAnalyzerFactory + // does to splice in new default of posIncGap=100 by wrapping the analyzer + if (fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0) { + int overrideInc = TextFieldMapper.Defaults.POSITION_INCREMENT_GAP; + fieldType.setIndexAnalyzer(new 
NamedAnalyzer(fieldType.indexAnalyzer(), overrideInc)); + fieldType.setSearchAnalyzer(new NamedAnalyzer(fieldType.searchAnalyzer(), overrideInc)); + fieldType.setSearchQuoteAnalyzer(new NamedAnalyzer(fieldType.searchQuoteAnalyzer(),overrideInc)); + } + } + setupFieldType(context); + return new AnnotatedTextFieldMapper( + name, fieldType(), defaultFieldType, positionIncrementGap, + context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); + } + } + + public static class TypeParser implements Mapper.TypeParser { + @Override + public Mapper.Builder parse( + String fieldName, Map node, ParserContext parserContext) throws MapperParsingException { + AnnotatedTextFieldMapper.Builder builder = new AnnotatedTextFieldMapper.Builder(fieldName); + + builder.fieldType().setIndexAnalyzer(parserContext.getIndexAnalyzers().getDefaultIndexAnalyzer()); + builder.fieldType().setSearchAnalyzer(parserContext.getIndexAnalyzers().getDefaultSearchAnalyzer()); + builder.fieldType().setSearchQuoteAnalyzer(parserContext.getIndexAnalyzers().getDefaultSearchQuoteAnalyzer()); + parseTextField(builder, fieldName, node, parserContext); + for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { + Map.Entry entry = iterator.next(); + String propName = entry.getKey(); + Object propNode = entry.getValue(); + if (propName.equals("position_increment_gap")) { + int newPositionIncrementGap = XContentMapValues.nodeIntegerValue(propNode, -1); + builder.positionIncrementGap(newPositionIncrementGap); + iterator.remove(); + } + } + return builder; + } + } + + + /** + * Parses markdown-like syntax into plain text and AnnotationTokens with offsets for + * annotations found in texts + */ + public static final class AnnotatedText { + public final String textPlusMarkup; + public final String textMinusMarkup; + List annotations; + + // Format is markdown-like syntax for URLs eg: + // "New mayor is [John Smith](type=person&value=John%20Smith) " + static Pattern markdownPattern = Pattern.compile("\\[([^\\]\\[]*)\\]\\(([^\\)\\(]*)\\)"); + + public static AnnotatedText parse (String textPlusMarkup) { + List annotations =new ArrayList<>(); + Matcher m = markdownPattern.matcher(textPlusMarkup); + int lastPos = 0; + StringBuilder sb = new StringBuilder(); + while(m.find()){ + if(m.start() > lastPos){ + sb.append(textPlusMarkup.substring(lastPos, m.start())); + } + + int startOffset = sb.length(); + int endOffset = sb.length() + m.group(1).length(); + sb.append(m.group(1)); + lastPos = m.end(); + + String[] pairs = m.group(2).split("&"); + String value = null; + for (String pair : pairs) { + String[] kv = pair.split("="); + try { + if(kv.length == 2){ + throw new ElasticsearchParseException("key=value pairs are not supported in annotations"); + } + if(kv.length == 1) { + //Check "=" sign wasn't in the pair string + if(kv[0].length() == pair.length()) { + //untyped value + value = URLDecoder.decode(kv[0], "UTF-8"); + } + } + if (value!=null && value.length() > 0) { + annotations.add(new AnnotationToken(startOffset, endOffset, value)); + } + } catch (UnsupportedEncodingException uee){ + throw new ElasticsearchParseException("Unsupported encoding parsing annotated text", uee); + } + } + } + if(lastPos < textPlusMarkup.length()){ + sb.append(textPlusMarkup.substring(lastPos)); + } + return new AnnotatedText(sb.toString(), textPlusMarkup, annotations); + } + + protected AnnotatedText(String textMinusMarkup, String textPlusMarkup, List annotations) { + this.textMinusMarkup = textMinusMarkup; + this.textPlusMarkup = 
textPlusMarkup; + this.annotations = annotations; + } + + public static final class AnnotationToken { + public final int offset; + public final int endOffset; + + public final String value; + public AnnotationToken(int offset, int endOffset, String value) { + this.offset = offset; + this.endOffset = endOffset; + this.value = value; + } + @Override + public String toString() { + return value +" ("+offset+" - "+endOffset+")"; + } + + public boolean intersects(int start, int end) { + return (start <= offset && end >= offset) || (start <= endOffset && end >= endOffset) + || (start >= offset && end <= endOffset); + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + endOffset; + result = prime * result + offset; + result = prime * result + Objects.hashCode(value); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + AnnotationToken other = (AnnotationToken) obj; + return Objects.equals(endOffset, other.endOffset) && Objects.equals(offset, other.offset) + && Objects.equals(value, other.value); + } + + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(textMinusMarkup); + sb.append("\n"); + annotations.forEach(a -> {sb.append(a); sb.append("\n");}); + return sb.toString(); + } + + public int numAnnotations() { + return annotations.size(); + } + + public AnnotationToken getAnnotation(int index) { + return annotations.get(index); + } + } + + // A utility class for use with highlighters where the content being highlighted + // needs plain text format for highlighting but marked-up format for token discovery. + // The class takes markedup format field values and returns plain text versions. + // When asked to tokenize plain-text versions by the highlighter it tokenizes the + // original markup form in order to inject annotations. 
+    public static final class AnnotatedHighlighterAnalyzer extends AnalyzerWrapper {
+        private Analyzer delegate;
+        private AnnotatedText[] annotations;
+        public AnnotatedHighlighterAnalyzer(Analyzer delegate) {
+            super(delegate.getReuseStrategy());
+            this.delegate = delegate;
+        }
+
+        public void init(String[] markedUpFieldValues) {
+            this.annotations = new AnnotatedText[markedUpFieldValues.length];
+            for (int i = 0; i < markedUpFieldValues.length; i++) {
+                annotations[i] = AnnotatedText.parse(markedUpFieldValues[i]);
+            }
+        }
+
+        public String[] getPlainTextValuesForHighlighter() {
+            String[] result = new String[annotations.length];
+            for (int i = 0; i < annotations.length; i++) {
+                result[i] = annotations[i].textMinusMarkup;
+            }
+            return result;
+        }
+
+        public AnnotationToken[] getIntersectingAnnotations(int start, int end) {
+            List<AnnotationToken> intersectingAnnotations = new ArrayList<>();
+            int fieldValueOffset = 0;
+            for (AnnotatedText fieldValueAnnotations : this.annotations) {
+                // This is called from a highlighter where all of the field values are concatenated,
+                // so each annotation offset will need to be adjusted so that it takes into account
+                // the previous values AND the MULTIVAL delimiter
+                for (AnnotationToken token : fieldValueAnnotations.annotations) {
+                    if (token.intersects(start - fieldValueOffset, end - fieldValueOffset)) {
+                        intersectingAnnotations.add(new AnnotationToken(token.offset + fieldValueOffset,
+                                token.endOffset + fieldValueOffset, token.value));
+                    }
+                }
+                // add 1 for the field value separator character
+                fieldValueOffset += fieldValueAnnotations.textMinusMarkup.length() + 1;
+            }
+            return intersectingAnnotations.toArray(new AnnotationToken[intersectingAnnotations.size()]);
+        }
+
+        @Override
+        public Analyzer getWrappedAnalyzer(String fieldName) {
+            return delegate;
+        }
+
+        @Override
+        protected TokenStreamComponents wrapComponents(String fieldName, TokenStreamComponents components) {
+            if (components instanceof AnnotatedHighlighterTokenStreamComponents) {
+                // already wrapped.
+                return components;
+            }
+            AnnotationsInjector injector = new AnnotationsInjector(components.getTokenStream());
+            return new AnnotatedHighlighterTokenStreamComponents(components.getTokenizer(), injector, this.annotations);
+        }
+    }
+
+    private static final class AnnotatedHighlighterTokenStreamComponents extends TokenStreamComponents {
+
+        private AnnotationsInjector annotationsInjector;
+        private AnnotatedText[] annotations;
+        int readerNum = 0;
+
+        AnnotatedHighlighterTokenStreamComponents(Tokenizer source, AnnotationsInjector annotationsFilter,
+                AnnotatedText[] annotations) {
+            super(source, annotationsFilter);
+            this.annotationsInjector = annotationsFilter;
+            this.annotations = annotations;
+        }
+
+        @Override
+        protected void setReader(Reader reader) {
+            String plainText = readToString(reader);
+            AnnotatedText at = this.annotations[readerNum++];
+            assert at.textMinusMarkup.equals(plainText);
+            // This code is reliant on the behaviour of the highlighter logic - it
+            // takes plain-text multi-value fields and then calls the same analyzer
+            // for each field value in turn. This class has cached the annotations
+            // associated with each plain-text value, arranged in the same order.
+            annotationsInjector.setAnnotations(at);
+            super.setReader(new StringReader(at.textMinusMarkup));
+        }
+
+    }
+
+    public static final class AnnotationAnalyzerWrapper extends AnalyzerWrapper {
+
+        private final Analyzer delegate;
+
+        public AnnotationAnalyzerWrapper(Analyzer delegate) {
+            super(delegate.getReuseStrategy());
+            this.delegate = delegate;
+        }
+
+        /**
+         * Wraps {@link StandardAnalyzer}.
+         */
+        public AnnotationAnalyzerWrapper() {
+            this(new StandardAnalyzer());
+        }
+
+        @Override
+        public Analyzer getWrappedAnalyzer(String fieldName) {
+            return delegate;
+        }
+
+        @Override
+        protected TokenStreamComponents wrapComponents(String fieldName, TokenStreamComponents components) {
+            if (components instanceof AnnotatedTokenStreamComponents) {
+                // already wrapped.
+                return components;
+            }
+            AnnotationsInjector injector = new AnnotationsInjector(components.getTokenStream());
+            return new AnnotatedTokenStreamComponents(components.getTokenizer(), injector);
+        }
+    }
+
+    // This Analyzer is not "wrappable" because of a limitation in Lucene https://issues.apache.org/jira/browse/LUCENE-8352
+    private static final class AnnotatedTokenStreamComponents extends TokenStreamComponents {
+        private AnnotationsInjector annotationsInjector;
+
+        AnnotatedTokenStreamComponents(Tokenizer source, AnnotationsInjector annotationsInjector) {
+            super(source, annotationsInjector);
+            this.annotationsInjector = annotationsInjector;
+        }
+
+        @Override
+        protected void setReader(Reader reader) {
+            // Sneaky code to change the content downstream components will parse.
+            // Replace the marked-up content Reader with a plain-text Reader and prime the
+            // annotations injector with the AnnotationTokens that need to be injected
+            // as plain-text parsing progresses.
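+            // readToString (below) drains the entire Reader first, so the complete
+            // marked-up field value is buffered in memory before parsing begins.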
+            AnnotatedText annotations = AnnotatedText.parse(readToString(reader));
+            annotationsInjector.setAnnotations(annotations);
+            super.setReader(new StringReader(annotations.textMinusMarkup));
+        }
+    }
+
+    static String readToString(Reader reader) {
+        char[] arr = new char[8 * 1024];
+        StringBuilder buffer = new StringBuilder();
+        int numCharsRead;
+        try {
+            while ((numCharsRead = reader.read(arr, 0, arr.length)) != -1) {
+                buffer.append(arr, 0, numCharsRead);
+            }
+            reader.close();
+            return buffer.toString();
+        } catch (IOException e) {
+            throw new UncheckedIOException("IO Error reading field content", e);
+        }
+    }
+
+    public static final class AnnotationsInjector extends TokenFilter {
+
+        private AnnotatedText annotatedText;
+        AnnotatedText.AnnotationToken nextAnnotationForInjection = null;
+        private int currentAnnotationIndex = 0;
+        List<State> pendingStates = new ArrayList<>();
+        int pendingStatePos = 0;
+        boolean inputExhausted = false;
+
+        private final OffsetAttribute textOffsetAtt = addAttribute(OffsetAttribute.class);
+        private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
+        private final PositionIncrementAttribute posAtt = addAttribute(PositionIncrementAttribute.class);
+        private final PositionLengthAttribute posLenAtt = addAttribute(PositionLengthAttribute.class);
+        private final TypeAttribute typeAtt = addAttribute(TypeAttribute.class);
+
+        public AnnotationsInjector(TokenStream in) {
+            super(in);
+        }
+
+        public void setAnnotations(AnnotatedText annotatedText) {
+            this.annotatedText = annotatedText;
+            currentAnnotationIndex = 0;
+            if (annotatedText != null && annotatedText.numAnnotations() > 0) {
+                nextAnnotationForInjection = annotatedText.getAnnotation(0);
+            } else {
+                nextAnnotationForInjection = null;
+            }
+        }
+
+        @Override
+        public void reset() throws IOException {
+            pendingStates.clear();
+            pendingStatePos = 0;
+            inputExhausted = false;
+            super.reset();
+        }
+
+        // Abstracts whether we are pulling from the pre-cached buffer of
+        // text tokens or directly from the wrapped TokenStream
+        private boolean internalNextToken() throws IOException {
+            if (pendingStatePos < pendingStates.size()) {
+                restoreState(pendingStates.get(pendingStatePos));
+                pendingStatePos++;
+                if (pendingStatePos >= pendingStates.size()) {
+                    pendingStatePos = 0;
+                    pendingStates.clear();
+                }
+                return true;
+            }
+            if (inputExhausted) {
+                return false;
+            }
+            return input.incrementToken();
+        }
+
+        @Override
+        public boolean incrementToken() throws IOException {
+            if (internalNextToken()) {
+                if (nextAnnotationForInjection != null) {
+                    // If we are at the right point to inject an annotation....
+                    if (textOffsetAtt.startOffset() >= nextAnnotationForInjection.offset) {
+                        int firstSpannedTextPosInc = posAtt.getPositionIncrement();
+                        int annotationPosLen = 1;
+
+                        // Capture the text token's state for later replay, with a zero
+                        // position increment so it stacks on the annotation token that
+                        // is injected before it
+                        posAtt.setPositionIncrement(0);
+                        pendingStates.add(captureState());
+
+                        while (textOffsetAtt.endOffset() <= nextAnnotationForInjection.endOffset) {
+                            // Buffer up all the other tokens spanned by this annotation to determine length.
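+                            // e.g. for "[Stormy Daniels](Stephanie+Clifford)" the text tokens "stormy"
+                            // and "daniels" are both spanned, so the injected annotation token ends up
+                            // with a position length of 2 (see testAnnotationInjection).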
+                            if (input.incrementToken()) {
+                                if (textOffsetAtt.endOffset() <= nextAnnotationForInjection.endOffset
+                                        && textOffsetAtt.startOffset() < nextAnnotationForInjection.endOffset) {
+                                    annotationPosLen += posAtt.getPositionIncrement();
+                                }
+                                pendingStates.add(captureState());
+                            } else {
+                                inputExhausted = true;
+                                break;
+                            }
+                        }
+                        emitAnnotation(firstSpannedTextPosInc, annotationPosLen);
+                        return true;
+                    }
+                }
+                return true;
+            } else {
+                inputExhausted = true;
+                return false;
+            }
+        }
+
+        private void setType(AnnotationToken token) {
+            // Default annotation type - in future AnnotationTokens may contain custom type info
+            typeAtt.setType("annotation");
+        }
+
+        private void emitAnnotation(int firstSpannedTextPosInc, int annotationPosLen) throws IOException {
+            // Set the annotation's attributes
+            posLenAtt.setPositionLength(annotationPosLen);
+            textOffsetAtt.setOffset(nextAnnotationForInjection.offset, nextAnnotationForInjection.endOffset);
+            setType(nextAnnotationForInjection);
+
+            // We may have multiple annotations at this location - stack them up
+            final int annotationOffset = nextAnnotationForInjection.offset;
+            final AnnotatedText.AnnotationToken firstAnnotationAtThisPos = nextAnnotationForInjection;
+            while (nextAnnotationForInjection != null && nextAnnotationForInjection.offset == annotationOffset) {
+
+                setType(nextAnnotationForInjection);
+                termAtt.resizeBuffer(nextAnnotationForInjection.value.length());
+                termAtt.copyBuffer(nextAnnotationForInjection.value.toCharArray(), 0, nextAnnotationForInjection.value.length());
+
+                if (nextAnnotationForInjection == firstAnnotationAtThisPos) {
+                    posAtt.setPositionIncrement(firstSpannedTextPosInc);
+                    // Put at the head of the queue of tokens to be emitted
+                    pendingStates.add(0, captureState());
+                } else {
+                    posAtt.setPositionIncrement(0);
+                    // Put after the head of the queue of tokens to be emitted
+                    pendingStates.add(1, captureState());
+                }
+
+                // Advance to the next annotation, or null it out to prevent re-injection.
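+                // (Annotations are held in the order they appear in the text, so a single
+                // forward pass over currentAnnotationIndex is assumed to be sufficient.)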
+ currentAnnotationIndex++; + if (currentAnnotationIndex < annotatedText.numAnnotations()) { + nextAnnotationForInjection = annotatedText.getAnnotation(currentAnnotationIndex); + } else { + nextAnnotationForInjection = null; + } + } + // Now pop the first of many potential buffered tokens: + internalNextToken(); + } + + } + + + public static final class AnnotatedTextFieldType extends StringFieldType { + + public AnnotatedTextFieldType() { + setTokenized(true); + } + + protected AnnotatedTextFieldType(AnnotatedTextFieldType ref) { + super(ref); + } + + @Override + public void setIndexAnalyzer(NamedAnalyzer delegate) { + if(delegate.analyzer() instanceof AnnotationAnalyzerWrapper){ + // Already wrapped the Analyzer with an AnnotationAnalyzer + super.setIndexAnalyzer(delegate); + } else { + // Wrap the analyzer with an AnnotationAnalyzer that will inject required annotations + super.setIndexAnalyzer(new NamedAnalyzer(delegate.name(), AnalyzerScope.INDEX, + new AnnotationAnalyzerWrapper(delegate.analyzer()))); + } + } + + public AnnotatedTextFieldType clone() { + return new AnnotatedTextFieldType(this); + } + + @Override + public String typeName() { + return CONTENT_TYPE; + } + + @Override + public Query existsQuery(QueryShardContext context) { + if (omitNorms()) { + return new TermQuery(new Term(FieldNamesFieldMapper.NAME, name())); + } else { + return new NormsFieldExistsQuery(name()); + } + } + + @Override + public Query phraseQuery(String field, TokenStream stream, int slop, boolean enablePosIncrements) throws IOException { + PhraseQuery.Builder builder = new PhraseQuery.Builder(); + builder.setSlop(slop); + + TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class); + PositionIncrementAttribute posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class); + int position = -1; + + stream.reset(); + while (stream.incrementToken()) { + if (enablePosIncrements) { + position += posIncrAtt.getPositionIncrement(); + } + else { + position += 1; + } + builder.add(new Term(field, termAtt.getBytesRef()), position); + } + + return builder.build(); + } + + @Override + public Query multiPhraseQuery(String field, TokenStream stream, int slop, boolean enablePositionIncrements) throws IOException { + + MultiPhraseQuery.Builder mpqb = new MultiPhraseQuery.Builder(); + mpqb.setSlop(slop); + + TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class); + + PositionIncrementAttribute posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class); + int position = -1; + + List multiTerms = new ArrayList<>(); + stream.reset(); + while (stream.incrementToken()) { + int positionIncrement = posIncrAtt.getPositionIncrement(); + + if (positionIncrement > 0 && multiTerms.size() > 0) { + if (enablePositionIncrements) { + mpqb.add(multiTerms.toArray(new Term[0]), position); + } else { + mpqb.add(multiTerms.toArray(new Term[0])); + } + multiTerms.clear(); + } + position += positionIncrement; + multiTerms.add(new Term(field, termAtt.getBytesRef())); + } + + if (enablePositionIncrements) { + mpqb.add(multiTerms.toArray(new Term[0]), position); + } else { + mpqb.add(multiTerms.toArray(new Term[0])); + } + return mpqb.build(); + } + } + + private int positionIncrementGap; + protected AnnotatedTextFieldMapper(String simpleName, AnnotatedTextFieldType fieldType, MappedFieldType defaultFieldType, + int positionIncrementGap, + Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { + super(simpleName, fieldType, defaultFieldType, indexSettings, 
multiFields, copyTo); + assert fieldType.tokenized(); + assert fieldType.hasDocValues() == false; + this.positionIncrementGap = positionIncrementGap; + } + + @Override + protected AnnotatedTextFieldMapper clone() { + return (AnnotatedTextFieldMapper) super.clone(); + } + + public int getPositionIncrementGap() { + return this.positionIncrementGap; + } + + @Override + protected void parseCreateField(ParseContext context, List fields) throws IOException { + final String value; + if (context.externalValueSet()) { + value = context.externalValue().toString(); + } else { + value = context.parser().textOrNull(); + } + + if (value == null) { + return; + } + + if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { + Field field = new Field(fieldType().name(), value, fieldType()); + fields.add(field); + if (fieldType().omitNorms()) { + createFieldNamesField(context, fields); + } + } + } + + @Override + protected String contentType() { + return CONTENT_TYPE; + } + + @Override + public AnnotatedTextFieldType fieldType() { + return (AnnotatedTextFieldType) super.fieldType(); + } + + @Override + protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { + super.doXContentBody(builder, includeDefaults, params); + doXContentAnalyzers(builder, includeDefaults); + + if (includeDefaults || positionIncrementGap != POSITION_INCREMENT_GAP_USE_ANALYZER) { + builder.field("position_increment_gap", positionIncrementGap); + } + } +} diff --git a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/plugin/mapper/AnnotatedTextPlugin.java b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/plugin/mapper/AnnotatedTextPlugin.java new file mode 100644 index 00000000000..c7abe5fb5f9 --- /dev/null +++ b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/plugin/mapper/AnnotatedTextPlugin.java @@ -0,0 +1,44 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package org.elasticsearch.plugin.mapper;
+
+import java.util.Collections;
+import java.util.Map;
+
+import org.elasticsearch.index.mapper.Mapper;
+import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper;
+import org.elasticsearch.plugins.MapperPlugin;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.plugins.SearchPlugin;
+import org.elasticsearch.search.fetch.subphase.highlight.AnnotatedTextHighlighter;
+import org.elasticsearch.search.fetch.subphase.highlight.Highlighter;
+
+public class AnnotatedTextPlugin extends Plugin implements MapperPlugin, SearchPlugin {
+
+    @Override
+    public Map<String, Mapper.TypeParser> getMappers() {
+        return Collections.singletonMap(AnnotatedTextFieldMapper.CONTENT_TYPE, new AnnotatedTextFieldMapper.TypeParser());
+    }
+
+    @Override
+    public Map<String, Highlighter> getHighlighters() {
+        return Collections.singletonMap(AnnotatedTextHighlighter.NAME, new AnnotatedTextHighlighter());
+    }
+}
diff --git a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AnnotatedPassageFormatter.java b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AnnotatedPassageFormatter.java
new file mode 100644
index 00000000000..ad1acc85031
--- /dev/null
+++ b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AnnotatedPassageFormatter.java
@@ -0,0 +1,201 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */ + +package org.elasticsearch.search.fetch.subphase.highlight; + +import org.apache.lucene.search.highlight.Encoder; +import org.apache.lucene.search.uhighlight.Passage; +import org.apache.lucene.search.uhighlight.PassageFormatter; +import org.apache.lucene.search.uhighlight.Snippet; +import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedHighlighterAnalyzer; +import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedText.AnnotationToken; + +import java.io.UnsupportedEncodingException; +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; + +/** + * Custom passage formatter that : + * 1) marks up search hits in markdown-like syntax for URLs ({@link Snippet}) + * 2) injects any annotations from the original text that don't conflict with search hit highlighting + */ +public class AnnotatedPassageFormatter extends PassageFormatter { + + + public static final String SEARCH_HIT_TYPE = "_hit_term"; + private final Encoder encoder; + private AnnotatedHighlighterAnalyzer annotatedHighlighterAnalyzer; + + public AnnotatedPassageFormatter(AnnotatedHighlighterAnalyzer annotatedHighlighterAnalyzer, Encoder encoder) { + this.annotatedHighlighterAnalyzer = annotatedHighlighterAnalyzer; + this.encoder = encoder; + } + + static class MarkupPassage { + List markups = new ArrayList<>(); + int lastMarkupEnd = -1; + + public void addUnlessOverlapping(Markup newMarkup) { + + // Fast exit. + if(newMarkup.start > lastMarkupEnd) { + markups.add(newMarkup); + lastMarkupEnd = newMarkup.end; + return; + } + + // Check to see if this new markup overlaps with any prior + int index=0; + for (Markup existingMarkup: markups) { + if(existingMarkup.samePosition(newMarkup)) { + existingMarkup.merge(newMarkup); + return; + } + if(existingMarkup.overlaps(newMarkup)) { + // existing markup wins - we throw away the new markup that would span this position + return; + } + // markup list is in start offset order so we can insert at this position then shift others right + if(existingMarkup.isAfter(newMarkup)) { + markups.add(index, newMarkup); + return; + } + index++; + } + markups.add(newMarkup); + lastMarkupEnd = newMarkup.end; + } + + } + static class Markup { + int start; + int end; + String metadata; + Markup(int start, int end, String metadata) { + super(); + this.start = start; + this.end = end; + this.metadata = metadata; + } + boolean isAfter(Markup other) { + return start > other.end; + } + void merge(Markup newMarkup) { + // metadata is key1=value&key2=value&.... 
syntax used for urls
+            assert samePosition(newMarkup);
+            metadata += "&" + newMarkup.metadata;
+        }
+        boolean samePosition(Markup other) {
+            return this.start == other.start && this.end == other.end;
+        }
+        boolean overlaps(Markup other) {
+            return (start <= other.start && end >= other.start)
+                    || (start <= other.end && end >= other.end)
+                    || (start >= other.start && end <= other.end);
+        }
+        @Override
+        public String toString() {
+            return "Markup [start=" + start + ", end=" + end + ", metadata=" + metadata + "]";
+        }
+
+    }
+
+    // Merge original annotations and search hits into a single set of markups for each passage
+    static MarkupPassage mergeAnnotations(AnnotationToken[] annotations, Passage passage) {
+        try {
+            MarkupPassage markupPassage = new MarkupPassage();
+
+            // Add search hits first - they take precedence over any other markup
+            for (int i = 0; i < passage.getNumMatches(); i++) {
+                int start = passage.getMatchStarts()[i];
+                int end = passage.getMatchEnds()[i];
+                String searchTerm = passage.getMatchTerms()[i].utf8ToString();
+                Markup markup = new Markup(start, end, SEARCH_HIT_TYPE + "=" + URLEncoder.encode(searchTerm, StandardCharsets.UTF_8.name()));
+                markupPassage.addUnlessOverlapping(markup);
+            }
+
+            // Now add the original text's annotations - ignoring any that might conflict with the search hits markup.
+            for (AnnotationToken token : annotations) {
+                int start = token.offset;
+                int end = token.endOffset;
+                if (start >= passage.getStartOffset() && end <= passage.getEndOffset()) {
+                    String escapedValue = URLEncoder.encode(token.value, StandardCharsets.UTF_8.name());
+                    Markup markup = new Markup(start, end, escapedValue);
+                    markupPassage.addUnlessOverlapping(markup);
+                }
+            }
+            return markupPassage;
+
+        } catch (UnsupportedEncodingException e) {
+            // We should always have UTF-8 support
+            throw new IllegalStateException(e);
+        }
+    }
+
+    @Override
+    public Snippet[] format(Passage[] passages, String content) {
+        Snippet[] snippets = new Snippet[passages.length];
+
+        int pos;
+        int j = 0;
+        for (Passage passage : passages) {
+            AnnotationToken[] annotations = annotatedHighlighterAnalyzer.getIntersectingAnnotations(passage.getStartOffset(),
+                    passage.getEndOffset());
+            MarkupPassage mergedMarkup = mergeAnnotations(annotations, passage);
+
+            StringBuilder sb = new StringBuilder();
+            pos = passage.getStartOffset();
+            for (Markup markup : mergedMarkup.markups) {
+                int start = markup.start;
+                int end = markup.end;
+                // it's possible to have overlapping terms
+                if (start > pos) {
+                    append(sb, content, pos, start);
+                }
+                if (end > pos) {
+                    sb.append("[");
+                    append(sb, content, Math.max(pos, start), end);
+
+                    sb.append("](");
+                    sb.append(markup.metadata);
+                    sb.append(")");
+                    pos = end;
+                }
+            }
+            // it's possible a "term" from the analyzer could span a sentence boundary.
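+            // Math.max below clamps the end offset: if pos has already advanced past the
+            // passage end, an empty region is appended rather than a negative-length one.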
+ append(sb, content, pos, Math.max(pos, passage.getEndOffset())); + //we remove the paragraph separator if present at the end of the snippet (we used it as separator between values) + if (sb.charAt(sb.length() - 1) == HighlightUtils.PARAGRAPH_SEPARATOR) { + sb.deleteCharAt(sb.length() - 1); + } else if (sb.charAt(sb.length() - 1) == HighlightUtils.NULL_SEPARATOR) { + sb.deleteCharAt(sb.length() - 1); + } + //and we trim the snippets too + snippets[j++] = new Snippet(sb.toString().trim(), passage.getScore(), passage.getNumMatches() > 0); + } + return snippets; + } + + private void append(StringBuilder dest, String content, int start, int end) { + dest.append(encoder.encodeText(content.substring(start, end))); + } +} diff --git a/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AnnotatedTextHighlighter.java b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AnnotatedTextHighlighter.java new file mode 100644 index 00000000000..d93316c7892 --- /dev/null +++ b/plugins/mapper-annotated-text/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AnnotatedTextHighlighter.java @@ -0,0 +1,64 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package org.elasticsearch.search.fetch.subphase.highlight;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.search.highlight.Encoder;
+import org.apache.lucene.search.uhighlight.PassageFormatter;
+import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedHighlighterAnalyzer;
+import org.elasticsearch.search.fetch.FetchSubPhase.HitContext;
+import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight.Field;
+import org.elasticsearch.search.internal.SearchContext;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+
+public class AnnotatedTextHighlighter extends UnifiedHighlighter {
+
+    public static final String NAME = "annotated";
+
+    AnnotatedHighlighterAnalyzer annotatedHighlighterAnalyzer = null;
+
+    @Override
+    protected Analyzer getAnalyzer(DocumentMapper docMapper, MappedFieldType type) {
+        annotatedHighlighterAnalyzer = new AnnotatedHighlighterAnalyzer(super.getAnalyzer(docMapper, type));
+        return annotatedHighlighterAnalyzer;
+    }
+
+    // Convert the marked-up values held on-disk to plain-text versions for highlighting
+    @Override
+    protected List<Object> loadFieldValues(MappedFieldType fieldType, Field field, SearchContext context, HitContext hitContext)
+            throws IOException {
+        List<Object> fieldValues = super.loadFieldValues(fieldType, field, context, hitContext);
+        String[] fieldValuesAsString = fieldValues.toArray(new String[fieldValues.size()]);
+        annotatedHighlighterAnalyzer.init(fieldValuesAsString);
+        return Arrays.asList((Object[]) annotatedHighlighterAnalyzer.getPlainTextValuesForHighlighter());
+    }
+
+    @Override
+    protected PassageFormatter getPassageFormatter(SearchContextHighlight.Field field, Encoder encoder) {
+        return new AnnotatedPassageFormatter(annotatedHighlighterAnalyzer, encoder);
+    }
+
+}
diff --git a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextClientYamlTestSuiteIT.java b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextClientYamlTestSuiteIT.java
new file mode 100644
index 00000000000..3d643b2a7ca
--- /dev/null
+++ b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextClientYamlTestSuiteIT.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.mapper.annotatedtext;
+
+import com.carrotsearch.randomizedtesting.annotations.Name;
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+
+import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
+import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
+
+public class AnnotatedTextClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
+
+    public AnnotatedTextClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
+        super(testCandidate);
+    }
+
+    @ParametersFactory
+    public static Iterable<Object[]> parameters() throws Exception {
+        return createParameters();
+    }
+}
+
diff --git a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java
new file mode 100644
index 00000000000..8a51b9a494b
--- /dev/null
+++ b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java
@@ -0,0 +1,681 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */ + +package org.elasticsearch.index.mapper.annotatedtext; + +import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.IndexableFieldType; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.Terms; +import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.action.bulk.BulkRequestBuilder; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.termvectors.TermVectorsRequest; +import org.elasticsearch.action.termvectors.TermVectorsResponse; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.lucene.uid.Versions; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.VersionType; +import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.DocumentMapperParser; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.MapperService.MergeReason; +import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.index.mapper.TextFieldMapper; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.termvectors.TermVectorsService; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.plugin.mapper.AnnotatedTextPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.junit.Before; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; + +public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase { + + IndexService indexService; + DocumentMapperParser parser; + + @Before + public void setup() { + Settings settings = Settings.builder() + .put("index.analysis.filter.mySynonyms.type", "synonym") + .putList("index.analysis.filter.mySynonyms.synonyms", Collections.singletonList("car, auto")) + .put("index.analysis.analyzer.synonym.tokenizer", "standard") + .put("index.analysis.analyzer.synonym.filter", "mySynonyms") + // Stop filter remains in server as it is part of lucene-core + .put("index.analysis.analyzer.my_stop_analyzer.tokenizer", "standard") + .put("index.analysis.analyzer.my_stop_analyzer.filter", "stop") + .build(); + indexService = createIndex("test", settings); + parser = indexService.mapperService().documentMapperParser(); + } + + + + @Override + protected Collection> getPlugins() { + List> classpathPlugins = new ArrayList<>(); + classpathPlugins.add(AnnotatedTextPlugin.class); + return classpathPlugins; + } + + + + protected 
String getFieldType() { + return "annotated_text"; + } + + public void testAnnotationInjection() throws IOException { + + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", getFieldType()).endObject().endObject() + .endObject().endObject()); + + DocumentMapper mapper = indexService.mapperService().merge("type", + new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); + + // Use example of typed and untyped annotations + String annotatedText = "He paid [Stormy Daniels](Stephanie+Clifford&Payee) hush money"; + SourceToParse sourceToParse = SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", annotatedText) + .endObject()), + XContentType.JSON); + ParsedDocument doc = mapper.parse(sourceToParse); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(1, fields.length); + + assertEquals(annotatedText, fields[0].stringValue()); + + IndexShard shard = indexService.getShard(0); + shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, + sourceToParse, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); + shard.refresh("test"); + try (Engine.Searcher searcher = shard.acquireSearcher("test")) { + LeafReader leaf = searcher.getDirectoryReader().leaves().get(0).reader(); + TermsEnum terms = leaf.terms("field").iterator(); + + assertTrue(terms.seekExact(new BytesRef("stormy"))); + PostingsEnum postings = terms.postings(null, PostingsEnum.POSITIONS); + assertEquals(0, postings.nextDoc()); + assertEquals(2, postings.nextPosition()); + + assertTrue(terms.seekExact(new BytesRef("Stephanie Clifford"))); + postings = terms.postings(null, PostingsEnum.POSITIONS); + assertEquals(0, postings.nextDoc()); + assertEquals(2, postings.nextPosition()); + + assertTrue(terms.seekExact(new BytesRef("Payee"))); + postings = terms.postings(null, PostingsEnum.POSITIONS); + assertEquals(0, postings.nextDoc()); + assertEquals(2, postings.nextPosition()); + + + assertTrue(terms.seekExact(new BytesRef("hush"))); + postings = terms.postings(null, PostingsEnum.POSITIONS); + assertEquals(0, postings.nextDoc()); + assertEquals(4, postings.nextPosition()); + + } + } + + public void testToleranceForBadAnnotationMarkup() throws IOException { + + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", getFieldType()).endObject().endObject() + .endObject().endObject()); + + DocumentMapper mapper = indexService.mapperService().merge("type", + new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); + + String annotatedText = "foo [bar](MissingEndBracket baz"; + SourceToParse sourceToParse = SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field", annotatedText) + .endObject()), + XContentType.JSON); + ParsedDocument doc = mapper.parse(sourceToParse); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(1, fields.length); + + assertEquals(annotatedText, fields[0].stringValue()); + + IndexShard shard = indexService.getShard(0); + shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, + sourceToParse, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); + shard.refresh("test"); + try (Engine.Searcher searcher = shard.acquireSearcher("test")) { + LeafReader leaf = 
searcher.getDirectoryReader().leaves().get(0).reader();
+            TermsEnum terms = leaf.terms("field").iterator();
+
+            assertTrue(terms.seekExact(new BytesRef("foo")));
+            PostingsEnum postings = terms.postings(null, PostingsEnum.POSITIONS);
+            assertEquals(0, postings.nextDoc());
+            assertEquals(0, postings.nextPosition());
+
+            assertTrue(terms.seekExact(new BytesRef("bar")));
+            postings = terms.postings(null, PostingsEnum.POSITIONS);
+            assertEquals(0, postings.nextDoc());
+            assertEquals(1, postings.nextPosition());
+
+            assertFalse(terms.seekExact(new BytesRef("MissingEndBracket")));
+            // Bad markup means value is treated as plain text and fed through tokenisation
+            assertTrue(terms.seekExact(new BytesRef("missingendbracket")));
+
+        }
+    }
+
+    public void testAgainstTermVectorsAPI() throws IOException {
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("tvfield").field("type", getFieldType())
+                .field("term_vector", "with_positions_offsets_payloads")
+                .endObject().endObject()
+                .endObject().endObject());
+        indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
+
+        int max = between(3, 10);
+        BulkRequestBuilder bulk = client().prepareBulk();
+        for (int i = 0; i < max; i++) {
+            bulk.add(client().prepareIndex("test", "type", Integer.toString(i))
+                    .setSource("tvfield", "the quick [brown](Color) fox jumped over the lazy dog"));
+        }
+        bulk.get();
+
+        TermVectorsRequest request = new TermVectorsRequest("test", "type", "0").termStatistics(true);
+
+        IndicesService indicesService = getInstanceFromNode(IndicesService.class);
+        IndexService test = indicesService.indexService(resolveIndex("test"));
+        IndexShard shard = test.getShardOrNull(0);
+        assertThat(shard, notNullValue());
+        TermVectorsResponse response = TermVectorsService.getTermVectors(shard, request);
+        assertEquals(1, response.getFields().size());
+
+        Terms terms = response.getFields().terms("tvfield");
+        TermsEnum iterator = terms.iterator();
+        BytesRef term;
+        Set<String> foundTerms = new HashSet<>();
+        while ((term = iterator.next()) != null) {
+            foundTerms.add(term.utf8ToString());
+        }
+        // Check we have both text and annotation tokens
+        assertTrue(foundTerms.contains("brown"));
+        assertTrue(foundTerms.contains("Color"));
+        assertTrue(foundTerms.contains("fox"));
+
+    }
+
+    // ===== Code below copied from TextFieldMapperTests ========
+
+    public void testDefaults() throws IOException {
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("field").field("type", getFieldType()).endObject().endObject()
+                .endObject().endObject());
+
+        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
+
+        assertEquals(mapping, mapper.mappingSource().toString());
+
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .field("field", "1234")
+                        .endObject()),
+                XContentType.JSON));
+
+        IndexableField[] fields = doc.rootDoc().getFields("field");
+        assertEquals(1, fields.length);
+
+        assertEquals("1234", fields[0].stringValue());
+        IndexableFieldType fieldType = fields[0].fieldType();
+        assertThat(fieldType.omitNorms(), equalTo(false));
+        assertTrue(fieldType.tokenized());
+        assertFalse(fieldType.stored());
+        assertThat(fieldType.indexOptions(), equalTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS));
+        assertThat(fieldType.storeTermVectors(), equalTo(false));
+        assertThat(fieldType.storeTermVectorOffsets(), equalTo(false));
+        assertThat(fieldType.storeTermVectorPositions(), equalTo(false));
+        assertThat(fieldType.storeTermVectorPayloads(), equalTo(false));
+        assertEquals(DocValuesType.NONE, fieldType.docValuesType());
+    }
+
+    public void testEnableStore() throws IOException {
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("field").field("type", getFieldType()).field("store", true).endObject().endObject()
+                .endObject().endObject());
+
+        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
+
+        assertEquals(mapping, mapper.mappingSource().toString());
+
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .field("field", "1234")
+                        .endObject()),
+                XContentType.JSON));
+
+        IndexableField[] fields = doc.rootDoc().getFields("field");
+        assertEquals(1, fields.length);
+        assertTrue(fields[0].fieldType().stored());
+    }
+
+    public void testDisableNorms() throws IOException {
+        String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("field")
+                .field("type", getFieldType())
+                .field("norms", false)
+                .endObject().endObject()
+                .endObject().endObject());
+
+        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
+
+        assertEquals(mapping, mapper.mappingSource().toString());
+
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+                .bytes(XContentFactory.jsonBuilder()
+                        .startObject()
+                        .field("field", "1234")
+                        .endObject()),
+                XContentType.JSON));
+
+        IndexableField[] fields = doc.rootDoc().getFields("field");
+        assertEquals(1, fields.length);
+        assertTrue(fields[0].fieldType().omitNorms());
+    }
+
+    public void testIndexOptions() throws IOException {
+        Map<String, IndexOptions> supportedOptions = new HashMap<>();
+        supportedOptions.put("docs", IndexOptions.DOCS);
+        supportedOptions.put("freqs", IndexOptions.DOCS_AND_FREQS);
+        supportedOptions.put("positions", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
+        supportedOptions.put("offsets", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
+
+        XContentBuilder mappingBuilder = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties");
+        for (String option : supportedOptions.keySet()) {
+            mappingBuilder.startObject(option).field("type", getFieldType()).field("index_options", option).endObject();
+        }
+        String mapping = Strings.toString(mappingBuilder.endObject().endObject().endObject());
+
+        DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
+
+        XContentBuilder jsonDoc = XContentFactory.jsonBuilder().startObject();
+        for (String option : supportedOptions.keySet()) {
+            jsonDoc.field(option, "1234");
+        }
+        ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(jsonDoc.endObject()),
+                XContentType.JSON));
+
+        for (Map.Entry<String, IndexOptions> entry : supportedOptions.entrySet()) {
+            String field = entry.getKey();
+            IndexOptions options = entry.getValue();
+            IndexableField[] fields = doc.rootDoc().getFields(field);
+            assertEquals(1, fields.length);
+            assertEquals(options, fields[0].fieldType().indexOptions());
+        }
+    }
+
+    public void testDefaultPositionIncrementGap() throws IOException {
+        String mapping =
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", getFieldType()).endObject().endObject() + .endObject().endObject()); + + DocumentMapper mapper = indexService.mapperService().merge("type", + new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); + + assertEquals(mapping, mapper.mappingSource().toString()); + + SourceToParse sourceToParse = SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .array("field", new String[] {"a", "b"}) + .endObject()), + XContentType.JSON); + ParsedDocument doc = mapper.parse(sourceToParse); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(2, fields.length); + + assertEquals("a", fields[0].stringValue()); + assertEquals("b", fields[1].stringValue()); + + IndexShard shard = indexService.getShard(0); + shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, + sourceToParse, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); + shard.refresh("test"); + try (Engine.Searcher searcher = shard.acquireSearcher("test")) { + LeafReader leaf = searcher.getDirectoryReader().leaves().get(0).reader(); + TermsEnum terms = leaf.terms("field").iterator(); + assertTrue(terms.seekExact(new BytesRef("b"))); + PostingsEnum postings = terms.postings(null, PostingsEnum.POSITIONS); + assertEquals(0, postings.nextDoc()); + assertEquals(TextFieldMapper.Defaults.POSITION_INCREMENT_GAP + 1, postings.nextPosition()); + } + } + + public void testPositionIncrementGap() throws IOException { + final int positionIncrementGap = randomIntBetween(1, 1000); + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field") + .field("type", getFieldType()) + .field("position_increment_gap", positionIncrementGap) + .endObject().endObject() + .endObject().endObject()); + + DocumentMapper mapper = indexService.mapperService().merge("type", + new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); + + assertEquals(mapping, mapper.mappingSource().toString()); + + SourceToParse sourceToParse = SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .array("field", new String[]{"a", "b"}) + .endObject()), + XContentType.JSON); + ParsedDocument doc = mapper.parse(sourceToParse); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(2, fields.length); + + assertEquals("a", fields[0].stringValue()); + assertEquals("b", fields[1].stringValue()); + + IndexShard shard = indexService.getShard(0); + shard.applyIndexOperationOnPrimary(Versions.MATCH_ANY, VersionType.INTERNAL, + sourceToParse, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); + shard.refresh("test"); + try (Engine.Searcher searcher = shard.acquireSearcher("test")) { + LeafReader leaf = searcher.getDirectoryReader().leaves().get(0).reader(); + TermsEnum terms = leaf.terms("field").iterator(); + assertTrue(terms.seekExact(new BytesRef("b"))); + PostingsEnum postings = terms.postings(null, PostingsEnum.POSITIONS); + assertEquals(0, postings.nextDoc()); + assertEquals(positionIncrementGap + 1, postings.nextPosition()); + } + } + + public void testSearchAnalyzerSerialization() throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("field") + .field("type", 
getFieldType()) + .field("analyzer", "standard") + .field("search_analyzer", "keyword") + .endObject() + .endObject().endObject().endObject()); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + + // special case: default index analyzer + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("field") + .field("type", getFieldType()) + .field("analyzer", "default") + .field("search_analyzer", "keyword") + .endObject() + .endObject().endObject().endObject()); + + mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("field") + .field("type", getFieldType()) + .field("analyzer", "keyword") + .endObject() + .endObject().endObject().endObject()); + + mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + + // special case: default search analyzer + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("field") + .field("type", getFieldType()) + .field("analyzer", "keyword") + .field("search_analyzer", "default") + .endObject() + .endObject().endObject().endObject()); + + mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("field") + .field("type", getFieldType()) + .field("analyzer", "keyword") + .endObject() + .endObject().endObject().endObject()); + mapper = parser.parse("type", new CompressedXContent(mapping)); + + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject(); + mapper.toXContent(builder, new ToXContent.MapParams(Collections.singletonMap("include_defaults", "true"))); + builder.endObject(); + + String mappingString = Strings.toString(builder); + assertTrue(mappingString.contains("analyzer")); + assertTrue(mappingString.contains("search_analyzer")); + assertTrue(mappingString.contains("search_quote_analyzer")); + } + + public void testSearchQuoteAnalyzerSerialization() throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("field") + .field("type", getFieldType()) + .field("analyzer", "standard") + .field("search_analyzer", "standard") + .field("search_quote_analyzer", "keyword") + .endObject() + .endObject().endObject().endObject()); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + + // special case: default index/search analyzer + mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("field") + .field("type", getFieldType()) + .field("analyzer", "default") + .field("search_analyzer", "default") + .field("search_quote_analyzer", "keyword") + .endObject() + .endObject().endObject().endObject()); + + mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + } + + public void 
testTermVectors() throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("field1") + .field("type", getFieldType()) + .field("term_vector", "no") + .endObject() + .startObject("field2") + .field("type", getFieldType()) + .field("term_vector", "yes") + .endObject() + .startObject("field3") + .field("type", getFieldType()) + .field("term_vector", "with_offsets") + .endObject() + .startObject("field4") + .field("type", getFieldType()) + .field("term_vector", "with_positions") + .endObject() + .startObject("field5") + .field("type", getFieldType()) + .field("term_vector", "with_positions_offsets") + .endObject() + .startObject("field6") + .field("type", getFieldType()) + .field("term_vector", "with_positions_offsets_payloads") + .endObject() + .endObject() + .endObject().endObject()); + + DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); + + ParsedDocument doc = defaultMapper.parse(SourceToParse.source("test", "type", "1", BytesReference + .bytes(XContentFactory.jsonBuilder() + .startObject() + .field("field1", "1234") + .field("field2", "1234") + .field("field3", "1234") + .field("field4", "1234") + .field("field5", "1234") + .field("field6", "1234") + .endObject()), + XContentType.JSON)); + + assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectors(), equalTo(false)); + assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorOffsets(), equalTo(false)); + assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorPositions(), equalTo(false)); + assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorPayloads(), equalTo(false)); + + assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectors(), equalTo(true)); + assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorOffsets(), equalTo(false)); + assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorPositions(), equalTo(false)); + assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorPayloads(), equalTo(false)); + + assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectors(), equalTo(true)); + assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorOffsets(), equalTo(true)); + assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorPositions(), equalTo(false)); + assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorPayloads(), equalTo(false)); + + assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectors(), equalTo(true)); + assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorOffsets(), equalTo(false)); + assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorPositions(), equalTo(true)); + assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorPayloads(), equalTo(false)); + + assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectors(), equalTo(true)); + assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorOffsets(), equalTo(true)); + assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorPositions(), equalTo(true)); + assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorPayloads(), equalTo(false)); + + assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectors(), equalTo(true)); + assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorOffsets(), equalTo(true)); 
+ assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPositions(), equalTo(true)); + assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPayloads(), equalTo(true)); + } + + public void testNullConfigValuesFail() throws MapperParsingException, IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", getFieldType()) + .field("analyzer", (String) null) + .endObject() + .endObject() + .endObject().endObject()); + + Exception e = expectThrows(MapperParsingException.class, () -> parser.parse("type", new CompressedXContent(mapping))); + assertEquals("[analyzer] must not have a [null] value", e.getMessage()); + } + + public void testNotIndexedField() throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field") + .field("type", getFieldType()) + .field("index", false) + .endObject().endObject().endObject().endObject()); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> parser.parse("type", new CompressedXContent(mapping))); + assertEquals("[annotated_text] fields must be indexed", e.getMessage()); + } + + public void testAnalyzedFieldPositionIncrementWithoutPositions() throws IOException { + for (String indexOptions : Arrays.asList("docs", "freqs")) { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field") + .field("type", getFieldType()) + .field("index_options", indexOptions) + .field("position_increment_gap", 10) + .endObject().endObject().endObject().endObject()); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> parser.parse("type", new CompressedXContent(mapping))); + assertEquals("Cannot set position_increment_gap on field [field] without positions enabled", e.getMessage()); + } + } + + public void testEmptyName() throws IOException { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("") + .field("type", getFieldType()) + .endObject() + .endObject() + .endObject().endObject()); + + // Empty name not allowed in index created after 5.0 + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> parser.parse("type", new CompressedXContent(mapping)) + ); + assertThat(e.getMessage(), containsString("name cannot be empty string")); + } + + + +} diff --git a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextParsingTests.java b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextParsingTests.java new file mode 100644 index 00000000000..4df44df5cd5 --- /dev/null +++ b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextParsingTests.java @@ -0,0 +1,73 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.mapper.annotatedtext;
+
+import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedText;
+import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedText.AnnotationToken;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.List;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class AnnotatedTextParsingTests extends ESTestCase {
+
+    private void checkParsing(String markup, String expectedPlainText, AnnotationToken... expectedTokens) {
+        AnnotatedText at = AnnotatedText.parse(markup);
+        assertEquals(expectedPlainText, at.textMinusMarkup);
+        List<AnnotationToken> actualAnnotations = at.annotations;
+        assertEquals(expectedTokens.length, actualAnnotations.size());
+        for (int i = 0; i < expectedTokens.length; i++) {
+            assertEquals(expectedTokens[i], actualAnnotations.get(i));
+        }
+    }
+
+    public void testSingleValueMarkup() {
+        checkParsing("foo [bar](Y)", "foo bar", new AnnotationToken(4,7,"Y"));
+    }
+
+    public void testMultiValueMarkup() {
+        checkParsing("foo [bar](Y&B)", "foo bar", new AnnotationToken(4,7,"Y"),
+                new AnnotationToken(4,7,"B"));
+    }
+
+    public void testBlankTextAnnotation() {
+        checkParsing("It sounded like this:[](theSoundOfOneHandClapping)", "It sounded like this:",
+                new AnnotationToken(21,21,"theSoundOfOneHandClapping"));
+    }
+
+    public void testMissingBracket() {
+        checkParsing("[foo](MissingEndBracket bar",
+                "[foo](MissingEndBracket bar", new AnnotationToken[0]);
+    }
+
+    public void testAnnotationWithType() {
+        Exception expectedException = expectThrows(ElasticsearchParseException.class,
+                () -> checkParsing("foo [bar](type=foo) baz", "foo bar baz", new AnnotationToken(4,7, "noType")));
+        assertThat(expectedException.getMessage(), equalTo("key=value pairs are not supported in annotations"));
+    }
+
+    public void testMissingValue() {
+        checkParsing("[foo]() bar", "foo bar", new AnnotationToken[0]);
+    }
+
+}
diff --git a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/search/highlight/AnnotatedTextHighlighterTests.java b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/search/highlight/AnnotatedTextHighlighterTests.java
new file mode 100644
index 00000000000..2fcf917ab1d
--- /dev/null
+++ b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/search/highlight/AnnotatedTextHighlighterTests.java
@@ -0,0 +1,185 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.highlight; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.standard.StandardAnalyzer; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.FieldType; +import org.apache.lucene.document.TextField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.PhraseQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.Sort; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.highlight.DefaultEncoder; +import org.apache.lucene.search.uhighlight.CustomSeparatorBreakIterator; +import org.apache.lucene.search.uhighlight.CustomUnifiedHighlighter; +import org.apache.lucene.search.uhighlight.PassageFormatter; +import org.apache.lucene.search.uhighlight.Snippet; +import org.apache.lucene.search.uhighlight.SplittingBreakIterator; +import org.apache.lucene.store.Directory; +import org.elasticsearch.common.Strings; +import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedHighlighterAnalyzer; +import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotationAnalyzerWrapper; +import org.elasticsearch.search.fetch.subphase.highlight.AnnotatedPassageFormatter; +import org.elasticsearch.test.ESTestCase; + +import java.net.URLEncoder; +import java.text.BreakIterator; +import java.util.Locale; + +import static org.apache.lucene.search.uhighlight.CustomUnifiedHighlighter.MULTIVAL_SEP_CHAR; +import static org.hamcrest.CoreMatchers.equalTo; + +public class AnnotatedTextHighlighterTests extends ESTestCase { + + private void assertHighlightOneDoc(String fieldName, String []markedUpInputs, + Query query, Locale locale, BreakIterator breakIterator, + int noMatchSize, String[] expectedPassages) throws Exception { + + // Annotated fields wrap the usual analyzer with one that injects extra tokens + Analyzer wrapperAnalyzer = new AnnotationAnalyzerWrapper(new StandardAnalyzer()); + AnnotatedHighlighterAnalyzer hiliteAnalyzer = new AnnotatedHighlighterAnalyzer(wrapperAnalyzer); + hiliteAnalyzer.init(markedUpInputs); + PassageFormatter passageFormatter = new AnnotatedPassageFormatter(hiliteAnalyzer,new DefaultEncoder()); + String []plainTextForHighlighter = hiliteAnalyzer.getPlainTextValuesForHighlighter(); + + + Directory dir = newDirectory(); + IndexWriterConfig iwc = newIndexWriterConfig(wrapperAnalyzer); + iwc.setMergePolicy(newTieredMergePolicy(random())); + RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); + FieldType ft = new FieldType(TextField.TYPE_STORED); + if (randomBoolean()) { + ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); + } else { + 
ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS); + } + ft.freeze(); + Document doc = new Document(); + for (String input : markedUpInputs) { + Field field = new Field(fieldName, "", ft); + field.setStringValue(input); + doc.add(field); + } + iw.addDocument(doc); + DirectoryReader reader = iw.getReader(); + IndexSearcher searcher = newSearcher(reader); + iw.close(); + TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 1, Sort.INDEXORDER); + assertThat(topDocs.totalHits.value, equalTo(1L)); + String rawValue = Strings.arrayToDelimitedString(plainTextForHighlighter, String.valueOf(MULTIVAL_SEP_CHAR)); + + CustomUnifiedHighlighter highlighter = new CustomUnifiedHighlighter(searcher, hiliteAnalyzer, null, + passageFormatter, locale, + breakIterator, rawValue, noMatchSize); + highlighter.setFieldMatcher((name) -> "text".equals(name)); + final Snippet[] snippets = + highlighter.highlightField("text", query, topDocs.scoreDocs[0].doc, expectedPassages.length); + assertEquals(expectedPassages.length, snippets.length); + for (int i = 0; i < snippets.length; i++) { + assertEquals(expectedPassages[i], snippets[i].getText()); + } + reader.close(); + dir.close(); + } + + + public void testAnnotatedTextStructuredMatch() throws Exception { + // Check that a structured token eg a URL can be highlighted in a query + // on marked-up + // content using an "annotated_text" type field. + String url = "https://en.wikipedia.org/wiki/Key_Word_in_Context"; + String encodedUrl = URLEncoder.encode(url, "UTF-8"); + String annotatedWord = "[highlighting](" + encodedUrl + ")"; + String highlightedAnnotatedWord = "[highlighting](" + AnnotatedPassageFormatter.SEARCH_HIT_TYPE + "=" + encodedUrl + "&" + + encodedUrl + ")"; + final String[] markedUpInputs = { "This is a test. Just a test1 " + annotatedWord + " from [annotated](bar) highlighter.", + "This is the second " + annotatedWord + " value to perform highlighting on a longer text that gets scored lower." }; + + String[] expectedPassages = { + "This is a test. Just a test1 " + highlightedAnnotatedWord + " from [annotated](bar) highlighter.", + "This is the second " + highlightedAnnotatedWord + " value to perform highlighting on a" + + " longer text that gets scored lower." }; + Query query = new TermQuery(new Term("text", url)); + BreakIterator breakIterator = new CustomSeparatorBreakIterator(MULTIVAL_SEP_CHAR); + assertHighlightOneDoc("text", markedUpInputs, query, Locale.ROOT, breakIterator, 0, expectedPassages); + } + + public void testAnnotatedTextOverlapsWithUnstructuredSearchTerms() throws Exception { + final String[] markedUpInputs = { "[Donald Trump](Donald+Trump) visited Singapore", + "Donald duck is a [Disney](Disney+Inc) invention" }; + + String[] expectedPassages = { "[Donald](_hit_term=donald) Trump visited Singapore", + "[Donald](_hit_term=donald) duck is a [Disney](Disney+Inc) invention" }; + Query query = new TermQuery(new Term("text", "donald")); + BreakIterator breakIterator = new CustomSeparatorBreakIterator(MULTIVAL_SEP_CHAR); + assertHighlightOneDoc("text", markedUpInputs, query, Locale.ROOT, breakIterator, 0, expectedPassages); + } + + public void testAnnotatedTextMultiFieldWithBreakIterator() throws Exception { + final String[] markedUpInputs = { "[Donald Trump](Donald+Trump) visited Singapore. 
Kim shook hands with Donald", + "Donald duck is a [Disney](Disney+Inc) invention" }; + String[] expectedPassages = { "[Donald](_hit_term=donald) Trump visited Singapore", + "Kim shook hands with [Donald](_hit_term=donald)", + "[Donald](_hit_term=donald) duck is a [Disney](Disney+Inc) invention" }; + Query query = new TermQuery(new Term("text", "donald")); + BreakIterator breakIterator = new CustomSeparatorBreakIterator(MULTIVAL_SEP_CHAR); + breakIterator = new SplittingBreakIterator(breakIterator, '.'); + assertHighlightOneDoc("text", markedUpInputs, query, Locale.ROOT, breakIterator, 0, expectedPassages); + } + + public void testAnnotatedTextSingleFieldWithBreakIterator() throws Exception { + final String[] markedUpInputs = { "[Donald Trump](Donald+Trump) visited Singapore. Kim shook hands with Donald"}; + String[] expectedPassages = { "[Donald](_hit_term=donald) Trump visited Singapore", + "Kim shook hands with [Donald](_hit_term=donald)"}; + Query query = new TermQuery(new Term("text", "donald")); + BreakIterator breakIterator = new CustomSeparatorBreakIterator(MULTIVAL_SEP_CHAR); + breakIterator = new SplittingBreakIterator(breakIterator, '.'); + assertHighlightOneDoc("text", markedUpInputs, query, Locale.ROOT, breakIterator, 0, expectedPassages); + } + + public void testAnnotatedTextSingleFieldWithPhraseQuery() throws Exception { + final String[] markedUpInputs = { "[Donald Trump](Donald+Trump) visited Singapore", + "Donald Jr was with Melania Trump"}; + String[] expectedPassages = { "[Donald](_hit_term=donald) [Trump](_hit_term=trump) visited Singapore"}; + Query query = new PhraseQuery("text", "donald", "trump"); + BreakIterator breakIterator = new CustomSeparatorBreakIterator(MULTIVAL_SEP_CHAR); + assertHighlightOneDoc("text", markedUpInputs, query, Locale.ROOT, breakIterator, 0, expectedPassages); + } + + public void testBadAnnotation() throws Exception { + final String[] markedUpInputs = { "Missing bracket for [Donald Trump](Donald+Trump visited Singapore"}; + String[] expectedPassages = { "Missing bracket for [Donald Trump](Donald+Trump visited [Singapore](_hit_term=singapore)"}; + Query query = new TermQuery(new Term("text", "singapore")); + BreakIterator breakIterator = new CustomSeparatorBreakIterator(MULTIVAL_SEP_CHAR); + assertHighlightOneDoc("text", markedUpInputs, query, Locale.ROOT, breakIterator, 0, expectedPassages); + } + +} diff --git a/plugins/mapper-annotated-text/src/test/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml b/plugins/mapper-annotated-text/src/test/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml new file mode 100644 index 00000000000..64e0b863bf9 --- /dev/null +++ b/plugins/mapper-annotated-text/src/test/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml @@ -0,0 +1,44 @@ +# Integration tests for Mapper Annotated_text components +# + +--- +"annotated highlighter on annotated text": + - skip: + version: " - 6.99.99" + reason: Annotated text type introduced in 7.0.0-alpha1 + + - do: + indices.create: + index: annotated + body: + settings: + number_of_shards: "1" + number_of_replicas: "0" + mappings: + doc: + properties: + text: + type: annotated_text + entityID: + type: keyword + + - do: + index: + index: annotated + type: doc + body: + "text" : "The [quick brown fox](entity_3789) is brown." 
+ "entityID": "entity_3789" + refresh: true + + - do: + search: + body: { "query" : {"term" : { "entityID" : "entity_3789" } }, "highlight" : { "type" : "annotated", "require_field_match": false, "fields" : { "text" : {} } } } + + - match: {hits.hits.0.highlight.text.0: "The [quick brown fox](_hit_term=entity_3789&entity_3789) is brown."} + + - do: + search: + body: { "query" : {"term" : { "text" : "quick" } }, "highlight" : { "type" : "annotated", "require_field_match": false, "fields" : { "text" : {} } } } + + - match: {hits.hits.0.highlight.text.0: "The [quick](_hit_term=quick) brown fox is brown."} diff --git a/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash index 8fd6bd9ad3f..7aeb03851a5 100644 --- a/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash +++ b/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash @@ -266,6 +266,10 @@ fi install_and_check_plugin mapper murmur3 } +@test "[$GROUP] install annotated-text mapper plugin" { + install_and_check_plugin mapper annotated-text +} + @test "[$GROUP] check reindex module" { check_module reindex } @@ -380,6 +384,10 @@ fi remove_plugin mapper-murmur3 } +@test "[$GROUP] remove annotated-text mapper plugin" { + remove_plugin mapper-annotated-text +} + @test "[$GROUP] remove size mapper plugin" { remove_plugin mapper-size } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java index c1c42fb45a4..6ae302ee87a 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightUtils.java @@ -18,10 +18,13 @@ */ package org.elasticsearch.search.fetch.subphase.highlight; +import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.search.highlight.DefaultEncoder; import org.apache.lucene.search.highlight.Encoder; import org.apache.lucene.search.highlight.SimpleHTMLEncoder; import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.internal.SearchContext; @@ -70,8 +73,18 @@ public final class HighlightUtils { return textsToHighlight; } - static class Encoders { - static final Encoder DEFAULT = new DefaultEncoder(); - static final Encoder HTML = new SimpleHTMLEncoder(); + public static class Encoders { + public static final Encoder DEFAULT = new DefaultEncoder(); + public static final Encoder HTML = new SimpleHTMLEncoder(); } + + static Analyzer getAnalyzer(DocumentMapper docMapper, MappedFieldType type) { + if (type instanceof KeywordFieldMapper.KeywordFieldType) { + KeywordFieldMapper.KeywordFieldType keywordFieldType = (KeywordFieldMapper.KeywordFieldType) type; + if (keywordFieldType.normalizer() != null) { + return keywordFieldType.normalizer(); + } + } + return docMapper.mappers().indexAnalyzer(); + } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java index 1ac3f4789cb..ec5071706b0 100644 --- 
a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java @@ -49,7 +49,6 @@ import java.util.List; import java.util.Map; import static org.elasticsearch.search.fetch.subphase.highlight.UnifiedHighlighter.convertFieldValue; -import static org.elasticsearch.search.fetch.subphase.highlight.UnifiedHighlighter.getAnalyzer; public class PlainHighlighter implements Highlighter { private static final String CACHE_KEY = "highlight-plain"; @@ -102,7 +101,7 @@ public class PlainHighlighter implements Highlighter { int numberOfFragments = field.fieldOptions().numberOfFragments() == 0 ? 1 : field.fieldOptions().numberOfFragments(); ArrayList fragsList = new ArrayList<>(); List textsToHighlight; - Analyzer analyzer = getAnalyzer(context.mapperService().documentMapper(hitContext.hit().getType()), fieldType); + Analyzer analyzer = HighlightUtils.getAnalyzer(context.mapperService().documentMapper(hitContext.hit().getType()), fieldType); final int maxAnalyzedOffset = context.indexShard().indexSettings().getHighlightMaxAnalyzedOffset(); try { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java index 2c9d482cab0..123e18a4da6 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java @@ -26,6 +26,7 @@ import org.apache.lucene.search.uhighlight.BoundedBreakIteratorScanner; import org.apache.lucene.search.uhighlight.CustomPassageFormatter; import org.apache.lucene.search.uhighlight.CustomSeparatorBreakIterator; import org.apache.lucene.search.uhighlight.CustomUnifiedHighlighter; +import org.apache.lucene.search.uhighlight.PassageFormatter; import org.apache.lucene.search.uhighlight.Snippet; import org.apache.lucene.search.uhighlight.UnifiedHighlighter.OffsetSource; import org.apache.lucene.util.BytesRef; @@ -34,7 +35,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.search.fetch.FetchPhaseExecutionException; import org.elasticsearch.search.fetch.FetchSubPhase; @@ -54,7 +54,7 @@ public class UnifiedHighlighter implements Highlighter { public boolean canHighlight(MappedFieldType fieldType) { return true; } - + @Override public HighlightField highlight(HighlighterContext highlighterContext) { MappedFieldType fieldType = highlighterContext.fieldType; @@ -62,23 +62,18 @@ public class UnifiedHighlighter implements Highlighter { SearchContext context = highlighterContext.context; FetchSubPhase.HitContext hitContext = highlighterContext.hitContext; Encoder encoder = field.fieldOptions().encoder().equals("html") ? 
HighlightUtils.Encoders.HTML : HighlightUtils.Encoders.DEFAULT; - CustomPassageFormatter passageFormatter = new CustomPassageFormatter(field.fieldOptions().preTags()[0], - field.fieldOptions().postTags()[0], encoder); final int maxAnalyzedOffset = context.indexShard().indexSettings().getHighlightMaxAnalyzedOffset(); List snippets = new ArrayList<>(); int numberOfFragments; try { - final Analyzer analyzer = - getAnalyzer(context.mapperService().documentMapper(hitContext.hit().getType()), fieldType); - List fieldValues = HighlightUtils.loadFieldValues(field, fieldType, context, hitContext); - fieldValues = fieldValues.stream() - .map((s) -> convertFieldValue(fieldType, s)) - .collect(Collectors.toList()); + final Analyzer analyzer = getAnalyzer(context.mapperService().documentMapper(hitContext.hit().getType()), fieldType); + List fieldValues = loadFieldValues(fieldType, field, context, hitContext); if (fieldValues.size() == 0) { return null; } + final PassageFormatter passageFormatter = getPassageFormatter(field, encoder); final IndexSearcher searcher = new IndexSearcher(hitContext.reader()); final CustomUnifiedHighlighter highlighter; final String fieldValue = mergeFieldValues(fieldValues, MULTIVAL_SEP_CHAR); @@ -145,7 +140,27 @@ public class UnifiedHighlighter implements Highlighter { return null; } - private BreakIterator getBreakIterator(SearchContextHighlight.Field field) { + protected PassageFormatter getPassageFormatter(SearchContextHighlight.Field field, Encoder encoder) { + CustomPassageFormatter passageFormatter = new CustomPassageFormatter(field.fieldOptions().preTags()[0], + field.fieldOptions().postTags()[0], encoder); + return passageFormatter; + } + + + protected Analyzer getAnalyzer(DocumentMapper docMapper, MappedFieldType type) { + return HighlightUtils.getAnalyzer(docMapper, type); + } + + protected List loadFieldValues(MappedFieldType fieldType, SearchContextHighlight.Field field, SearchContext context, + FetchSubPhase.HitContext hitContext) throws IOException { + List fieldValues = HighlightUtils.loadFieldValues(field, fieldType, context, hitContext); + fieldValues = fieldValues.stream() + .map((s) -> convertFieldValue(fieldType, s)) + .collect(Collectors.toList()); + return fieldValues; + } + + protected BreakIterator getBreakIterator(SearchContextHighlight.Field field) { final SearchContextHighlight.FieldOptions fieldOptions = field.fieldOptions(); final Locale locale = fieldOptions.boundaryScannerLocale() != null ? 
fieldOptions.boundaryScannerLocale() : @@ -168,7 +183,7 @@ public class UnifiedHighlighter implements Highlighter { } } - private static List filterSnippets(List snippets, int numberOfFragments) { + protected static List filterSnippets(List snippets, int numberOfFragments) { //We need to filter the snippets as due to no_match_size we could have //either highlighted snippets or non highlighted ones and we don't want to mix those up @@ -203,17 +218,7 @@ public class UnifiedHighlighter implements Highlighter { return filteredSnippets; } - static Analyzer getAnalyzer(DocumentMapper docMapper, MappedFieldType type) { - if (type instanceof KeywordFieldMapper.KeywordFieldType) { - KeywordFieldMapper.KeywordFieldType keywordFieldType = (KeywordFieldMapper.KeywordFieldType) type; - if (keywordFieldType.normalizer() != null) { - return keywordFieldType.normalizer(); - } - } - return docMapper.mappers().indexAnalyzer(); - } - - static String convertFieldValue(MappedFieldType type, Object value) { + protected static String convertFieldValue(MappedFieldType type, Object value) { if (value instanceof BytesRef) { return type.valueForDisplay(value).toString(); } else { @@ -221,14 +226,14 @@ public class UnifiedHighlighter implements Highlighter { } } - private static String mergeFieldValues(List fieldValues, char valuesSeparator) { + protected static String mergeFieldValues(List fieldValues, char valuesSeparator) { //postings highlighter accepts all values in a single string, as offsets etc. need to match with content //loaded from stored fields, we merge all values using a proper separator String rawValue = Strings.collectionToDelimitedString(fieldValues, String.valueOf(valuesSeparator)); return rawValue.substring(0, Math.min(rawValue.length(), Integer.MAX_VALUE - 1)); } - private OffsetSource getOffsetSource(MappedFieldType fieldType) { + protected OffsetSource getOffsetSource(MappedFieldType fieldType) { if (fieldType.indexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) { return fieldType.storeTermVectors() ? 
OffsetSource.POSTINGS_WITH_TERM_VECTORS : OffsetSource.POSTINGS; } From 9543992d8e1c15215055942a15b670d7b0213d75 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 18 Sep 2018 11:51:11 +0100 Subject: [PATCH 16/32] HLRC: Get ML calendars (#33760) --- .../client/MLRequestConverters.java | 17 ++- .../client/MachineLearningClient.java | 40 +++++++ .../client/ml/GetCalendarsRequest.java | 104 ++++++++++++++++++ .../client/ml/GetCalendarsResponse.java | 86 +++++++++++++++ .../client/ml/job/results/AnomalyRecord.java | 16 +-- .../client/ml/job/results/Bucket.java | 15 +-- .../ml/job/results/BucketInfluencer.java | 16 +-- .../client/ml/job/results/Influencer.java | 16 +-- .../client/ml/job/results/OverallBucket.java | 16 +-- .../client/ml/job/results/Result.java | 1 - .../client/MLRequestConvertersTests.java | 25 ++++- .../client/MachineLearningIT.java | 27 ++++- .../MlClientDocumentationIT.java | 65 +++++++++++ .../client/ml/GetCalendarsRequestTests.java | 46 ++++++++ .../client/ml/GetCalendarsResponseTests.java | 52 +++++++++ .../high-level/ml/get-calendars.asciidoc | 83 ++++++++++++++ .../high-level/ml/put-calendar.asciidoc | 2 +- .../high-level/supported-apis.asciidoc | 2 + 18 files changed, 563 insertions(+), 66 deletions(-) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsRequest.java create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsResponse.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCalendarsRequestTests.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCalendarsResponseTests.java create mode 100644 docs/java-rest/high-level/ml/get-calendars.asciidoc diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java index 1a681822eca..bc2ff7b17d5 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java @@ -34,6 +34,7 @@ import org.elasticsearch.client.ml.DeleteJobRequest; import org.elasticsearch.client.ml.FlushJobRequest; import org.elasticsearch.client.ml.ForecastJobRequest; import org.elasticsearch.client.ml.GetBucketsRequest; +import org.elasticsearch.client.ml.GetCalendarsRequest; import org.elasticsearch.client.ml.GetCategoriesRequest; import org.elasticsearch.client.ml.GetDatafeedRequest; import org.elasticsearch.client.ml.GetInfluencersRequest; @@ -229,7 +230,7 @@ final class MLRequestConverters { return request; } - static Request deleteForecast(DeleteForecastRequest deleteForecastRequest) throws IOException { + static Request deleteForecast(DeleteForecastRequest deleteForecastRequest) { String endpoint = new EndpointBuilder() .addPathPartAsIs("_xpack") .addPathPartAsIs("ml") @@ -305,7 +306,7 @@ final class MLRequestConverters { return request; } - static Request postData(PostDataRequest postDataRequest) throws IOException { + static Request postData(PostDataRequest postDataRequest) { String endpoint = new EndpointBuilder() .addPathPartAsIs("_xpack") .addPathPartAsIs("ml") @@ -359,4 +360,16 @@ final class MLRequestConverters { request.setEntity(createEntity(putCalendarRequest, REQUEST_BODY_CONTENT_TYPE)); return request; } + + static Request getCalendars(GetCalendarsRequest getCalendarsRequest) throws IOException { + String endpoint = new 
EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("ml") + .addPathPartAsIs("calendars") + .addPathPart(getCalendarsRequest.getCalendarId()) + .build(); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + request.setEntity(createEntity(getCalendarsRequest, REQUEST_BODY_CONTENT_TYPE)); + return request; + } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java index caaf1326dbd..5edb5115d85 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java @@ -31,6 +31,8 @@ import org.elasticsearch.client.ml.ForecastJobRequest; import org.elasticsearch.client.ml.ForecastJobResponse; import org.elasticsearch.client.ml.GetBucketsRequest; import org.elasticsearch.client.ml.GetBucketsResponse; +import org.elasticsearch.client.ml.GetCalendarsRequest; +import org.elasticsearch.client.ml.GetCalendarsResponse; import org.elasticsearch.client.ml.GetCategoriesRequest; import org.elasticsearch.client.ml.GetCategoriesResponse; import org.elasticsearch.client.ml.GetDatafeedRequest; @@ -792,6 +794,44 @@ public final class MachineLearningClient { Collections.emptySet()); } + /** + * Gets a single or multiple calendars. + *
<p>
+ * For additional info + * see ML GET calendars documentation + * + * @param request The calendars request + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return {@link GetCalendarsResponse} response object containing the {@link org.elasticsearch.client.ml.calendars.Calendar} + * objects and the number of calendars found + */ + public GetCalendarsResponse getCalendars(GetCalendarsRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, + MLRequestConverters::getCalendars, + options, + GetCalendarsResponse::fromXContent, + Collections.emptySet()); + } + + /** + * Gets a single or multiple calendars, notifies listener once the requested records are retrieved. + *
<p>
+ * For additional info + * see ML GET calendars documentation + * + * @param request The calendars request + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener Listener to be notified upon request completion + */ + public void getCalendarsAsync(GetCalendarsRequest request, RequestOptions options, ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, + MLRequestConverters::getCalendars, + options, + GetCalendarsResponse::fromXContent, + listener, + Collections.emptySet()); + } + /** * Gets the influencers for a Machine Learning Job. *
<p>
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsRequest.java new file mode 100644 index 00000000000..322efc19927 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsRequest.java @@ -0,0 +1,104 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.ml; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.ml.calendars.Calendar; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import org.elasticsearch.client.ml.job.util.PageParams; + +import java.io.IOException; +import java.util.Objects; + +public class GetCalendarsRequest extends ActionRequest implements ToXContentObject { + + public static final ObjectParser PARSER = + new ObjectParser<>("get_calendars_request", GetCalendarsRequest::new); + + static { + PARSER.declareString(GetCalendarsRequest::setCalendarId, Calendar.ID); + PARSER.declareObject(GetCalendarsRequest::setPageParams, PageParams.PARSER, PageParams.PAGE); + } + + private String calendarId; + private PageParams pageParams; + + public GetCalendarsRequest() { + } + + public GetCalendarsRequest(String calendarId) { + this.calendarId = calendarId; + } + + public String getCalendarId() { + return calendarId; + } + + public void setCalendarId(String calendarId) { + this.calendarId = calendarId; + } + + public PageParams getPageParams() { + return pageParams; + } + + public void setPageParams(PageParams pageParams) { + this.pageParams = pageParams; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (calendarId != null) { + builder.field(Calendar.ID.getPreferredName(), calendarId); + } + if (pageParams != null) { + builder.field(PageParams.PAGE.getPreferredName(), pageParams); + } + builder.endObject(); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(calendarId, pageParams); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + GetCalendarsRequest other = (GetCalendarsRequest) obj; + return Objects.equals(calendarId, other.calendarId) && Objects.equals(pageParams, other.pageParams); + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsResponse.java 
b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsResponse.java
new file mode 100644
index 00000000000..e07b90f34e2
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsResponse.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.client.ml.calendars.Calendar;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Objects;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+
+public class GetCalendarsResponse extends AbstractResultResponse<Calendar> {
+
+    public static final ParseField RESULTS_FIELD = new ParseField("calendars");
+
+    @SuppressWarnings("unchecked")
+    public static final ConstructingObjectParser<GetCalendarsResponse, Void> PARSER =
+            new ConstructingObjectParser<>("calendars_response", true,
+                    a -> new GetCalendarsResponse((List<Calendar>) a[0], (long) a[1]));
+
+    static {
+        PARSER.declareObjectArray(constructorArg(), Calendar.PARSER, RESULTS_FIELD);
+        PARSER.declareLong(constructorArg(), AbstractResultResponse.COUNT);
+    }
+
+    public static GetCalendarsResponse fromXContent(XContentParser parser) throws IOException {
+        return PARSER.parse(parser, null);
+    }
+
+    GetCalendarsResponse(List<Calendar> calendars, long count) {
+        super(RESULTS_FIELD, calendars, count);
+    }
+
+    /**
+     * The collection of {@link Calendar} objects found in the query
+     */
+    public List<Calendar> calendars() {
+        return results;
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(results, count);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (this == obj) {
+            return true;
+        }
+
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+
+        GetCalendarsResponse other = (GetCalendarsResponse) obj;
+        return Objects.equals(results, other.results) && count == other.count;
+    }
+
+    @Override
+    public final String toString() {
+        return Strings.toString(this);
+    }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyRecord.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyRecord.java
index db4483fef4b..c10610a872f 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyRecord.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyRecord.java
@@ -19,16 +19,14 @@ package org.elasticsearch.client.ml.job.results;
 
 import org.elasticsearch.client.ml.job.config.Job;
+import
org.elasticsearch.client.ml.job.util.TimeUtil; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser.Token; import java.io.IOException; -import java.time.format.DateTimeFormatter; import java.util.Collections; import java.util.Date; import java.util.List; @@ -90,15 +88,9 @@ public class AnomalyRecord implements ToXContentObject { static { PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(DateFormatters.toZonedDateTime(DateTimeFormatter.ISO_INSTANT.parse(p.text())).toInstant().toEpochMilli()); - } - throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for [" - + Result.TIMESTAMP.getPreferredName() + "]"); - }, Result.TIMESTAMP, ValueType.VALUE); + PARSER.declareField(ConstructingObjectParser.constructorArg(), + (p) -> TimeUtil.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), + Result.TIMESTAMP, ValueType.VALUE); PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); PARSER.declareString((anomalyRecord, s) -> {}, Result.RESULT_TYPE); PARSER.declareDouble(AnomalyRecord::setProbability, PROBABILITY); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Bucket.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Bucket.java index 2dfed4c3834..9f549f16bbc 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Bucket.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Bucket.java @@ -19,16 +19,14 @@ package org.elasticsearch.client.ml.job.results; import org.elasticsearch.client.ml.job.config.Job; +import org.elasticsearch.client.ml.job.util.TimeUtil; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser.Token; import java.io.IOException; -import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Collections; import java.util.Date; @@ -63,15 +61,8 @@ public class Bucket implements ToXContentObject { static { PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(DateFormatters.toZonedDateTime(DateTimeFormatter.ISO_INSTANT.parse(p.text())).toInstant().toEpochMilli()); - } - throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for [" - + Result.TIMESTAMP.getPreferredName() + "]"); - }, Result.TIMESTAMP, ValueType.VALUE); + 
PARSER.declareField(ConstructingObjectParser.constructorArg(), + (p) -> TimeUtil.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), Result.TIMESTAMP, ValueType.VALUE); PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); PARSER.declareDouble(Bucket::setAnomalyScore, ANOMALY_SCORE); PARSER.declareDouble(Bucket::setInitialAnomalyScore, INITIAL_ANOMALY_SCORE); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/BucketInfluencer.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/BucketInfluencer.java index 6fc2a9b8b2d..ade5a5a2f50 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/BucketInfluencer.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/BucketInfluencer.java @@ -19,16 +19,14 @@ package org.elasticsearch.client.ml.job.results; import org.elasticsearch.client.ml.job.config.Job; +import org.elasticsearch.client.ml.job.util.TimeUtil; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser.Token; import java.io.IOException; -import java.time.format.DateTimeFormatter; import java.util.Date; import java.util.Objects; @@ -56,15 +54,9 @@ public class BucketInfluencer implements ToXContentObject { static { PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(DateFormatters.toZonedDateTime(DateTimeFormatter.ISO_INSTANT.parse(p.text())).toInstant().toEpochMilli()); - } - throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for [" - + Result.TIMESTAMP.getPreferredName() + "]"); - }, Result.TIMESTAMP, ValueType.VALUE); + PARSER.declareField(ConstructingObjectParser.constructorArg(), + (p) -> TimeUtil.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), + Result.TIMESTAMP, ValueType.VALUE); PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); PARSER.declareString((bucketInfluencer, s) -> {}, Result.RESULT_TYPE); PARSER.declareString(BucketInfluencer::setInfluencerFieldName, INFLUENCER_FIELD_NAME); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influencer.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influencer.java index 28ceb243bf6..4892b7f9346 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influencer.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influencer.java @@ -19,16 +19,14 @@ package org.elasticsearch.client.ml.job.results; import org.elasticsearch.client.ml.job.config.Job; +import org.elasticsearch.client.ml.job.util.TimeUtil; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContentObject; import 
org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser.Token; import java.io.IOException; -import java.time.format.DateTimeFormatter; import java.util.Date; import java.util.Objects; @@ -61,15 +59,9 @@ public class Influencer implements ToXContentObject { PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_NAME); PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_VALUE); - PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> { - if (p.currentToken() == Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == Token.VALUE_STRING) { - return new Date(DateFormatters.toZonedDateTime(DateTimeFormatter.ISO_INSTANT.parse(p.text())).toInstant().toEpochMilli()); - } - throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for [" - + Result.TIMESTAMP.getPreferredName() + "]"); - }, Result.TIMESTAMP, ValueType.VALUE); + PARSER.declareField(ConstructingObjectParser.constructorArg(), + (p) -> TimeUtil.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), + Result.TIMESTAMP, ValueType.VALUE); PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); PARSER.declareString((influencer, s) -> {}, Result.RESULT_TYPE); PARSER.declareDouble(Influencer::setProbability, PROBABILITY); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/OverallBucket.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/OverallBucket.java index eaf050f8be9..722c2361b67 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/OverallBucket.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/OverallBucket.java @@ -19,16 +19,14 @@ package org.elasticsearch.client.ml.job.results; import org.elasticsearch.client.ml.job.config.Job; +import org.elasticsearch.client.ml.job.util.TimeUtil; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; -import java.time.format.DateTimeFormatter; import java.util.Collections; import java.util.Date; import java.util.List; @@ -56,15 +54,9 @@ public class OverallBucket implements ToXContentObject { a -> new OverallBucket((Date) a[0], (long) a[1], (double) a[2], (boolean) a[3])); static { - PARSER.declareField(ConstructingObjectParser.constructorArg(), p -> { - if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { - return new Date(p.longValue()); - } else if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return new Date(DateFormatters.toZonedDateTime(DateTimeFormatter.ISO_INSTANT.parse(p.text())).toInstant().toEpochMilli()); - } - throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for [" - + Result.TIMESTAMP.getPreferredName() + "]"); - }, Result.TIMESTAMP, ObjectParser.ValueType.VALUE); + PARSER.declareField(ConstructingObjectParser.constructorArg(), + (p) -> TimeUtil.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), + Result.TIMESTAMP, ObjectParser.ValueType.VALUE); 
PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); PARSER.declareDouble(ConstructingObjectParser.constructorArg(), OVERALL_SCORE); PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), Result.IS_INTERIM); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Result.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Result.java index a7f8933a0a1..f98aef55f5b 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Result.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Result.java @@ -28,7 +28,6 @@ public final class Result { /** * Serialisation fields */ - public static final ParseField TYPE = new ParseField("result"); public static final ParseField RESULT_TYPE = new ParseField("result_type"); public static final ParseField TIMESTAMP = new ParseField("timestamp"); public static final ParseField IS_INTERIM = new ParseField("is_interim"); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java index 61122901b86..fdd4200ee81 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.client.ml.DeleteJobRequest; import org.elasticsearch.client.ml.FlushJobRequest; import org.elasticsearch.client.ml.ForecastJobRequest; import org.elasticsearch.client.ml.GetBucketsRequest; +import org.elasticsearch.client.ml.GetCalendarsRequest; import org.elasticsearch.client.ml.GetCategoriesRequest; import org.elasticsearch.client.ml.GetDatafeedRequest; import org.elasticsearch.client.ml.GetInfluencersRequest; @@ -259,7 +260,7 @@ public class MLRequestConvertersTests extends ESTestCase { assertEquals(Boolean.toString(true), request.getParameters().get("force")); } - public void testDeleteForecast() throws Exception { + public void testDeleteForecast() { String jobId = randomAlphaOfLength(10); DeleteForecastRequest deleteForecastRequest = new DeleteForecastRequest(jobId); @@ -415,6 +416,28 @@ public class MLRequestConvertersTests extends ESTestCase { } } + public void testGetCalendars() throws IOException { + GetCalendarsRequest getCalendarsRequest = new GetCalendarsRequest(); + String expectedEndpoint = "/_xpack/ml/calendars"; + + if (randomBoolean()) { + String calendarId = randomAlphaOfLength(10); + getCalendarsRequest.setCalendarId(calendarId); + expectedEndpoint += "/" + calendarId; + } + if (randomBoolean()) { + getCalendarsRequest.setPageParams(new PageParams(10, 20)); + } + + Request request = MLRequestConverters.getCalendars(getCalendarsRequest); + assertEquals(HttpGet.METHOD_NAME, request.getMethod()); + assertEquals(expectedEndpoint, request.getEndpoint()); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) { + GetCalendarsRequest parsedRequest = GetCalendarsRequest.PARSER.apply(parser, null); + assertThat(parsedRequest, equalTo(getCalendarsRequest)); + } + } + private static Job createValidJob(String jobId) { AnalysisConfig.Builder analysisConfig = AnalysisConfig.builder(Collections.singletonList( Detector.builder().setFunction("count").build())); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java
index 5349378e335..e90d541b9c7 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java
@@ -32,6 +32,8 @@ import org.elasticsearch.client.ml.FlushJobRequest;
 import org.elasticsearch.client.ml.FlushJobResponse;
 import org.elasticsearch.client.ml.ForecastJobRequest;
 import org.elasticsearch.client.ml.ForecastJobResponse;
+import org.elasticsearch.client.ml.GetCalendarsRequest;
+import org.elasticsearch.client.ml.GetCalendarsResponse;
 import org.elasticsearch.client.ml.GetDatafeedRequest;
 import org.elasticsearch.client.ml.GetDatafeedResponse;
 import org.elasticsearch.client.ml.GetJobRequest;
@@ -483,7 +485,6 @@
     }
 
     public void testPutCalendar() throws IOException {
-
         Calendar calendar = CalendarTests.testInstance();
         MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
         PutCalendarResponse putCalendarResponse = execute(new PutCalendarRequest(calendar), machineLearningClient::putCalendar,
@@ -492,6 +493,30 @@
         assertThat(putCalendarResponse.getCalendar(), equalTo(calendar));
     }
 
+    public void testGetCalendars() throws Exception {
+        Calendar calendar1 = CalendarTests.testInstance();
+        Calendar calendar2 = CalendarTests.testInstance();
+
+        MachineLearningClient machineLearningClient = highLevelClient().machineLearning();
+        machineLearningClient.putCalendar(new PutCalendarRequest(calendar1), RequestOptions.DEFAULT);
+        machineLearningClient.putCalendar(new PutCalendarRequest(calendar2), RequestOptions.DEFAULT);
+
+        GetCalendarsRequest getCalendarsRequest = new GetCalendarsRequest();
+        getCalendarsRequest.setCalendarId("_all");
+        GetCalendarsResponse getCalendarsResponse = execute(getCalendarsRequest, machineLearningClient::getCalendars,
+                machineLearningClient::getCalendarsAsync);
+        assertEquals(2, getCalendarsResponse.count());
+        assertEquals(2, getCalendarsResponse.calendars().size());
+        assertThat(getCalendarsResponse.calendars().stream().map(Calendar::getId).collect(Collectors.toList()),
+                hasItems(calendar1.getId(), calendar2.getId()));
+
+        getCalendarsRequest.setCalendarId(calendar1.getId());
+        getCalendarsResponse = execute(getCalendarsRequest, machineLearningClient::getCalendars,
+                machineLearningClient::getCalendarsAsync);
+        assertEquals(1, getCalendarsResponse.count());
+        assertEquals(calendar1, getCalendarsResponse.calendars().get(0));
+    }
+
     public static String randomValidJobId() {
         CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz0123456789".toCharArray());
         return generator.ofCodePointsLength(random(), 10, 10);
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java
index f0f7ffd939f..ddaf9d8db6c 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java
@@ -43,6 +43,8 @@ import org.elasticsearch.client.ml.ForecastJobRequest;
 import org.elasticsearch.client.ml.ForecastJobResponse;
 import org.elasticsearch.client.ml.GetBucketsRequest;
 import org.elasticsearch.client.ml.GetBucketsResponse;
+import org.elasticsearch.client.ml.GetCalendarsRequest;
+import org.elasticsearch.client.ml.GetCalendarsResponse;
 import org.elasticsearch.client.ml.GetCategoriesRequest;
 import org.elasticsearch.client.ml.GetCategoriesResponse;
 import org.elasticsearch.client.ml.GetDatafeedRequest;
@@ -880,6 +882,7 @@ public class MlClientDocumentationIT extends ESRestHighLevelClientTestCase {
         PostDataRequest postDataRequest = new PostDataRequest(job.getId(), builder);
         client.machineLearning().postData(postDataRequest, RequestOptions.DEFAULT);
         client.machineLearning().flushJob(new FlushJobRequest(job.getId()), RequestOptions.DEFAULT);
+
         ForecastJobResponse forecastJobResponse = client.machineLearning().
                 forecastJob(new ForecastJobRequest(job.getId()), RequestOptions.DEFAULT);
         String forecastId = forecastJobResponse.getForecastId();
@@ -1526,4 +1529,66 @@
         assertTrue(latch.await(30L, TimeUnit.SECONDS));
     }
+
+    public void testGetCalendar() throws IOException, InterruptedException {
+        RestHighLevelClient client = highLevelClient();
+
+        Calendar calendar = new Calendar("holidays", Collections.singletonList("job_1"), "A calendar for public holidays");
+        PutCalendarRequest putRequest = new PutCalendarRequest(calendar);
+        client.machineLearning().putCalendar(putRequest, RequestOptions.DEFAULT);
+        {
+            //tag::x-pack-ml-get-calendars-request
+            GetCalendarsRequest request = new GetCalendarsRequest(); // <1>
+            //end::x-pack-ml-get-calendars-request
+
+            //tag::x-pack-ml-get-calendars-id
+            request.setCalendarId("holidays"); // <1>
+            //end::x-pack-ml-get-calendars-id
+
+            //tag::x-pack-ml-get-calendars-page
+            request.setPageParams(new PageParams(10, 20)); // <1>
+            //end::x-pack-ml-get-calendars-page
+
+            // reset page params
+            request.setPageParams(null);
+
+            //tag::x-pack-ml-get-calendars-execution
+            GetCalendarsResponse response = client.machineLearning().getCalendars(request, RequestOptions.DEFAULT);
+            //end::x-pack-ml-get-calendars-execution
+
+            // tag::x-pack-ml-get-calendars-response
+            long count = response.count(); // <1>
+            List<Calendar> calendars = response.calendars(); // <2>
+            // end::x-pack-ml-get-calendars-response
+            assertEquals(1, calendars.size());
+        }
+        {
+            GetCalendarsRequest request = new GetCalendarsRequest("holidays");
+
+            // tag::x-pack-ml-get-calendars-listener
+            ActionListener<GetCalendarsResponse> listener =
+                    new ActionListener<GetCalendarsResponse>() {
+                        @Override
+                        public void onResponse(GetCalendarsResponse getCalendarsResponse) {
+                            // <1>
+                        }
+
+                        @Override
+                        public void onFailure(Exception e) {
+                            // <2>
+                        }
+                    };
+            // end::x-pack-ml-get-calendars-listener
+
+            // Replace the empty listener by a blocking listener in test
+            final CountDownLatch latch = new CountDownLatch(1);
+            listener = new LatchedActionListener<>(listener, latch);
+
+            // tag::x-pack-ml-get-calendars-execute-async
+            client.machineLearning().getCalendarsAsync(request, RequestOptions.DEFAULT, listener); // <1>
+            // end::x-pack-ml-get-calendars-execute-async
+
+            assertTrue(latch.await(30L, TimeUnit.SECONDS));
+        }
+    }
 }
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCalendarsRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCalendarsRequestTests.java
new file mode 100644
index 00000000000..b7ca44fd5fa
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCalendarsRequestTests.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.client.ml;
+
+import org.elasticsearch.client.ml.job.util.PageParams;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+public class GetCalendarsRequestTests extends AbstractXContentTestCase<GetCalendarsRequest> {
+
+    @Override
+    protected GetCalendarsRequest createTestInstance() {
+        GetCalendarsRequest request = new GetCalendarsRequest();
+        request.setCalendarId(randomAlphaOfLength(9));
+        if (randomBoolean()) {
+            request.setPageParams(new PageParams(1, 2));
+        }
+        return request;
+    }
+
+    @Override
+    protected GetCalendarsRequest doParseInstance(XContentParser parser) {
+        return GetCalendarsRequest.PARSER.apply(parser, null);
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return false;
+    }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCalendarsResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCalendarsResponseTests.java
new file mode 100644
index 00000000000..fd28e410cdc
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCalendarsResponseTests.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */ + +package org.elasticsearch.client.ml; + +import org.elasticsearch.client.ml.calendars.Calendar; +import org.elasticsearch.client.ml.calendars.CalendarTests; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +public class GetCalendarsResponseTests extends AbstractXContentTestCase { + + @Override + protected GetCalendarsResponse createTestInstance() { + List calendars = new ArrayList<>(); + int count = randomIntBetween(0, 3); + for (int i=0; i Constructing a new request for all calendars + + +==== Optional Arguments +The following arguments are optional: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-id] +-------------------------------------------------- +<1> Construct a request for the single calendar `holidays` + + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-page] +-------------------------------------------------- +<1> The page parameters `from` and `size`. `from` specifies the number of calendars to skip. +`size` specifies the maximum number of calendars to get. Defaults to `0` and `100` respectively. + +[[java-rest-high-x-pack-ml-get-calendars-execution]] +==== Execution +The request can be executed through the `MachineLearningClient` contained +in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-execution] +-------------------------------------------------- + +[[java-rest-high-x-pack-ml-get-calendars-execution-async]] +==== Asynchronous Execution + +The request can also be executed asynchronously: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-execute-async] +-------------------------------------------------- +<1> The `GetCalendarsRequest` to execute and the `ActionListener` to use when +the execution completes + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back with the `onResponse` method +if the execution is successful or the `onFailure` method if the execution +failed. 
+ +A typical listener for `GetCalendarsResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-listener] +-------------------------------------------------- +<1> `onResponse` is called back when the action is completed successfully +<2> `onFailure` is called back when some unexpected error occurs + +[[java-rest-high-x-pack-ml-get-calendars-response]] +==== Get calendars Response + +The returned `GetCalendarsResponse` contains the requested calendars: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-calendars-response] +-------------------------------------------------- +<1> The count of calendars that were matched +<2> The calendars retrieved \ No newline at end of file diff --git a/docs/java-rest/high-level/ml/put-calendar.asciidoc b/docs/java-rest/high-level/ml/put-calendar.asciidoc index e6814c76fad..5d163f37eb4 100644 --- a/docs/java-rest/high-level/ml/put-calendar.asciidoc +++ b/docs/java-rest/high-level/ml/put-calendar.asciidoc @@ -4,7 +4,7 @@ Creates a new {ml} calendar. The API accepts a `PutCalendarRequest` and responds with a `PutCalendarResponse` object. -[[java-rest-high-x-pack-ml-get-calendars-request]] +[[java-rest-high-x-pack-ml-put-calendar-request]] ==== Put Calendar Request A `PutCalendarRequest` is constructed with a Calendar object diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 78a9f0bc7c2..2c907dd2053 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -231,6 +231,7 @@ The Java High Level REST Client supports the following Machine Learning APIs: * <> * <> * <> +* <> * <> include::ml/put-job.asciidoc[] @@ -252,6 +253,7 @@ include::ml/get-records.asciidoc[] include::ml/post-data.asciidoc[] include::ml/get-influencers.asciidoc[] include::ml/get-categories.asciidoc[] +include::ml/get-calendars.asciidoc[] include::ml/put-calendar.asciidoc[] == Migration APIs From 421f58e17243a08144a1f819d2896da4cef5877b Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 18 Sep 2018 12:01:16 +0100 Subject: [PATCH 17/32] Remove discovery-file plugin (#33257) In #33241 we moved the file-based discovery functionality to core Elasticsearch, but preserved the `discovery-file` plugin, and support for the existing location of the `unicast_hosts.txt` file, for BWC reasons. This commit completes the removal of this plugin. 
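For context, the hosts list that core Elasticsearch now reads from `$ES_PATH_CONF/unicast_hosts.txt` is a plain text file with one `host[:port]` entry per line, where lines starting with `#` are comments. Below is a minimal standalone sketch of that comment-filtering read; the class name and file location are illustrative only, and the filtering mirrors the provider code shown later in this patch:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

// Illustrative sketch: read unicast_hosts.txt and drop comment lines,
// the same filtering the core file-based hosts provider applies.
public class UnicastHostsFileSketch {
    public static void main(String[] args) throws IOException {
        Path hostsFile = Paths.get("unicast_hosts.txt"); // assumed location for the sketch
        try (Stream<String> lines = Files.lines(hostsFile)) {
            List<String> hosts = lines
                    .filter(line -> line.startsWith("#") == false) // `#` lines are comments
                    .collect(Collectors.toList());
            hosts.forEach(System.out::println); // e.g. "10.10.10.5" or "10.10.10.6:9305"
        }
    }
}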
--- docs/plugins/discovery-file.asciidoc | 14 ----- docs/plugins/discovery.asciidoc | 6 -- docs/reference/cat/plugins.asciidoc | 1 - .../migration/migrate_7_0/plugins.asciidoc | 10 ++- plugins/discovery-file/build.gradle | 61 ------------------- .../config/discovery-file/unicast_hosts.txt | 20 ------ .../file/FileBasedDiscoveryPlugin.java | 51 ---------------- ...leBasedDiscoveryClientYamlTestSuiteIT.java | 40 ------------ ...eBasedDiscoveryPluginDeprecationTests.java | 32 ---------- .../test/discovery_file/10_basic.yml | 13 ---- .../tests/module_and_plugin_test_cases.bash | 8 --- .../zen/FileBasedUnicastHostsProvider.java | 26 +++----- .../FileBasedUnicastHostsProviderTests.java | 46 +------------- 13 files changed, 19 insertions(+), 309 deletions(-) delete mode 100644 docs/plugins/discovery-file.asciidoc delete mode 100644 plugins/discovery-file/build.gradle delete mode 100644 plugins/discovery-file/config/discovery-file/unicast_hosts.txt delete mode 100644 plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java delete mode 100644 plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedDiscoveryClientYamlTestSuiteIT.java delete mode 100644 plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPluginDeprecationTests.java delete mode 100644 plugins/discovery-file/src/test/resources/rest-api-spec/test/discovery_file/10_basic.yml diff --git a/docs/plugins/discovery-file.asciidoc b/docs/plugins/discovery-file.asciidoc deleted file mode 100644 index 4f2182da056..00000000000 --- a/docs/plugins/discovery-file.asciidoc +++ /dev/null @@ -1,14 +0,0 @@ -[[discovery-file]] -=== File-Based Discovery Plugin - -The functionality provided by the `discovery-file` plugin is now available in -Elasticsearch without requiring a plugin. This plugin still exists to ensure -backwards compatibility, but it will be removed in a future version. - -On installation, this plugin creates a file at -`$ES_PATH_CONF/discovery-file/unicast_hosts.txt` that comprises comments that -describe how to use it. It is preferable not to install this plugin and instead -to create this file, and its containing directory, using standard tools. - -:plugin_name: discovery-file -include::install_remove.asciidoc[] diff --git a/docs/plugins/discovery.asciidoc b/docs/plugins/discovery.asciidoc index 39afbea96dc..46b61146b12 100644 --- a/docs/plugins/discovery.asciidoc +++ b/docs/plugins/discovery.asciidoc @@ -21,10 +21,6 @@ The Azure Classic discovery plugin uses the Azure Classic API for unicast discov The Google Compute Engine discovery plugin uses the GCE API for unicast discovery. -<>:: - -The File-based discovery plugin allows providing the unicast hosts list through a dynamically updatable file. - [float] ==== Community contributed discovery plugins @@ -38,5 +34,3 @@ include::discovery-ec2.asciidoc[] include::discovery-azure-classic.asciidoc[] include::discovery-gce.asciidoc[] - -include::discovery-file.asciidoc[] diff --git a/docs/reference/cat/plugins.asciidoc b/docs/reference/cat/plugins.asciidoc index 9cb83321835..265a9e270f5 100644 --- a/docs/reference/cat/plugins.asciidoc +++ b/docs/reference/cat/plugins.asciidoc @@ -23,7 +23,6 @@ U7321H6 analysis-stempel {version} The Stempel (Polish) Analysis plugin i U7321H6 analysis-ukrainian {version} The Ukrainian Analysis plugin integrates the Lucene UkrainianMorfologikAnalyzer into elasticsearch. 
U7321H6 discovery-azure-classic {version} The Azure Classic Discovery plugin allows to use Azure Classic API for the unicast discovery mechanism
U7321H6 discovery-ec2 {version} The EC2 discovery plugin allows to use AWS API for the unicast discovery mechanism.
-U7321H6 discovery-file {version} Discovery file plugin enables unicast discovery from hosts stored in a file.
U7321H6 discovery-gce {version} The Google Compute Engine (GCE) Discovery plugin allows to use GCE API for the unicast discovery mechanism.
U7321H6 ingest-attachment {version} Ingest processor that uses Apache Tika to extract contents
U7321H6 ingest-geoip {version} Ingest processor that looks up geo data based on IP addresses using the Maxmind geo database
diff --git a/docs/reference/migration/migrate_7_0/plugins.asciidoc b/docs/reference/migration/migrate_7_0/plugins.asciidoc
index f8434993078..462823a61fd 100644
--- a/docs/reference/migration/migrate_7_0/plugins.asciidoc
+++ b/docs/reference/migration/migrate_7_0/plugins.asciidoc
@@ -22,4 +22,12 @@ See {plugins}/repository-gcs-client.html#repository-gcs-client[Google Cloud Stor
 ==== Analysis Plugin changes
 
 * The misspelled helper method `requriesAnalysisSettings(AnalyzerProvider provider)` has been
-renamed to `requiresAnalysisSettings`
\ No newline at end of file
+renamed to `requiresAnalysisSettings`
+
+==== File-based discovery plugin
+
+* This plugin has been removed since its functionality is now part of
+Elasticsearch and requires no plugin. The location of the hosts file has moved
+from `$ES_PATH_CONF/discovery-file/unicast_hosts.txt` to
+`$ES_PATH_CONF/unicast_hosts.txt`. See <> for further information.
diff --git a/plugins/discovery-file/build.gradle b/plugins/discovery-file/build.gradle
deleted file mode 100644
index e7f2b344271..00000000000
--- a/plugins/discovery-file/build.gradle
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.elasticsearch.gradle.test.ClusterConfiguration
-import org.elasticsearch.gradle.test.ClusterFormationTasks
-import org.elasticsearch.gradle.test.NodeInfo
-
-esplugin {
-  description 'Discovery file plugin enables unicast discovery from hosts stored in a file.'
- classname 'org.elasticsearch.discovery.file.FileBasedDiscoveryPlugin'
-}
-
-bundlePlugin {
-  from('config/discovery-file') {
-    into 'config'
-  }
-}
-
-task setupSeedNodeAndUnicastHostsFile(type: DefaultTask) {
-  mustRunAfter(precommit)
-}
-// setup the initial cluster with one node that will serve as the seed node
-// for unicast discovery
-ClusterConfiguration config = new ClusterConfiguration(project)
-config.distribution = System.getProperty('tests.distribution', 'integ-test-zip')
-config.clusterName = 'discovery-file-test-cluster'
-List nodes = ClusterFormationTasks.setup(project, 'initialCluster', setupSeedNodeAndUnicastHostsFile, config)
-File srcUnicastHostsFile = file('build/cluster/unicast_hosts.txt')
-
-// write the unicast_hosts.txt file to a temporary location to be used by the second cluster
-setupSeedNodeAndUnicastHostsFile.doLast {
-  // write the unicast_hosts.txt file to a temp file in the build directory
-  srcUnicastHostsFile.setText(nodes.get(0).transportUri(), 'UTF-8')
-}
-
-// second cluster, which will connect to the first via the unicast_hosts.txt file
-integTestCluster {
-  dependsOn setupSeedNodeAndUnicastHostsFile
-  clusterName = 'discovery-file-test-cluster'
-  setting 'discovery.zen.hosts_provider', 'file'
-  extraConfigFile 'discovery-file/unicast_hosts.txt', srcUnicastHostsFile
-}
-
-integTestRunner.finalizedBy ':plugins:discovery-file:initialCluster#stop'
-
diff --git a/plugins/discovery-file/config/discovery-file/unicast_hosts.txt b/plugins/discovery-file/config/discovery-file/unicast_hosts.txt
deleted file mode 100644
index 5e265e0f295..00000000000
--- a/plugins/discovery-file/config/discovery-file/unicast_hosts.txt
+++ /dev/null
@@ -1,20 +0,0 @@
-# The unicast_hosts.txt file contains the list of unicast hosts to connect to
-# for pinging during the discovery process, when using the file-based discovery
-# mechanism. This file should contain one entry per line, where an entry is a
-# host/port combination. The host and port should be separated by a `:`. If
-# the port is left off, a default port of 9300 is assumed. For example, if the
-# cluster has three nodes that participate in the discovery process:
-# (1) 10.10.10.5 running on port 9300 (2) 10.10.10.6 running on port 9305
-# and (3) 10.10.10.5 running on port 10005, then this file should contain the
-# following text:
-#
-#10.10.10.5
-#10.10.10.6:9305
-#10.10.10.5:10005
-#
-# For IPv6 addresses, make sure to put a bracket around the host part of the address,
-# for example: [2001:cdba:0000:0000:0000:0000:3257:9652]:9301 (where 9301 is the port).
-#
-# NOTE: all lines starting with a `#` are comments, and comments must exist
-# on lines of their own (i.e. comments cannot begin in the middle of a line)
-#
\ No newline at end of file
diff --git a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java
deleted file mode 100644
index 48fa49b9a8a..00000000000
--- a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.discovery.file; - -import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.network.NetworkService; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.discovery.zen.UnicastHostsProvider; -import org.elasticsearch.plugins.DiscoveryPlugin; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.transport.TransportService; - -import java.util.Collections; -import java.util.Map; -import java.util.function.Supplier; - -public class FileBasedDiscoveryPlugin extends Plugin implements DiscoveryPlugin { - - private final DeprecationLogger deprecationLogger; - static final String DEPRECATION_MESSAGE - = "File-based discovery is now built into Elasticsearch and does not require the discovery-file plugin"; - - public FileBasedDiscoveryPlugin(Settings settings) { - deprecationLogger = new DeprecationLogger(Loggers.getLogger(this.getClass(), settings)); - } - - @Override - public Map> getZenHostsProviders(TransportService transportService, - NetworkService networkService) { - deprecationLogger.deprecated(DEPRECATION_MESSAGE); - return Collections.emptyMap(); - } -} diff --git a/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedDiscoveryClientYamlTestSuiteIT.java b/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedDiscoveryClientYamlTestSuiteIT.java deleted file mode 100644 index d2ac2095bdf..00000000000 --- a/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedDiscoveryClientYamlTestSuiteIT.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.discovery.file; - -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; -import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; - -/** - * Integration tests to make sure the file-based discovery plugin works in a cluster. 
- */ -public class FileBasedDiscoveryClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - - public FileBasedDiscoveryClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { - super(testCandidate); - } - - @ParametersFactory - public static Iterable parameters() throws Exception { - return ESClientYamlSuiteTestCase.createParameters(); - } -} diff --git a/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPluginDeprecationTests.java b/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPluginDeprecationTests.java deleted file mode 100644 index 643c7b2c95c..00000000000 --- a/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPluginDeprecationTests.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.discovery.file; - -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.ESTestCase; - -import static org.elasticsearch.discovery.file.FileBasedDiscoveryPlugin.DEPRECATION_MESSAGE; - -public class FileBasedDiscoveryPluginDeprecationTests extends ESTestCase { - public void testDeprecationWarning() { - new FileBasedDiscoveryPlugin(Settings.EMPTY).getZenHostsProviders(null, null); - assertWarnings(DEPRECATION_MESSAGE); - } -} diff --git a/plugins/discovery-file/src/test/resources/rest-api-spec/test/discovery_file/10_basic.yml b/plugins/discovery-file/src/test/resources/rest-api-spec/test/discovery_file/10_basic.yml deleted file mode 100644 index 138115da112..00000000000 --- a/plugins/discovery-file/src/test/resources/rest-api-spec/test/discovery_file/10_basic.yml +++ /dev/null @@ -1,13 +0,0 @@ -# Integration tests for file-based discovery -# -"Ensure cluster formed successfully with discovery file": - # make sure both nodes joined the cluster - - do: - cluster.health: - wait_for_nodes: 2 - - # make sure the cluster was formed with the correct name - - do: - cluster.state: {} - - - match: { cluster_name: 'discovery-file-test-cluster' } # correct cluster name, we formed the cluster we expected to diff --git a/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash index 7aeb03851a5..9a1ff6f2e23 100644 --- a/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash +++ b/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash @@ -224,10 +224,6 @@ fi install_and_check_plugin discovery ec2 aws-java-sdk-core-*.jar } -@test "[$GROUP] install discovery-file plugin" { - install_and_check_plugin discovery file -} - @test "[$GROUP] install ingest-attachment plugin" { # we specify the version on the 
poi-3.17.jar so that the test does # not spuriously pass if the jar is missing but the other poi jars @@ -364,10 +360,6 @@ fi remove_plugin discovery-ec2 } -@test "[$GROUP] remove discovery-file plugin" { - remove_plugin discovery-file -} - @test "[$GROUP] remove ingest-attachment plugin" { remove_plugin ingest-attachment } diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/FileBasedUnicastHostsProvider.java b/server/src/main/java/org/elasticsearch/discovery/zen/FileBasedUnicastHostsProvider.java index f339ae43a70..f9b20580ecd 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/FileBasedUnicastHostsProvider.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/FileBasedUnicastHostsProvider.java @@ -49,23 +49,21 @@ public class FileBasedUnicastHostsProvider extends AbstractComponent implements public static final String UNICAST_HOSTS_FILE = "unicast_hosts.txt"; private final Path unicastHostsFilePath; - private final Path legacyUnicastHostsFilePath; public FileBasedUnicastHostsProvider(Settings settings, Path configFile) { super(settings); this.unicastHostsFilePath = configFile.resolve(UNICAST_HOSTS_FILE); - this.legacyUnicastHostsFilePath = configFile.resolve("discovery-file").resolve(UNICAST_HOSTS_FILE); } private List getHostsList() { if (Files.exists(unicastHostsFilePath)) { - return readFileContents(unicastHostsFilePath); - } - - if (Files.exists(legacyUnicastHostsFilePath)) { - deprecationLogger.deprecated("Found dynamic hosts list at [{}] but this path is deprecated. This list should be at [{}] " + - "instead. Support for the deprecated path will be removed in future.", legacyUnicastHostsFilePath, unicastHostsFilePath); - return readFileContents(legacyUnicastHostsFilePath); + try (Stream lines = Files.lines(unicastHostsFilePath)) { + return lines.filter(line -> line.startsWith("#") == false) // lines starting with `#` are comments + .collect(Collectors.toList()); + } catch (IOException e) { + logger.warn(() -> new ParameterizedMessage("failed to read file [{}]", unicastHostsFilePath), e); + return Collections.emptyList(); + } } logger.warn("expected, but did not find, a dynamic hosts list at [{}]", unicastHostsFilePath); @@ -73,16 +71,6 @@ public class FileBasedUnicastHostsProvider extends AbstractComponent implements return Collections.emptyList(); } - private List readFileContents(Path path) { - try (Stream lines = Files.lines(path)) { - return lines.filter(line -> line.startsWith("#") == false) // lines starting with `#` are comments - .collect(Collectors.toList()); - } catch (IOException e) { - logger.warn(() -> new ParameterizedMessage("failed to read file [{}]", unicastHostsFilePath), e); - return Collections.emptyList(); - } - } - @Override public List buildDynamicHosts(HostsResolver hostsResolver) { final List transportAddresses = hostsResolver.resolveHosts(getHostsList(), 1); diff --git a/server/src/test/java/org/elasticsearch/discovery/zen/FileBasedUnicastHostsProviderTests.java b/server/src/test/java/org/elasticsearch/discovery/zen/FileBasedUnicastHostsProviderTests.java index 8922a38ea1e..b45daaadfa5 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/FileBasedUnicastHostsProviderTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/FileBasedUnicastHostsProviderTests.java @@ -52,11 +52,9 @@ import static org.elasticsearch.discovery.zen.FileBasedUnicastHostsProvider.UNIC public class FileBasedUnicastHostsProviderTests extends ESTestCase { - private boolean legacyLocation; private ThreadPool threadPool; 
private ExecutorService executorService; private MockTransportService transportService; - private Path configPath; @Before public void setUp() throws Exception { @@ -108,24 +106,12 @@ public class FileBasedUnicastHostsProviderTests extends ESTestCase { assertEquals(9300, nodes.get(2).getPort()); } - public void testBuildDynamicNodesLegacyLocation() throws Exception { - legacyLocation = true; - testBuildDynamicNodes(); - assertDeprecatedLocationWarning(); - } - public void testEmptyUnicastHostsFile() throws Exception { final List hostEntries = Collections.emptyList(); final List addresses = setupAndRunHostProvider(hostEntries); assertEquals(0, addresses.size()); } - public void testEmptyUnicastHostsFileLegacyLocation() throws Exception { - legacyLocation = true; - testEmptyUnicastHostsFile(); - assertDeprecatedLocationWarning(); - } - public void testUnicastHostsDoesNotExist() { final Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); final FileBasedUnicastHostsProvider provider = new FileBasedUnicastHostsProvider(settings, createTempDir().toAbsolutePath()); @@ -141,12 +127,6 @@ public class FileBasedUnicastHostsProviderTests extends ESTestCase { assertEquals(0, addresses.size()); } - public void testInvalidHostEntriesLegacyLocation() throws Exception { - legacyLocation = true; - testInvalidHostEntries(); - assertDeprecatedLocationWarning(); - } - public void testSomeInvalidHostEntries() throws Exception { final List hostEntries = Arrays.asList("192.168.0.1:9300:9300", "192.168.0.1:9301"); final List addresses = setupAndRunHostProvider(hostEntries); @@ -155,12 +135,6 @@ public class FileBasedUnicastHostsProviderTests extends ESTestCase { assertEquals(9301, addresses.get(0).getPort()); } - public void testSomeInvalidHostEntriesLegacyLocation() throws Exception { - legacyLocation = true; - testSomeInvalidHostEntries(); - assertDeprecatedLocationWarning(); - } - // sets up the config dir, writes to the unicast hosts file in the config dir, // and then runs the file-based unicast host provider to get the list of discovery nodes private List setupAndRunHostProvider(final List hostEntries) throws IOException { @@ -168,15 +142,9 @@ public class FileBasedUnicastHostsProviderTests extends ESTestCase { final Settings settings = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), homeDir) .build(); - if (randomBoolean()) { - configPath = homeDir.resolve("config"); - } else { - configPath = createTempDir(); - } - final Path discoveryFilePath = legacyLocation ? configPath.resolve("discovery-file") : configPath; - Files.createDirectories(discoveryFilePath); - final Path unicastHostsPath = discoveryFilePath.resolve(UNICAST_HOSTS_FILE); - try (BufferedWriter writer = Files.newBufferedWriter(unicastHostsPath)) { + final Path configPath = randomBoolean() ? homeDir.resolve("config") : createTempDir(); + Files.createDirectories(configPath); + try (BufferedWriter writer = Files.newBufferedWriter(configPath.resolve(UNICAST_HOSTS_FILE))) { writer.write(String.join("\n", hostEntries)); } @@ -184,12 +152,4 @@ public class FileBasedUnicastHostsProviderTests extends ESTestCase { UnicastZenPing.resolveHostsLists(executorService, logger, hosts, limitPortCounts, transportService, TimeValue.timeValueSeconds(10))); } - - private void assertDeprecatedLocationWarning() { - assertWarnings("Found dynamic hosts list at [" + - configPath.resolve("discovery-file").resolve(UNICAST_HOSTS_FILE) + - "] but this path is deprecated. 
This list should be at [" + - configPath.resolve(UNICAST_HOSTS_FILE) + - "] instead. Support for the deprecated path will be removed in future."); - } } From 0d4683850c069d973050880f63970c7fa57b72af Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Tue, 18 Sep 2018 14:12:18 +0300 Subject: [PATCH 18/32] Moved the problematic tests to the tests file that is not considered when certain locales are used (#33785) --- x-pack/qa/sql/src/main/resources/case-functions.sql-spec | 6 ++++++ .../qa/sql/src/main/resources/string-functions.sql-spec | 8 -------- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/x-pack/qa/sql/src/main/resources/case-functions.sql-spec b/x-pack/qa/sql/src/main/resources/case-functions.sql-spec index 899d7cb0a6c..f18f9c7eaa1 100644 --- a/x-pack/qa/sql/src/main/resources/case-functions.sql-spec +++ b/x-pack/qa/sql/src/main/resources/case-functions.sql-spec @@ -11,3 +11,9 @@ SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT upperCasingTheSecondLetterFromTheRightFromFirstNameWithWhere SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) f, COUNT(*) c FROM "test_emp" WHERE CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1))='AlejandRo' GROUP BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) ORDER BY CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT(RIGHT("first_name",2),1))),RIGHT("first_name",1)) LIMIT 10; + +ucaseInline1 +SELECT UCASE('ElAsTiC') upper; + +ucaseInline3 +SELECT UCASE(' elastic ') upper; \ No newline at end of file diff --git a/x-pack/qa/sql/src/main/resources/string-functions.sql-spec b/x-pack/qa/sql/src/main/resources/string-functions.sql-spec index 8fe35780443..f039e5c487e 100644 --- a/x-pack/qa/sql/src/main/resources/string-functions.sql-spec +++ b/x-pack/qa/sql/src/main/resources/string-functions.sql-spec @@ -157,17 +157,9 @@ SELECT SUBSTRING('Elasticsearch', 10, 10) sub; ucaseFilter SELECT UCASE(gender) uppercased, COUNT(*) count FROM "test_emp" WHERE UCASE(gender) = 'F' GROUP BY UCASE(gender); -//https://github.com/elastic/elasticsearch/issues/33687 -//ucaseInline1 -//SELECT UCASE('ElAsTiC') upper; - ucaseInline2 SELECT UCASE('') upper; -//https://github.com/elastic/elasticsearch/issues/33687 -//ucaseInline3 -//SELECT UCASE(' elastic ') upper; - // // Group and order by // From 8e0d74adadab17b8aff4dfdf9e5a37fcaffa145f Mon Sep 17 00:00:00 2001 From: Dimitris Athanasiou Date: Tue, 18 Sep 2018 12:56:37 +0100 Subject: [PATCH 19/32] [ML][HLRC] Remove deleted property from Job (#33763) The deleted property is meant to be used internally. Users of the client should not need interact with that property. 
--- .../client/ml/job/config/Job.java | 27 +++---------------- 1 file changed, 4 insertions(+), 23 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Job.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Job.java index aff74271f1c..7740d8cfc51 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Job.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Job.java @@ -66,7 +66,6 @@ public class Job implements ToXContentObject { public static final ParseField RESULTS_RETENTION_DAYS = new ParseField("results_retention_days"); public static final ParseField MODEL_SNAPSHOT_ID = new ParseField("model_snapshot_id"); public static final ParseField RESULTS_INDEX_NAME = new ParseField("results_index_name"); - public static final ParseField DELETED = new ParseField("deleted"); public static final ObjectParser PARSER = new ObjectParser<>("job_details", true, Builder::new); @@ -100,7 +99,6 @@ public class Job implements ToXContentObject { PARSER.declareField(Builder::setCustomSettings, (p, c) -> p.map(), CUSTOM_SETTINGS, ValueType.OBJECT); PARSER.declareStringOrNull(Builder::setModelSnapshotId, MODEL_SNAPSHOT_ID); PARSER.declareString(Builder::setResultsIndexName, RESULTS_INDEX_NAME); - PARSER.declareBoolean(Builder::setDeleted, DELETED); } private final String jobId; @@ -123,14 +121,13 @@ public class Job implements ToXContentObject { private final Map customSettings; private final String modelSnapshotId; private final String resultsIndexName; - private final boolean deleted; private Job(String jobId, String jobType, List groups, String description, Date createTime, Date finishedTime, Date lastDataTime, Long establishedModelMemory, AnalysisConfig analysisConfig, AnalysisLimits analysisLimits, DataDescription dataDescription, ModelPlotConfig modelPlotConfig, Long renormalizationWindowDays, TimeValue backgroundPersistInterval, Long modelSnapshotRetentionDays, Long resultsRetentionDays, Map customSettings, - String modelSnapshotId, String resultsIndexName, boolean deleted) { + String modelSnapshotId, String resultsIndexName) { this.jobId = jobId; this.jobType = jobType; @@ -151,7 +148,6 @@ public class Job implements ToXContentObject { this.customSettings = customSettings == null ? 
null : Collections.unmodifiableMap(customSettings); this.modelSnapshotId = modelSnapshotId; this.resultsIndexName = resultsIndexName; - this.deleted = deleted; } /** @@ -296,10 +292,6 @@ public class Job implements ToXContentObject { return modelSnapshotId; } - public boolean isDeleted() { - return deleted; - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -359,9 +351,6 @@ public class Job implements ToXContentObject { if (resultsIndexName != null) { builder.field(RESULTS_INDEX_NAME.getPreferredName(), resultsIndexName); } - if (params.paramAsBoolean("all", false)) { - builder.field(DELETED.getPreferredName(), deleted); - } builder.endObject(); return builder; } @@ -395,8 +384,7 @@ public class Job implements ToXContentObject { && Objects.equals(this.resultsRetentionDays, that.resultsRetentionDays) && Objects.equals(this.customSettings, that.customSettings) && Objects.equals(this.modelSnapshotId, that.modelSnapshotId) - && Objects.equals(this.resultsIndexName, that.resultsIndexName) - && Objects.equals(this.deleted, that.deleted); + && Objects.equals(this.resultsIndexName, that.resultsIndexName); } @Override @@ -404,7 +392,7 @@ public class Job implements ToXContentObject { return Objects.hash(jobId, jobType, groups, description, createTime, finishedTime, lastDataTime, establishedModelMemory, analysisConfig, analysisLimits, dataDescription, modelPlotConfig, renormalizationWindowDays, backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings, - modelSnapshotId, resultsIndexName, deleted); + modelSnapshotId, resultsIndexName); } @Override @@ -437,7 +425,6 @@ public class Job implements ToXContentObject { private Map customSettings; private String modelSnapshotId; private String resultsIndexName; - private boolean deleted; private Builder() { } @@ -466,7 +453,6 @@ public class Job implements ToXContentObject { this.customSettings = job.getCustomSettings(); this.modelSnapshotId = job.getModelSnapshotId(); this.resultsIndexName = job.getResultsIndexNameNoPrefix(); - this.deleted = job.isDeleted(); } public Builder setId(String id) { @@ -573,11 +559,6 @@ public class Job implements ToXContentObject { return this; } - public Builder setDeleted(boolean deleted) { - this.deleted = deleted; - return this; - } - /** * Builds a job. 
* @@ -590,7 +571,7 @@ public class Job implements ToXContentObject { id, jobType, groups, description, createTime, finishedTime, lastDataTime, establishedModelMemory, analysisConfig, analysisLimits, dataDescription, modelPlotConfig, renormalizationWindowDays, backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings, - modelSnapshotId, resultsIndexName, deleted); + modelSnapshotId, resultsIndexName); } } } From 9fe5a273aac70d0685300de1d012566fbdada4f9 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 18 Sep 2018 15:55:16 +0200 Subject: [PATCH 20/32] [TEST] handle failed search requests differently --- .../elasticsearch/xpack/ccr/FollowIndexSecurityIT.java | 9 ++++++--- .../org/elasticsearch/xpack/ccr/FollowIndexIT.java | 10 +++++++--- 2 files changed, 13 insertions(+), 6 deletions(-) diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java b/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java index 60b9f8f23e8..a49ddd1dbef 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java @@ -274,11 +274,14 @@ public class FollowIndexSecurityIT extends ESRestTestCase { } private static void verifyCcrMonitoring(String expectedLeaderIndex, String expectedFollowerIndex) throws IOException { - ensureYellow(".monitoring-*"); - Request request = new Request("GET", "/.monitoring-*/_search"); request.setJsonEntity("{\"query\": {\"term\": {\"ccr_stats.leader_index\": \"leader_cluster:" + expectedLeaderIndex + "\"}}}"); - Map response = toMap(adminClient().performRequest(request)); + Map response; + try { + response = toMap(adminClient().performRequest(request)); + } catch (ResponseException e) { + throw new AssertionError("error while searching", e); + } int numberOfOperationsReceived = 0; int numberOfOperationsIndexed = 0; diff --git a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java index c7ecbe184de..73a15410b07 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java @@ -9,6 +9,7 @@ import org.apache.http.HttpHost; import org.apache.http.util.EntityUtils; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; @@ -160,11 +161,14 @@ public class FollowIndexIT extends ESRestTestCase { } private static void verifyCcrMonitoring(final String expectedLeaderIndex, final String expectedFollowerIndex) throws IOException { - ensureYellow(".monitoring-*"); - Request request = new Request("GET", "/.monitoring-*/_search"); request.setJsonEntity("{\"query\": {\"term\": {\"ccr_stats.leader_index\": \"leader_cluster:" + expectedLeaderIndex + "\"}}}"); - Map response = toMap(client().performRequest(request)); + Map response; + try { + response = toMap(client().performRequest(request)); + } catch (ResponseException e) { + throw new 
AssertionError("error while searching", e); + } int numberOfOperationsReceived = 0; int numberOfOperationsIndexed = 0; From 3596512e6a3eeb74316f035d0dea1b31eafb60f3 Mon Sep 17 00:00:00 2001 From: Dan Tennery-Spalding Date: Tue, 18 Sep 2018 07:46:22 -0700 Subject: [PATCH 21/32] [DOCS] Corrected several grammar errors (#33781) --- docs/reference/search/request/sort.asciidoc | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/reference/search/request/sort.asciidoc b/docs/reference/search/request/sort.asciidoc index 2cee0f3a58c..544bea86b0d 100644 --- a/docs/reference/search/request/sort.asciidoc +++ b/docs/reference/search/request/sort.asciidoc @@ -1,7 +1,7 @@ [[search-request-sort]] === Sort -Allows to add one or more sort on specific fields. Each sort can be +Allows you to add one or more sorts on specific fields. Each sort can be reversed as well. The sort is defined on a per field level, with special field name for `_score` to sort by score, and `_doc` to sort by index order. @@ -223,7 +223,7 @@ scripts and sorting by geo distance. ==== Missing Values The `missing` parameter specifies how docs which are missing -the field should be treated: The `missing` value can be +the sort field should be treated: The `missing` value can be set to `_last`, `_first`, or a custom value (that will be used for missing docs as the sort value). The default is `_last`. @@ -250,7 +250,7 @@ the `nested_filter` then a missing value is used. ==== Ignoring Unmapped Fields By default, the search request will fail if there is no mapping -associated with a field. The `unmapped_type` option allows to ignore +associated with a field. The `unmapped_type` option allows you to ignore fields that have no mapping and not sort by them. The value of this parameter is used to determine what sort values to emit. Here is an example of how it can be used: @@ -322,7 +322,7 @@ GET /_search `ignore_unmapped`:: Indicates if the unmapped field should be treated as a missing value. Setting it to `true` is equivalent to specifying - an `unmapped_type` in the field sort. The default is `false` (unmapped field are causing the search to fail). + an `unmapped_type` in the field sort. The default is `false` (unmapped field cause the search to fail). NOTE: geo distance sorting does not support configurable missing values: the distance will always be considered equal to +Infinity+ when a document does not From 32ee6148d2eb1865e491e162ad5bd7e595cf0bb8 Mon Sep 17 00:00:00 2001 From: Abdon Pijpelink Date: Tue, 18 Sep 2018 16:57:33 +0200 Subject: [PATCH 22/32] [DOCS] Clarify scoring for multi_match phrase type (#32672) The original statement "Runs a match_phrase query on each field and combines the _score from each field." for the phrase type is a but misleading. The phrase type behaves like the best_fields type and does not combine the scores of each fields. --- docs/reference/query-dsl/multi-match-query.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/reference/query-dsl/multi-match-query.asciidoc b/docs/reference/query-dsl/multi-match-query.asciidoc index edb6ff11da7..296689db289 100644 --- a/docs/reference/query-dsl/multi-match-query.asciidoc +++ b/docs/reference/query-dsl/multi-match-query.asciidoc @@ -83,8 +83,8 @@ parameter, which can be set to: were one big field. Looks for each word in *any* field. See <>. -`phrase`:: Runs a `match_phrase` query on each field and combines - the `_score` from each field. See <>. 
+`phrase`:: Runs a `match_phrase` query on each field and uses the `_score`
+ from the best field. See <>.
 
 `phrase_prefix`:: Runs a `match_phrase_prefix` query on each field and combines
 the `_score` from each field. See <>.

From 3928921a1d38ccff2bd215f2cccd5876b50503b8 Mon Sep 17 00:00:00 2001
From: Tim Heckel
Date: Tue, 18 Sep 2018 09:59:26 -0500
Subject: [PATCH 23/32] [DOCS] Update scroll.asciidoc (#32530)

---
 docs/reference/search/request/scroll.asciidoc | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/reference/search/request/scroll.asciidoc b/docs/reference/search/request/scroll.asciidoc
index 0fd6979ef95..c2d6dab550c 100644
--- a/docs/reference/search/request/scroll.asciidoc
+++ b/docs/reference/search/request/scroll.asciidoc
@@ -79,8 +79,8 @@ next batch of results until there are no more results left to return, ie the
 `hits` array is empty.
 
 IMPORTANT: The initial search request and each subsequent scroll request each
-return a `_scroll_id`, which may change with each request -- only the most
-recent `_scroll_id` should be used.
+return a `_scroll_id`. While the `_scroll_id` may change between requests, it doesn’t
+always change — in any case, only the most recently received `_scroll_id` should be used.
 
 NOTE: If the request specifies aggregations, only the initial search response
 will contain the aggregations results.

From 7bed91549997a5f5736eae4bae1eb7f3f38843d7 Mon Sep 17 00:00:00 2001
From: Tomas Della Vedova
Date: Tue, 18 Sep 2018 17:05:10 +0200
Subject: [PATCH 24/32] [DOCS] Fixed list formatting (#32963)

---
 .../resources/rest-api-spec/test/README.asciidoc | 15 +++++++++------
 1 file changed, 9 insertions(+), 6 deletions(-)

diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/README.asciidoc b/rest-api-spec/src/main/resources/rest-api-spec/test/README.asciidoc
index 3ee03403874..d4b04ce2511 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/README.asciidoc
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/README.asciidoc
@@ -19,9 +19,10 @@ Test file structure
--------------------
 
 A YAML test file consists of:
-* an optional `setup` section, followed by
-* an optional `teardown` section, followed by
-* one or more test sections
+
+- an optional `setup` section, followed by
+- an optional `teardown` section, followed by
+- one or more test sections
 
 For instance:
 
@@ -216,11 +217,13 @@ sent to nodes that match the `node_selector`. It looks like this:
 
 If you list multiple selectors then the request will only go to nodes that
 match all of those selectors. The following selectors are supported:
-* `version`: Only nodes whose version is within the range will receive the
+
+- `version`: Only nodes whose version is within the range will receive the
 request. The syntax for the pattern is the same as when `version` is within
 `skip`.
-* `attribute`: Only nodes that have an attribute matching the name and value
-of the provided attribute match. Looks like:
+- `attribute`: Only nodes that have an attribute matching the name and value
+of the provided attribute match.
+Looks like:
 ....
     node_selector:
         attribute:

From 91e45ca21b9490fe46d39aa6240a289a59304ab8 Mon Sep 17 00:00:00 2001
From: Costin Leau
Date: Tue, 18 Sep 2018 18:51:48 +0300
Subject: [PATCH 25/32] SQL: Better handling of number parsing exceptions (#33776)

Add proper exceptions in case the parsing of numbers (too large, invalid
format) fails.
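As a standalone illustration of the pattern (the class name is made up for the sketch): try the fast `Long.parseLong` path first, then fall back to `BigInteger` solely to distinguish a well-formed literal that overflows from input that is not a number at all:

import java.math.BigInteger;

// Illustrative sketch of the new error handling: BigInteger is used only
// to classify the failure, never to produce a value.
public class NumberParsingSketch {
    static String classify(String text) {
        try {
            Long.parseLong(text);
            return "ok";
        } catch (NumberFormatException nfe) {
            try {
                new BigInteger(text).longValueExact();
            } catch (ArithmeticException ae) {
                return "Number [" + text + "] is too large";
            } catch (NumberFormatException ex) {
                // not an integer at all; fall through to the generic message
            }
            return "Cannot parse number [" + text + "]";
        }
    }

    public static void main(String[] args) {
        System.out.println(classify("123456789098765432101")); // too large
        System.out.println(classify("1a2"));                   // cannot parse
    }
}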
Close #33622 --- .../xpack/sql/parser/ExpressionBuilder.java | 42 ++++++++++++++++--- .../xpack/sql/parser/ExpressionTests.java | 19 +++++++++ 2 files changed, 56 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java index 0c7ecbc7ddf..2719d39bbec 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java @@ -95,7 +95,7 @@ import org.joda.time.format.DateTimeFormatter; import org.joda.time.format.DateTimeFormatterBuilder; import org.joda.time.format.ISODateTimeFormat; -import java.math.BigDecimal; +import java.math.BigInteger; import java.util.List; import java.util.Locale; import java.util.Map; @@ -458,7 +458,13 @@ abstract class ExpressionBuilder extends IdentifierBuilder { @Override public Expression visitBooleanLiteral(BooleanLiteralContext ctx) { - return new Literal(source(ctx), Booleans.parseBoolean(ctx.getText().toLowerCase(Locale.ROOT), false), DataType.BOOLEAN); + boolean value; + try { + value = Booleans.parseBoolean(ctx.getText().toLowerCase(Locale.ROOT), false); + } catch(IllegalArgumentException iae) { + throw new ParsingException(source(ctx), iae.getMessage()); + } + return new Literal(source(ctx), Boolean.valueOf(value), DataType.BOOLEAN); } @Override @@ -472,14 +478,40 @@ abstract class ExpressionBuilder extends IdentifierBuilder { @Override public Literal visitDecimalLiteral(DecimalLiteralContext ctx) { - return new Literal(source(ctx), new BigDecimal(ctx.getText()).doubleValue(), DataType.DOUBLE); + double value; + try { + value = Double.parseDouble(ctx.getText()); + } catch (NumberFormatException nfe) { + throw new ParsingException(source(ctx), "Cannot parse number [{}]", ctx.getText()); + } + if (Double.isInfinite(value)) { + throw new ParsingException(source(ctx), "Number [{}] is too large", ctx.getText()); + } + if (Double.isNaN(value)) { + throw new ParsingException(source(ctx), "[{}] cannot be parsed as a number (NaN)", ctx.getText()); + } + return new Literal(source(ctx), Double.valueOf(value), DataType.DOUBLE); } @Override public Literal visitIntegerLiteral(IntegerLiteralContext ctx) { - BigDecimal bigD = new BigDecimal(ctx.getText()); + long value; + try { + value = Long.parseLong(ctx.getText()); + } catch (NumberFormatException nfe) { + try { + BigInteger bi = new BigInteger(ctx.getText()); + try { + bi.longValueExact(); + } catch (ArithmeticException ae) { + throw new ParsingException(source(ctx), "Number [{}] is too large", ctx.getText()); + } + } catch (NumberFormatException ex) { + // parsing fails, go through + } + throw new ParsingException(source(ctx), "Cannot parse number [{}]", ctx.getText()); + } - long value = bigD.longValueExact(); DataType type = DataType.LONG; // try to downsize to int if possible (since that's the most common type) if ((int) value == value) { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java index 004118e8cd2..466e749c9a3 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java @@ -22,6 +22,15 @@ public class ExpressionTests extends ESTestCase { 
assertEquals("LEFT", uf.functionName()); } + + public void testLiteralBoolean() throws Exception { + Expression lt = parser.createExpression("TRUE"); + assertEquals(Literal.class, lt.getClass()); + Literal l = (Literal) lt; + assertEquals(Boolean.TRUE, l.value()); + assertEquals(DataType.BOOLEAN, l.dataType()); + } + public void testLiteralDouble() throws Exception { Expression lt = parser.createExpression(String.valueOf(Double.MAX_VALUE)); assertEquals(Literal.class, lt.getClass()); @@ -92,4 +101,14 @@ public class ExpressionTests extends ESTestCase { assertEquals(Integer.valueOf(Byte.MAX_VALUE), l.value()); assertEquals(DataType.INTEGER, l.dataType()); } + + public void testLiteralIntegerInvalid() throws Exception { + ParsingException ex = expectThrows(ParsingException.class, () -> parser.createExpression("123456789098765432101")); + assertEquals("Number [123456789098765432101] is too large", ex.getErrorMessage()); + } + + public void testLiteralDecimalTooBig() throws Exception { + ParsingException ex = expectThrows(ParsingException.class, () -> parser.createExpression("1.9976931348623157e+308")); + assertEquals("Number [1.9976931348623157e+308] is too large", ex.getErrorMessage()); + } } \ No newline at end of file From bc12a948b5c7914f479d0de4cf3d4e3fa7a3fcf2 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 18 Sep 2018 12:04:35 -0400 Subject: [PATCH 26/32] Checkstyle: Package declarations (#33784) Make sure that all java files have a package declaration and that all of the package declarations line up with the directory structure. This would have caught the bug that I caused in 190ea9a6def9082348d983b16420ef02607d4c17 and fixed in b6d68bd805f1858a0210e381402236dea1d42509. --- buildSrc/src/main/resources/checkstyle.xml | 6 ++++++ buildSrc/src/main/resources/checkstyle_suppressions.xml | 4 ++++ 2 files changed, 10 insertions(+) diff --git a/buildSrc/src/main/resources/checkstyle.xml b/buildSrc/src/main/resources/checkstyle.xml index e1000b3e4a9..939d48e72ce 100644 --- a/buildSrc/src/main/resources/checkstyle.xml +++ b/buildSrc/src/main/resources/checkstyle.xml @@ -62,7 +62,13 @@ --> + + + + + + From 241c74efb26b3d4a774bc9e8b8450d029c91a83a Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Tue, 18 Sep 2018 18:16:40 +0200 Subject: [PATCH 27/32] upgrade to a new snapshot of Lucene 8 (7d0a7782fa) (#33812) --- buildSrc/version.properties | 2 +- .../lucene-expressions-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 - .../lucene-expressions-8.0.0-snapshot-7d0a7782fa.jar.sha1 | 1 + .../lucene-analyzers-icu-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 - .../lucene-analyzers-icu-8.0.0-snapshot-7d0a7782fa.jar.sha1 | 1 + ...lucene-analyzers-kuromoji-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 - ...lucene-analyzers-kuromoji-8.0.0-snapshot-7d0a7782fa.jar.sha1 | 1 + .../lucene-analyzers-nori-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 - .../lucene-analyzers-nori-8.0.0-snapshot-7d0a7782fa.jar.sha1 | 1 + ...lucene-analyzers-phonetic-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 - ...lucene-analyzers-phonetic-8.0.0-snapshot-7d0a7782fa.jar.sha1 | 1 + .../lucene-analyzers-smartcn-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 - .../lucene-analyzers-smartcn-8.0.0-snapshot-7d0a7782fa.jar.sha1 | 1 + .../lucene-analyzers-stempel-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 - .../lucene-analyzers-stempel-8.0.0-snapshot-7d0a7782fa.jar.sha1 | 1 + ...cene-analyzers-morfologik-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 - ...cene-analyzers-morfologik-8.0.0-snapshot-7d0a7782fa.jar.sha1 | 1 + .../lucene-analyzers-common-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 - 
.../lucene-analyzers-common-8.0.0-snapshot-7d0a7782fa.jar.sha1 | 1 + .../lucene-backward-codecs-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 - .../lucene-backward-codecs-8.0.0-snapshot-7d0a7782fa.jar.sha1 | 1 + server/licenses/lucene-core-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 - server/licenses/lucene-core-8.0.0-snapshot-7d0a7782fa.jar.sha1 | 1 + .../licenses/lucene-grouping-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 - .../licenses/lucene-grouping-8.0.0-snapshot-7d0a7782fa.jar.sha1 | 1 + .../lucene-highlighter-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 - .../lucene-highlighter-8.0.0-snapshot-7d0a7782fa.jar.sha1 | 1 + server/licenses/lucene-join-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 - server/licenses/lucene-join-8.0.0-snapshot-7d0a7782fa.jar.sha1 | 1 + .../licenses/lucene-memory-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 - .../licenses/lucene-memory-8.0.0-snapshot-7d0a7782fa.jar.sha1 | 1 + server/licenses/lucene-misc-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 - server/licenses/lucene-misc-8.0.0-snapshot-7d0a7782fa.jar.sha1 | 1 + .../licenses/lucene-queries-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 - .../licenses/lucene-queries-8.0.0-snapshot-7d0a7782fa.jar.sha1 | 1 + .../lucene-queryparser-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 - .../lucene-queryparser-8.0.0-snapshot-7d0a7782fa.jar.sha1 | 1 + .../licenses/lucene-sandbox-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 - .../licenses/lucene-sandbox-8.0.0-snapshot-7d0a7782fa.jar.sha1 | 1 + .../licenses/lucene-spatial-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 - .../licenses/lucene-spatial-8.0.0-snapshot-7d0a7782fa.jar.sha1 | 1 + .../lucene-spatial-extras-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 - .../lucene-spatial-extras-8.0.0-snapshot-7d0a7782fa.jar.sha1 | 1 + .../lucene-spatial3d-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 - .../lucene-spatial3d-8.0.0-snapshot-7d0a7782fa.jar.sha1 | 1 + .../licenses/lucene-suggest-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 - .../licenses/lucene-suggest-8.0.0-snapshot-7d0a7782fa.jar.sha1 | 1 + .../licenses/lucene-core-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 - .../licenses/lucene-core-8.0.0-snapshot-7d0a7782fa.jar.sha1 | 1 + 49 files changed, 25 insertions(+), 25 deletions(-) delete mode 100644 modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-66c671ea80.jar.sha1 create mode 100644 modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-7d0a7782fa.jar.sha1 delete mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-66c671ea80.jar.sha1 create mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-7d0a7782fa.jar.sha1 delete mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-66c671ea80.jar.sha1 create mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-7d0a7782fa.jar.sha1 delete mode 100644 plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-66c671ea80.jar.sha1 create mode 100644 plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-7d0a7782fa.jar.sha1 delete mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-66c671ea80.jar.sha1 create mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-7d0a7782fa.jar.sha1 delete mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-66c671ea80.jar.sha1 create mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-7d0a7782fa.jar.sha1 delete mode 100644 
plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-66c671ea80.jar.sha1 create mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-7d0a7782fa.jar.sha1 delete mode 100644 plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-66c671ea80.jar.sha1 create mode 100644 plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-7d0a7782fa.jar.sha1 delete mode 100644 server/licenses/lucene-analyzers-common-8.0.0-snapshot-66c671ea80.jar.sha1 create mode 100644 server/licenses/lucene-analyzers-common-8.0.0-snapshot-7d0a7782fa.jar.sha1 delete mode 100644 server/licenses/lucene-backward-codecs-8.0.0-snapshot-66c671ea80.jar.sha1 create mode 100644 server/licenses/lucene-backward-codecs-8.0.0-snapshot-7d0a7782fa.jar.sha1 delete mode 100644 server/licenses/lucene-core-8.0.0-snapshot-66c671ea80.jar.sha1 create mode 100644 server/licenses/lucene-core-8.0.0-snapshot-7d0a7782fa.jar.sha1 delete mode 100644 server/licenses/lucene-grouping-8.0.0-snapshot-66c671ea80.jar.sha1 create mode 100644 server/licenses/lucene-grouping-8.0.0-snapshot-7d0a7782fa.jar.sha1 delete mode 100644 server/licenses/lucene-highlighter-8.0.0-snapshot-66c671ea80.jar.sha1 create mode 100644 server/licenses/lucene-highlighter-8.0.0-snapshot-7d0a7782fa.jar.sha1 delete mode 100644 server/licenses/lucene-join-8.0.0-snapshot-66c671ea80.jar.sha1 create mode 100644 server/licenses/lucene-join-8.0.0-snapshot-7d0a7782fa.jar.sha1 delete mode 100644 server/licenses/lucene-memory-8.0.0-snapshot-66c671ea80.jar.sha1 create mode 100644 server/licenses/lucene-memory-8.0.0-snapshot-7d0a7782fa.jar.sha1 delete mode 100644 server/licenses/lucene-misc-8.0.0-snapshot-66c671ea80.jar.sha1 create mode 100644 server/licenses/lucene-misc-8.0.0-snapshot-7d0a7782fa.jar.sha1 delete mode 100644 server/licenses/lucene-queries-8.0.0-snapshot-66c671ea80.jar.sha1 create mode 100644 server/licenses/lucene-queries-8.0.0-snapshot-7d0a7782fa.jar.sha1 delete mode 100644 server/licenses/lucene-queryparser-8.0.0-snapshot-66c671ea80.jar.sha1 create mode 100644 server/licenses/lucene-queryparser-8.0.0-snapshot-7d0a7782fa.jar.sha1 delete mode 100644 server/licenses/lucene-sandbox-8.0.0-snapshot-66c671ea80.jar.sha1 create mode 100644 server/licenses/lucene-sandbox-8.0.0-snapshot-7d0a7782fa.jar.sha1 delete mode 100644 server/licenses/lucene-spatial-8.0.0-snapshot-66c671ea80.jar.sha1 create mode 100644 server/licenses/lucene-spatial-8.0.0-snapshot-7d0a7782fa.jar.sha1 delete mode 100644 server/licenses/lucene-spatial-extras-8.0.0-snapshot-66c671ea80.jar.sha1 create mode 100644 server/licenses/lucene-spatial-extras-8.0.0-snapshot-7d0a7782fa.jar.sha1 delete mode 100644 server/licenses/lucene-spatial3d-8.0.0-snapshot-66c671ea80.jar.sha1 create mode 100644 server/licenses/lucene-spatial3d-8.0.0-snapshot-7d0a7782fa.jar.sha1 delete mode 100644 server/licenses/lucene-suggest-8.0.0-snapshot-66c671ea80.jar.sha1 create mode 100644 server/licenses/lucene-suggest-8.0.0-snapshot-7d0a7782fa.jar.sha1 delete mode 100644 x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-66c671ea80.jar.sha1 create mode 100644 x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-7d0a7782fa.jar.sha1 diff --git a/buildSrc/version.properties b/buildSrc/version.properties index fee9a25aa35..5b611980f1c 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,5 +1,5 @@ elasticsearch = 7.0.0-alpha1 -lucene = 8.0.0-snapshot-66c671ea80 +lucene = 8.0.0-snapshot-7d0a7782fa # 
optional dependencies spatial4j = 0.7 diff --git a/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-66c671ea80.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-66c671ea80.jar.sha1 deleted file mode 100644 index 047bca7b614..00000000000 --- a/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-66c671ea80.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -58b9db095c569b4c4da491810f14e1429878b594 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-7d0a7782fa.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-7d0a7782fa.jar.sha1 new file mode 100644 index 00000000000..4904c89e62f --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-7d0a7782fa.jar.sha1 @@ -0,0 +1 @@ +cc072b68aac06a2fb9569ab7adce05302f130948 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-66c671ea80.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-66c671ea80.jar.sha1 deleted file mode 100644 index 7369f427ab2..00000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-66c671ea80.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f009ee188453aabae77fad55aea08bc60323bb3e \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-7d0a7782fa.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-7d0a7782fa.jar.sha1 new file mode 100644 index 00000000000..abc772945b1 --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-7d0a7782fa.jar.sha1 @@ -0,0 +1 @@ +429eb7e780c5a6e5200041a1f5b98bccd2623aaf \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-66c671ea80.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-66c671ea80.jar.sha1 deleted file mode 100644 index 16417bbebd1..00000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-66c671ea80.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -af3d2ae975e3560c1ea69222d6c46072857952ba \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-7d0a7782fa.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-7d0a7782fa.jar.sha1 new file mode 100644 index 00000000000..e103c8c0c7c --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-7d0a7782fa.jar.sha1 @@ -0,0 +1 @@ +837fca1b1d7ca1dc002e53171801526644e52818 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-66c671ea80.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-66c671ea80.jar.sha1 deleted file mode 100644 index 9c3524a6789..00000000000 --- a/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-66c671ea80.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f17bc5e532d9dc2786a13bd577df64023d1baae1 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-7d0a7782fa.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-7d0a7782fa.jar.sha1 new file mode 100644 index 00000000000..b7a23ee518f --- /dev/null +++ b/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-7d0a7782fa.jar.sha1 @@ -0,0 +1 @@ +1dde903172ade259cb26cbe320c25bc1d1356f89 \ No newline at 
end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-66c671ea80.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-66c671ea80.jar.sha1 deleted file mode 100644 index ac81fdd07c2..00000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-66c671ea80.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7ad89d33c1cd960c91afa05b22024137fe108567 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-7d0a7782fa.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-7d0a7782fa.jar.sha1 new file mode 100644 index 00000000000..08b07e7c2f4 --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-7d0a7782fa.jar.sha1 @@ -0,0 +1 @@ +b6ca20e96a989e6e6706b8b7b8ad8c82d2a03576 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-66c671ea80.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-66c671ea80.jar.sha1 deleted file mode 100644 index f00a29e7816..00000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-66c671ea80.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3f11fb254256d74e911b953994b47e7a95915954 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-7d0a7782fa.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-7d0a7782fa.jar.sha1 new file mode 100644 index 00000000000..3f6fed19af1 --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-7d0a7782fa.jar.sha1 @@ -0,0 +1 @@ +c96a2f25dea18b383423a41aca296734353d4bbd \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-66c671ea80.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-66c671ea80.jar.sha1 deleted file mode 100644 index 76fa8e90eae..00000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-66c671ea80.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b2348d140ef0c3e674cb81173f61c5e5f430facb \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-7d0a7782fa.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-7d0a7782fa.jar.sha1 new file mode 100644 index 00000000000..5dc03672c87 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-7d0a7782fa.jar.sha1 @@ -0,0 +1 @@ +09363c5ce111d024a6da22a5ea8dbaf54d91dbd0 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-66c671ea80.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-66c671ea80.jar.sha1 deleted file mode 100644 index 0e2c4d34ef0..00000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-66c671ea80.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -485a0c3be58a5942b4a28639f1019181ef4cd0e3 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-7d0a7782fa.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-7d0a7782fa.jar.sha1 new file mode 100644 index 00000000000..e940b50d640 --- /dev/null +++ 
b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-7d0a7782fa.jar.sha1 @@ -0,0 +1 @@ +13c3840d49480014118de99ef6e07a9e55c50172 \ No newline at end of file diff --git a/server/licenses/lucene-analyzers-common-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-analyzers-common-8.0.0-snapshot-66c671ea80.jar.sha1 deleted file mode 100644 index 72f7319e6af..00000000000 --- a/server/licenses/lucene-analyzers-common-8.0.0-snapshot-66c671ea80.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a22f1c6749ca4a3fbc9b330161a8ea3301cac8de \ No newline at end of file diff --git a/server/licenses/lucene-analyzers-common-8.0.0-snapshot-7d0a7782fa.jar.sha1 b/server/licenses/lucene-analyzers-common-8.0.0-snapshot-7d0a7782fa.jar.sha1 new file mode 100644 index 00000000000..4d9522f10de --- /dev/null +++ b/server/licenses/lucene-analyzers-common-8.0.0-snapshot-7d0a7782fa.jar.sha1 @@ -0,0 +1 @@ +dce55e44af096cb9029cb26d22a14d8a9c5223ce \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-backward-codecs-8.0.0-snapshot-66c671ea80.jar.sha1 deleted file mode 100644 index f4bf99b4a03..00000000000 --- a/server/licenses/lucene-backward-codecs-8.0.0-snapshot-66c671ea80.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -41ce415b93d75662cc2e790d09120bc0234d6b1b \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-8.0.0-snapshot-7d0a7782fa.jar.sha1 b/server/licenses/lucene-backward-codecs-8.0.0-snapshot-7d0a7782fa.jar.sha1 new file mode 100644 index 00000000000..c86294acf5a --- /dev/null +++ b/server/licenses/lucene-backward-codecs-8.0.0-snapshot-7d0a7782fa.jar.sha1 @@ -0,0 +1 @@ +d1d941758dc91ea7c2d515dd97b5d9b23b0f1874 \ No newline at end of file diff --git a/server/licenses/lucene-core-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-core-8.0.0-snapshot-66c671ea80.jar.sha1 deleted file mode 100644 index 50a21f5c504..00000000000 --- a/server/licenses/lucene-core-8.0.0-snapshot-66c671ea80.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -06c1e4fa838807059d27aaf5405cfdfe7303369c \ No newline at end of file diff --git a/server/licenses/lucene-core-8.0.0-snapshot-7d0a7782fa.jar.sha1 b/server/licenses/lucene-core-8.0.0-snapshot-7d0a7782fa.jar.sha1 new file mode 100644 index 00000000000..75200bc0c15 --- /dev/null +++ b/server/licenses/lucene-core-8.0.0-snapshot-7d0a7782fa.jar.sha1 @@ -0,0 +1 @@ +e884b8ce62a2102b24bfdbe8911674cd5b0d06d9 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-grouping-8.0.0-snapshot-66c671ea80.jar.sha1 deleted file mode 100644 index 76bdfa1c6c4..00000000000 --- a/server/licenses/lucene-grouping-8.0.0-snapshot-66c671ea80.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5b0a019a938deb58160647e7640b348bb99c10a8 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-8.0.0-snapshot-7d0a7782fa.jar.sha1 b/server/licenses/lucene-grouping-8.0.0-snapshot-7d0a7782fa.jar.sha1 new file mode 100644 index 00000000000..b1ae597fadf --- /dev/null +++ b/server/licenses/lucene-grouping-8.0.0-snapshot-7d0a7782fa.jar.sha1 @@ -0,0 +1 @@ +3870972c07d7fa41a3bc58eb65952da53a16a406 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-highlighter-8.0.0-snapshot-66c671ea80.jar.sha1 deleted file mode 100644 index 017225c0e46..00000000000 --- a/server/licenses/lucene-highlighter-8.0.0-snapshot-66c671ea80.jar.sha1 +++ /dev/null @@ -1 +0,0 
@@ -4d813f3ba0ddd56bac728edb88ed8875e6acfd18 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-8.0.0-snapshot-7d0a7782fa.jar.sha1 b/server/licenses/lucene-highlighter-8.0.0-snapshot-7d0a7782fa.jar.sha1 new file mode 100644 index 00000000000..02935671ce8 --- /dev/null +++ b/server/licenses/lucene-highlighter-8.0.0-snapshot-7d0a7782fa.jar.sha1 @@ -0,0 +1 @@ +b8f0b73cfd01fc48735f1e06f16f7ccb47fc183e \ No newline at end of file diff --git a/server/licenses/lucene-join-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-join-8.0.0-snapshot-66c671ea80.jar.sha1 deleted file mode 100644 index 29cdbbfe69f..00000000000 --- a/server/licenses/lucene-join-8.0.0-snapshot-66c671ea80.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -00c7e20b6a35ebecc875dd52bfb324967c5555d6 \ No newline at end of file diff --git a/server/licenses/lucene-join-8.0.0-snapshot-7d0a7782fa.jar.sha1 b/server/licenses/lucene-join-8.0.0-snapshot-7d0a7782fa.jar.sha1 new file mode 100644 index 00000000000..fdfab321a67 --- /dev/null +++ b/server/licenses/lucene-join-8.0.0-snapshot-7d0a7782fa.jar.sha1 @@ -0,0 +1 @@ +1d253fae720355e2ff40d529d62c2b3de403d0d0 \ No newline at end of file diff --git a/server/licenses/lucene-memory-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-memory-8.0.0-snapshot-66c671ea80.jar.sha1 deleted file mode 100644 index 49087293afa..00000000000 --- a/server/licenses/lucene-memory-8.0.0-snapshot-66c671ea80.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e4dbff54a0befdc7d67c0f39890586c220df718e \ No newline at end of file diff --git a/server/licenses/lucene-memory-8.0.0-snapshot-7d0a7782fa.jar.sha1 b/server/licenses/lucene-memory-8.0.0-snapshot-7d0a7782fa.jar.sha1 new file mode 100644 index 00000000000..d7c9cdf3e41 --- /dev/null +++ b/server/licenses/lucene-memory-8.0.0-snapshot-7d0a7782fa.jar.sha1 @@ -0,0 +1 @@ +d9ca14bcda331a425d2d7c16022fdfd1c6942924 \ No newline at end of file diff --git a/server/licenses/lucene-misc-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-misc-8.0.0-snapshot-66c671ea80.jar.sha1 deleted file mode 100644 index 3c12235dff6..00000000000 --- a/server/licenses/lucene-misc-8.0.0-snapshot-66c671ea80.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -74d17f6bdf1fa4d499f02904432aa3b1024bde88 \ No newline at end of file diff --git a/server/licenses/lucene-misc-8.0.0-snapshot-7d0a7782fa.jar.sha1 b/server/licenses/lucene-misc-8.0.0-snapshot-7d0a7782fa.jar.sha1 new file mode 100644 index 00000000000..93ec704aeae --- /dev/null +++ b/server/licenses/lucene-misc-8.0.0-snapshot-7d0a7782fa.jar.sha1 @@ -0,0 +1 @@ +200454bbfe5ec93d941d9a9d27703883122a4522 \ No newline at end of file diff --git a/server/licenses/lucene-queries-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-queries-8.0.0-snapshot-66c671ea80.jar.sha1 deleted file mode 100644 index a423deb397d..00000000000 --- a/server/licenses/lucene-queries-8.0.0-snapshot-66c671ea80.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bec78be38f777765146c35f65e247909563d6814 \ No newline at end of file diff --git a/server/licenses/lucene-queries-8.0.0-snapshot-7d0a7782fa.jar.sha1 b/server/licenses/lucene-queries-8.0.0-snapshot-7d0a7782fa.jar.sha1 new file mode 100644 index 00000000000..d57b6be7fbf --- /dev/null +++ b/server/licenses/lucene-queries-8.0.0-snapshot-7d0a7782fa.jar.sha1 @@ -0,0 +1 @@ +47915a125e54c845a4b540201cda88dc7612da08 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-queryparser-8.0.0-snapshot-66c671ea80.jar.sha1 deleted file mode 
100644 index 79195ed1d5e..00000000000 --- a/server/licenses/lucene-queryparser-8.0.0-snapshot-66c671ea80.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -74b76f8fed44400bc2a5d938ca2611a97b4d7a7c \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-8.0.0-snapshot-7d0a7782fa.jar.sha1 b/server/licenses/lucene-queryparser-8.0.0-snapshot-7d0a7782fa.jar.sha1 new file mode 100644 index 00000000000..0ed04b6f69b --- /dev/null +++ b/server/licenses/lucene-queryparser-8.0.0-snapshot-7d0a7782fa.jar.sha1 @@ -0,0 +1 @@ +e5d49e1c6ee7550234539314e600e2893e13cb80 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-sandbox-8.0.0-snapshot-66c671ea80.jar.sha1 deleted file mode 100644 index d5cd94b7fe5..00000000000 --- a/server/licenses/lucene-sandbox-8.0.0-snapshot-66c671ea80.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2f65fa728b3bc924db6538f4c3caf2fcd25451cf \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-8.0.0-snapshot-7d0a7782fa.jar.sha1 b/server/licenses/lucene-sandbox-8.0.0-snapshot-7d0a7782fa.jar.sha1 new file mode 100644 index 00000000000..41c6a4a243e --- /dev/null +++ b/server/licenses/lucene-sandbox-8.0.0-snapshot-7d0a7782fa.jar.sha1 @@ -0,0 +1 @@ +68081b60905f1b53b3705b9cfa4403b8aba44352 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-spatial-8.0.0-snapshot-66c671ea80.jar.sha1 deleted file mode 100644 index 76857b72f01..00000000000 --- a/server/licenses/lucene-spatial-8.0.0-snapshot-66c671ea80.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -916a91f0cab2d3684707c59e9adca7b3030b2c66 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-8.0.0-snapshot-7d0a7782fa.jar.sha1 b/server/licenses/lucene-spatial-8.0.0-snapshot-7d0a7782fa.jar.sha1 new file mode 100644 index 00000000000..63734717b2f --- /dev/null +++ b/server/licenses/lucene-spatial-8.0.0-snapshot-7d0a7782fa.jar.sha1 @@ -0,0 +1 @@ +c99d56a453cecc7258300fd04b438713b944f1b9 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-spatial-extras-8.0.0-snapshot-66c671ea80.jar.sha1 deleted file mode 100644 index 7ab84df992b..00000000000 --- a/server/licenses/lucene-spatial-extras-8.0.0-snapshot-66c671ea80.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -eb3e630d6013e41838fb277943ce921f256f1c61 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-8.0.0-snapshot-7d0a7782fa.jar.sha1 b/server/licenses/lucene-spatial-extras-8.0.0-snapshot-7d0a7782fa.jar.sha1 new file mode 100644 index 00000000000..3fa056da3db --- /dev/null +++ b/server/licenses/lucene-spatial-extras-8.0.0-snapshot-7d0a7782fa.jar.sha1 @@ -0,0 +1 @@ +2471966478f829b6455556346014f02ff59f50c0 \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-spatial3d-8.0.0-snapshot-66c671ea80.jar.sha1 deleted file mode 100644 index d793f4c54d9..00000000000 --- a/server/licenses/lucene-spatial3d-8.0.0-snapshot-66c671ea80.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fa10ff14eab2f579cff2f0fa33c9c7f3b24daf12 \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-8.0.0-snapshot-7d0a7782fa.jar.sha1 b/server/licenses/lucene-spatial3d-8.0.0-snapshot-7d0a7782fa.jar.sha1 new file mode 100644 index 00000000000..bd3d2e719a0 --- /dev/null +++ b/server/licenses/lucene-spatial3d-8.0.0-snapshot-7d0a7782fa.jar.sha1 @@ -0,0 +1 @@ 
+46e012be699251306ad13f4582c30d79cea4b307 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-suggest-8.0.0-snapshot-66c671ea80.jar.sha1 deleted file mode 100644 index 0ea0c2fb573..00000000000 --- a/server/licenses/lucene-suggest-8.0.0-snapshot-66c671ea80.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3dd65ca6612b4f98530847b99ab348fd83055fdf \ No newline at end of file diff --git a/server/licenses/lucene-suggest-8.0.0-snapshot-7d0a7782fa.jar.sha1 b/server/licenses/lucene-suggest-8.0.0-snapshot-7d0a7782fa.jar.sha1 new file mode 100644 index 00000000000..8a4fc23cfcd --- /dev/null +++ b/server/licenses/lucene-suggest-8.0.0-snapshot-7d0a7782fa.jar.sha1 @@ -0,0 +1 @@ +dea19dd9e971d2a0171e7d78662f732b45148a27 \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-66c671ea80.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-66c671ea80.jar.sha1 deleted file mode 100644 index 50a21f5c504..00000000000 --- a/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-66c671ea80.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -06c1e4fa838807059d27aaf5405cfdfe7303369c \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-7d0a7782fa.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-7d0a7782fa.jar.sha1 new file mode 100644 index 00000000000..75200bc0c15 --- /dev/null +++ b/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-7d0a7782fa.jar.sha1 @@ -0,0 +1 @@ +e884b8ce62a2102b24bfdbe8911674cd5b0d06d9 \ No newline at end of file From 98ccd9496275591fb81b5a24e98f5bd7ac3dc442 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 18 Sep 2018 19:53:26 +0200 Subject: [PATCH 28/32] Factor out a ChannelActionListener (#33819) We use similar / same concepts in SearchTransportService and HandledTransportAction, but both duplicate the effort with slightly different implementation details. This streamlines sending responses / exceptions back to a channel in an ActionListener with appropriate logging.
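Editor's note for readers skimming the diff below: the duplicated pattern being consolidated is an ActionListener that answers a TransportChannel exactly once, sending the response if possible and falling back to sending the exception. The following standalone sketch illustrates that shape only; Chan and ChannelListenerSketch are simplified stand-ins invented here, not the real transport classes:

// A toy stand-in for TransportChannel: one of the two sendResponse
// overloads must be invoked exactly once per request.
interface Chan {
    void sendResponse(Object response) throws Exception;
    void sendResponse(Exception error) throws Exception;
}

public class ChannelListenerSketch {
    private final Chan channel;
    private final String actionName;

    public ChannelListenerSketch(Chan channel, String actionName) {
        this.channel = channel;
        this.actionName = actionName;
    }

    // Success path: a failure to serialize/send the response is funneled
    // into the failure path so the channel is still answered.
    public void onResponse(Object response) {
        try {
            channel.sendResponse(response);
        } catch (Exception e) {
            onFailure(e);
        }
    }

    // Failure path: if even the error cannot be sent, log and give up.
    public void onFailure(Exception e) {
        try {
            channel.sendResponse(e);
        } catch (Exception inner) {
            System.err.println("failed to send error response for action [" + actionName + "]: " + inner);
        }
    }

    public static void main(String[] args) {
        Chan stdout = new Chan() {
            @Override public void sendResponse(Object response) { System.out.println("response: " + response); }
            @Override public void sendResponse(Exception error) { System.out.println("error: " + error.getMessage()); }
        };
        ChannelListenerSketch listener = new ChannelListenerSketch(stdout, "indices:data/read/search[phase/query]");
        listener.onResponse("ok");                 // prints: response: ok
        listener.onFailure(new Exception("boom")); // prints: error: boom
    }
}

The actual ChannelActionListener introduced below is additionally generic over the request/response types and reports send failures through the ES logging infrastructure rather than stderr.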
--- .../action/search/SearchTransportService.java | 22 +------ .../support/HandledTransportAction.java | 58 ++++++++++++------- 2 files changed, 41 insertions(+), 39 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java index a4ea2616e0a..9db297f4b92 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractComponent; @@ -348,25 +349,8 @@ public class SearchTransportService extends AbstractComponent { transportService.registerRequestHandler(QUERY_ACTION_NAME, ThreadPool.Names.SAME, ShardSearchTransportRequest::new, (request, channel, task) -> { - searchService.executeQueryPhase(request, (SearchTask) task, new ActionListener<SearchPhaseResult>() { - @Override - public void onResponse(SearchPhaseResult searchPhaseResult) { - try { - channel.sendResponse(searchPhaseResult); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - - @Override - public void onFailure(Exception e) { - try { - channel.sendResponse(e); - } catch (IOException e1) { - throw new UncheckedIOException(e1); - } - } - }); + searchService.executeQueryPhase(request, (SearchTask) task, new HandledTransportAction.ChannelActionListener<>( + channel, QUERY_ACTION_NAME, request)); }); TransportActionProxy.registerProxyAction(transportService, QUERY_ACTION_NAME, (request) -> ((ShardSearchRequest)request).numberOfShards() == 1 ? QueryFetchSearchResult::new : QuerySearchResult::new); diff --git a/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java b/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java index c55e0cff6f2..9de040a98b4 100644 --- a/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java @@ -18,6 +18,8 @@ */ package org.elasticsearch.action.support; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; @@ -27,7 +29,9 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportService; import java.util.function.Supplier; @@ -63,30 +67,44 @@ public abstract class HandledTransportAction<Request extends ActionRequest, Response extends ActionResponse> { class TransportHandler implements TransportRequestHandler<Request> { - @Override public final void messageReceived(final Request request, final TransportChannel channel, Task task) throws Exception { // We already got the task created on the network layer - no need to create it again on the transport layer - execute(task, request, new ActionListener<Response>() { - @Override - public void onResponse(Response response) { - try { - channel.sendResponse(response); - } catch (Exception e) { - onFailure(e); - } - } + Logger logger = HandledTransportAction.this.logger; + execute(task, request, new ChannelActionListener<>(channel, actionName, request)); + } } - @Override - public void onFailure(Exception e) { - try { - channel.sendResponse(e); - } catch (Exception e1) { - logger.warn(() -> new ParameterizedMessage( - "Failed to send error response for action [{}] and request [{}]", actionName, request), e1); - } - } - }); + public static final class ChannelActionListener<Response extends TransportResponse, Request extends TransportRequest> implements + ActionListener<Response> { + private final Logger logger = LogManager.getLogger(getClass()); + private final TransportChannel channel; + private final Request request; + private final String actionName; + + public ChannelActionListener(TransportChannel channel, String actionName, Request request) { + this.channel = channel; + this.request = request; + this.actionName = actionName; + } + + @Override + public void onResponse(Response response) { + try { + channel.sendResponse(response); + } catch (Exception e) { + onFailure(e); + } + } + + @Override + public void onFailure(Exception e) { + try { + channel.sendResponse(e); + } catch (Exception e1) { + logger.warn(() -> new ParameterizedMessage( + "Failed to send error response for action [{}] and request [{}]", actionName, request), e1); + } } } From 9026c3ee92f1f13630a989023acd8c47829e3ec0 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 18 Sep 2018 19:53:42 +0200 Subject: [PATCH 29/32] Ensure realtime `_get` and `_termvectors` don't run on the network thread (#33814) The change in #27500 introduced a regression that causes `_get` and `_term_vector` actions to run on the network thread if the realtime flag is set. This fixes the issue by delegating to the super method, which forks onto the corresponding threadpool.
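Editor's note on why delegating to the super method matters: work triggered from a transport handler must not block the network thread; it has to be forked onto a dedicated executor. A minimal, self-contained sketch of that pattern follows, using a plain ExecutorService as a stand-in for the named ES thread pools (GET_POOL, readFromShard, and the Consumer-based listener are invented for illustration):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;

public class ForkSketch {
    // Stand-in for a dedicated thread pool such as the "get" pool.
    private static final ExecutorService GET_POOL = Executors.newFixedThreadPool(4);

    // The regression: the (potentially blocking) shard read runs on the
    // calling thread, which in the real code is the network thread.
    static void inlineShardOperation(String docId, Consumer<String> listener) {
        listener.accept(readFromShard(docId));
    }

    // The fix: fork onto the dedicated pool so the caller returns immediately.
    static void asyncShardOperation(String docId, Consumer<String> listener) {
        GET_POOL.execute(() -> listener.accept(readFromShard(docId)));
    }

    private static String readFromShard(String docId) {
        // pretend this hits disk or waits for a refresh
        return "doc [" + docId + "] read on thread [" + Thread.currentThread().getName() + "]";
    }

    public static void main(String[] args) throws InterruptedException {
        inlineShardOperation("1", r -> System.out.println("inline: " + r)); // runs on main
        asyncShardOperation("1", r -> System.out.println("forked: " + r));  // runs on a pool thread
        GET_POOL.shutdown();
        GET_POOL.awaitTermination(5, TimeUnit.SECONDS);
    }
}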
--- .../java/org/elasticsearch/action/get/TransportGetAction.java | 2 +- .../action/termvectors/TransportTermVectorsAction.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java index 0aeacb38ffa..63d3d30e1e2 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java @@ -81,7 +81,7 @@ public class TransportGetAction extends TransportSingleShardAction<GetRequest, GetResponse> { IndexShard indexShard = indexService.getShard(shardId.id()); if (request.realtime()) { // we are not tied to a refresh cycle here anyway - listener.onResponse(shardOperation(request, shardId)); + super.asyncShardOperation(request, shardId, listener); } else { indexShard.awaitShardSearchActive(b -> { try { diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java index a259f5b828a..d2a6055bbe7 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TransportTermVectorsAction.java @@ -90,7 +90,7 @@ public class TransportTermVectorsAction extends TransportSingleShardAction<TermVectorsRequest, TermVectorsResponse> { IndexShard indexShard = indexService.getShard(shardId.id()); if (request.realtime()) { - listener.onResponse(shardOperation(request, shardId)); + super.asyncShardOperation(request, shardId, listener); } else { indexShard.awaitShardSearchActive(b -> { try { From c6462057a15289bfb69a97441159b555b37d7d80 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 18 Sep 2018 20:43:31 +0200 Subject: [PATCH 30/32] MINOR: Remove Some Dead Code in Scripting (#33800) * The isDefaultEnabled check method is not used in ScriptType * The scorer and docid vars on ExpressionSearchScript are unused --- .../expression/ExpressionSearchScript.java | 3 --- .../org/elasticsearch/script/ScriptType.java | 17 +++-------------- 2 files changed, 3 insertions(+), 17 deletions(-) diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionSearchScript.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionSearchScript.java index cb19a604623..6df2b33127d 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionSearchScript.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionSearchScript.java @@ -25,7 +25,6 @@ import org.apache.lucene.expressions.SimpleBindings; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.DoubleValues; import org.apache.lucene.search.DoubleValuesSource; -import org.apache.lucene.search.Scorer; import org.elasticsearch.script.GeneralScriptException; import org.elasticsearch.script.SearchScript; @@ -42,8 +41,6 @@ class ExpressionSearchScript implements SearchScript.LeafFactory { final DoubleValuesSource source; final ReplaceableConstDoubleValueSource specialValue; // _value final boolean needsScores; - Scorer scorer; - int docid; ExpressionSearchScript(Expression e, SimpleBindings b, ReplaceableConstDoubleValueSource v, boolean needsScores) { exprScript = e; diff --git a/server/src/main/java/org/elasticsearch/script/ScriptType.java b/server/src/main/java/org/elasticsearch/script/ScriptType.java index 2fdf283c57f..5d356bbd7cb 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptType.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptType.java @@ -41,7 +41,7 @@ public enum ScriptType implements Writeable { * (Groovy and others), but can be overridden by the specific {@link ScriptEngine} * if the language is naturally secure (Painless, Mustache, and Expressions).
*/ - INLINE ( 0 , new ParseField("source", "inline") , false ), + INLINE ( 0 , new ParseField("source", "inline")), /** * STORED scripts are saved as part of the {@link org.elasticsearch.cluster.ClusterState} * (Groovy and others), but can be overridden by the specific {@link ScriptEngine} * if the language is naturally secure (Painless, Mustache, and Expressions). */ - STORED ( 1 , new ParseField("id", "stored") , false ); + STORED ( 1 , new ParseField("id", "stored")); /** * Reads an int from the input stream and converts it to a {@link ScriptType}. @@ -73,18 +73,15 @@ public enum ScriptType implements Writeable { private final int id; private final ParseField parseField; - private final boolean defaultEnabled; /** * Standard constructor. * @param id A unique identifier for a type that can be read/written to a stream. * @param parseField Specifies the name used to parse input from queries. - * @param defaultEnabled Whether or not a {@link ScriptType} can be run by default. */ - ScriptType(int id, ParseField parseField, boolean defaultEnabled) { + ScriptType(int id, ParseField parseField) { this.id = id; this.parseField = parseField; - this.defaultEnabled = defaultEnabled; } public void writeTo(StreamOutput out) throws IOException { @@ -112,14 +109,6 @@ public enum ScriptType implements Writeable { return parseField; } - /** - * @return Whether or not a {@link ScriptType} can be run by default. Note - * this can be potentially overridden by any {@link ScriptEngine}. - */ - public boolean isDefaultEnabled() { - return defaultEnabled; - } - /** * @return The same as calling {@link #getName()}. */ From f4cbbcf98b8b0acfdabbc2a8dfee28bd3536bbca Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Tue, 18 Sep 2018 15:25:20 -0400 Subject: [PATCH 31/32] Add ES version 6.4.2 (#33831) Version and properties files --- server/src/main/java/org/elasticsearch/Version.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 01738930b4b..e6939edbd89 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -101,6 +101,8 @@ public class Version implements Comparable<Version>, ToXContentFragment { public static final Version V_6_4_0 = new Version(V_6_4_0_ID, org.apache.lucene.util.Version.LUCENE_7_4_0); public static final int V_6_4_1_ID = 6040199; public static final Version V_6_4_1 = new Version(V_6_4_1_ID, org.apache.lucene.util.Version.LUCENE_7_4_0); + public static final int V_6_4_2_ID = 6040299; + public static final Version V_6_4_2 = new Version(V_6_4_2_ID, org.apache.lucene.util.Version.LUCENE_7_4_0); public static final int V_6_5_0_ID = 6050099; public static final Version V_6_5_0 = new Version(V_6_5_0_ID, org.apache.lucene.util.Version.LUCENE_7_5_0); public static final int V_7_0_0_alpha1_ID = 7000001; @@ -123,6 +125,8 @@ public class Version implements Comparable<Version>, ToXContentFragment { return V_7_0_0_alpha1; case V_6_5_0_ID: return V_6_5_0; + case V_6_4_2_ID: + return V_6_4_2; case V_6_4_1_ID: return V_6_4_1; case V_6_4_0_ID: From 805a12361fe30f940121627a8a9398e7d93cffef Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 18 Sep 2018 21:47:02 +0200 Subject: [PATCH 32/32] [CCR] Fail with a descriptive error if leader index does not exist (#33797) Closes #33737
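Editor's note: the core of the change in both fixed code paths below is a fail-fast null check on the leader index lookup, raising a descriptive IndexNotFoundException (surfacing as HTTP 404) instead of letting a null IndexMetaData flow onward and fail later with a less helpful error. Reduced to a standalone sketch, with a plain map standing in for the cluster-state metadata (requireLeaderIndex and the map contents are invented for illustration):

import java.util.HashMap;
import java.util.Map;

public class LeaderLookupSketch {

    // Stand-in for fetching IndexMetaData from the cluster state: the
    // lookup returns null when the index does not exist, so callers check.
    static String requireLeaderIndex(Map<String, String> clusterIndices, String leaderIndex) {
        String indexUuid = clusterIndices.get(leaderIndex);
        if (indexUuid == null) {
            // fail fast with a descriptive error; the real code raises
            // IndexNotFoundException, which maps to HTTP 404
            throw new IllegalArgumentException("no such index [" + leaderIndex + "]");
        }
        return indexUuid;
    }

    public static void main(String[] args) {
        Map<String, String> clusterIndices = new HashMap<>();
        clusterIndices.put("index1", "aBcD1234efGh");
        System.out.println(requireLeaderIndex(clusterIndices, "index1")); // aBcD1234efGh
        try {
            requireLeaderIndex(clusterIndices, "non-existing-index");
        } catch (IllegalArgumentException e) {
            System.out.println("rejected: " + e.getMessage()); // rejected: no such index [non-existing-index]
        }
    }
}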
.../elasticsearch/xpack/ccr/CcrLicenseChecker.java | 6 ++++++ .../action/TransportCreateAndFollowIndexAction.java | 6 ++++++ .../org/elasticsearch/xpack/ccr/ShardChangesIT.java | 9 +++++++++ 4 files changed, 34 insertions(+) diff --git a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java index 73a15410b07..f108e033691 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java @@ -25,6 +25,7 @@ import java.util.List; import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -82,6 +83,18 @@ public class FollowIndexIT extends ESRestTestCase { } } + public void testFollowNonExistingLeaderIndex() throws Exception { + assumeFalse("Test should only run when both clusters are running", runningAgainstLeaderCluster); + ResponseException e = expectThrows(ResponseException.class, + () -> followIndex("leader_cluster:non-existing-index", "non-existing-index")); + assertThat(e.getMessage(), containsString("no such index")); + assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(404)); + + e = expectThrows(ResponseException.class, () -> createAndFollowIndex("leader_cluster:non-existing-index", "non-existing-index")); + assertThat(e.getMessage(), containsString("no such index")); + assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(404)); + } + public void testAutoFollowPatterns() throws Exception { assumeFalse("Test should only run when both clusters are running", runningAgainstLeaderCluster); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java index f597871fc66..065b3ffd4f5 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java @@ -25,6 +25,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.CheckedConsumer; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.engine.CommitStats; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.shard.ShardId; @@ -109,6 +110,11 @@ public final class CcrLicenseChecker { onFailure, leaderClusterState -> { IndexMetaData leaderIndexMetaData = leaderClusterState.getMetaData().index(leaderIndex); + if (leaderIndexMetaData == null) { + onFailure.accept(new IndexNotFoundException(leaderIndex)); + return; + } + final Client leaderClient = client.getRemoteClusterClient(clusterAlias); fetchLeaderHistoryUUIDs(leaderClient, leaderIndexMetaData, onFailure, historyUUIDs -> { consumer.accept(historyUUIDs, leaderIndexMetaData); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportCreateAndFollowIndexAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportCreateAndFollowIndexAction.java index 
e795a903729..fd421a9380b 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportCreateAndFollowIndexAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportCreateAndFollowIndexAction.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.RemoteClusterAware; @@ -121,6 +122,11 @@ public final class TransportCreateAndFollowIndexAction // following an index in local cluster, so use local cluster state to fetch leader index metadata final String leaderIndex = request.getFollowRequest().getLeaderIndex(); final IndexMetaData leaderIndexMetadata = state.getMetaData().index(leaderIndex); + if (leaderIndexMetadata == null) { + listener.onFailure(new IndexNotFoundException(leaderIndex)); + return; + } + Consumer<String[]> handler = historyUUIDs -> { createFollowerIndex(leaderIndexMetadata, historyUUIDs, request, listener); }; diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/ShardChangesIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/ShardChangesIT.java index 3d1789389d7..472098dd4fa 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/ShardChangesIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/ShardChangesIT.java @@ -430,12 +430,21 @@ public class ShardChangesIT extends ESIntegTestCase { // Leader index does not exist. FollowIndexAction.Request followRequest1 = createFollowRequest("non-existent-leader", "test-follower"); expectThrows(IndexNotFoundException.class, () -> client().execute(FollowIndexAction.INSTANCE, followRequest1).actionGet()); + expectThrows(IndexNotFoundException.class, + () -> client().execute(CreateAndFollowIndexAction.INSTANCE, new CreateAndFollowIndexAction.Request(followRequest1)) + .actionGet()); // Follower index does not exist. FollowIndexAction.Request followRequest2 = createFollowRequest("non-test-leader", "non-existent-follower"); expectThrows(IndexNotFoundException.class, () -> client().execute(FollowIndexAction.INSTANCE, followRequest2).actionGet()); + expectThrows(IndexNotFoundException.class, + () -> client().execute(CreateAndFollowIndexAction.INSTANCE, new CreateAndFollowIndexAction.Request(followRequest2)) + .actionGet()); // Both indices do not exist. FollowIndexAction.Request followRequest3 = createFollowRequest("non-existent-leader", "non-existent-follower"); expectThrows(IndexNotFoundException.class, () -> client().execute(FollowIndexAction.INSTANCE, followRequest3).actionGet()); + expectThrows(IndexNotFoundException.class, + () -> client().execute(CreateAndFollowIndexAction.INSTANCE, new CreateAndFollowIndexAction.Request(followRequest3)) + .actionGet()); } public void testFollowIndex_lowMaxTranslogBytes() throws Exception {